hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0ba5ab8ff1f6e2c05c4ed09c50365e9052e6d07c
| 9,983
|
py
|
Python
|
mayan/apps/documents/tests/test_document_version_page_api.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 2
|
2021-09-12T19:41:19.000Z
|
2021-09-12T19:41:20.000Z
|
mayan/apps/documents/tests/test_document_version_page_api.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 37
|
2021-09-13T01:00:12.000Z
|
2021-10-02T03:54:30.000Z
|
mayan/apps/documents/tests/test_document_version_page_api.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 1
|
2021-09-22T13:17:30.000Z
|
2021-09-22T13:17:30.000Z
|
from rest_framework import status
from mayan.apps.rest_api.tests.base import BaseAPITestCase
from ..events import (
event_document_version_page_created, event_document_version_page_deleted,
event_document_version_page_edited
)
from ..permissions import (
permission_document_version_edit, permission_document_version_view
)
from .mixins.document_mixins import DocumentTestMixin
from .mixins.document_version_mixins import (
DocumentVersionPageAPIViewTestMixin
)
class DocumentVersionPageAPIViewTestCase(
    DocumentVersionPageAPIViewTestMixin, DocumentTestMixin, BaseAPITestCase
):
    """API tests for the document version page endpoints.

    Every operation is exercised twice: without access (a 404 is expected so
    the object's existence is not leaked) and with the required permission
    granted on the test document version. Event emission is verified in both
    cases.
    """

    def test_document_version_page_create_api_view_no_permission(self):
        page_count = self.test_document.version_active.version_pages.count()

        self._clear_events()

        response = self._request_test_document_version_page_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        # No page must have been added.
        self.assertEqual(
            self.test_document.version_active.version_pages.count(),
            page_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_create_api_view_with_access(self):
        self.grant_access(
            obj=self.test_document_version,
            permission=permission_document_version_edit
        )
        page_count = self.test_document.version_active.version_pages.count()

        self._clear_events()

        response = self._request_test_document_version_page_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        # Exactly one new page must exist.
        self.assertEqual(
            self.test_document.version_active.version_pages.count(),
            page_count + 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, self.test_document_version)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_document_version_page)
        self.assertEqual(event.verb, event_document_version_page_created.id)

    def test_document_version_page_delete_api_view_no_permission(self):
        page_count = self.test_document.version_active.version_pages.count()

        self._clear_events()

        response = self._request_test_document_version_page_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        # No page must have been removed.
        self.assertEqual(
            self.test_document.version_active.version_pages.count(),
            page_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_delete_api_view_with_access(self):
        self.grant_access(
            obj=self.test_document_version,
            permission=permission_document_version_edit
        )
        page_count = self.test_document.version_active.version_pages.count()

        self._clear_events()

        response = self._request_test_document_version_page_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        # Exactly one page must have been removed.
        self.assertEqual(
            self.test_document.version_active.version_pages.count(),
            page_count - 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, None)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_document_version)
        self.assertEqual(event.verb, event_document_version_page_deleted.id)

    def test_document_version_page_detail_api_view_no_permission(self):
        self._clear_events()

        response = self._request_test_document_version_page_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_detail_api_view_with_access(self):
        self.grant_access(
            obj=self.test_document_version,
            permission=permission_document_version_view
        )

        self._clear_events()

        response = self._request_test_document_version_page_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Read-only views emit no events.
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_edit_via_patch_api_view_no_permission(self):
        original_page_number = self.test_document_version_page.page_number

        self._clear_events()

        response = self._request_test_document_version_page_edit_via_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        # The stored page number must be unchanged.
        self.test_document_version_page.refresh_from_db()
        self.assertEqual(
            self.test_document_version_page.page_number,
            original_page_number
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_edit_via_patch_api_view_with_access(self):
        self.grant_access(
            obj=self.test_document_version,
            permission=permission_document_version_edit
        )
        original_page_number = self.test_document_version_page.page_number

        self._clear_events()

        response = self._request_test_document_version_page_edit_via_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # The stored page number must have changed.
        self.test_document_version_page.refresh_from_db()
        self.assertNotEqual(
            self.test_document_version_page.page_number,
            original_page_number
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, self.test_document_version)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_document_version_page)
        self.assertEqual(event.verb, event_document_version_page_edited.id)

    def test_document_version_page_edit_via_put_api_view_no_permission(self):
        original_page_number = self.test_document_version_page.page_number

        self._clear_events()

        response = self._request_test_document_version_page_edit_via_put_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        # The stored page number must be unchanged.
        self.test_document_version_page.refresh_from_db()
        self.assertEqual(
            self.test_document_version_page.page_number,
            original_page_number
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_edit_via_put_api_view_with_access(self):
        self.grant_access(
            obj=self.test_document_version,
            permission=permission_document_version_edit
        )
        original_page_number = self.test_document_version_page.page_number

        self._clear_events()

        response = self._request_test_document_version_page_edit_via_put_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # The stored page number must have changed.
        self.test_document_version_page.refresh_from_db()
        self.assertNotEqual(
            self.test_document_version_page.page_number,
            original_page_number
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, self.test_document_version)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_document_version_page)
        self.assertEqual(event.verb, event_document_version_page_edited.id)

    def test_document_version_page_image_api_view_no_permission(self):
        self._clear_events()

        response = self._request_test_document_version_page_image_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_image_api_view_with_access(self):
        self.grant_access(
            obj=self.test_document_version,
            permission=permission_document_version_view
        )

        self._clear_events()

        response = self._request_test_document_version_page_image_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Read-only views emit no events.
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_list_api_view_no_permission(self):
        self._clear_events()

        response = self._request_test_document_version_page_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_version_page_list_api_view_with_access(self):
        self.grant_access(
            obj=self.test_document_version,
            permission=permission_document_version_view
        )

        self._clear_events()

        response = self._request_test_document_version_page_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # The test page must be the first result.
        self.assertEqual(
            response.data['results'][0]['id'],
            self.test_document_version_page.id
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
| 37.958175
| 94
| 0.709506
| 1,181
| 9,983
| 5.488569
| 0.066892
| 0.222154
| 0.184665
| 0.156125
| 0.929034
| 0.908516
| 0.907899
| 0.907899
| 0.888769
| 0.881055
| 0
| 0.009613
| 0.218471
| 9,983
| 262
| 95
| 38.103053
| 0.8212
| 0
| 0
| 0.708333
| 0
| 0
| 0.000926
| 0
| 0
| 0
| 0
| 0
| 0.276042
| 1
| 0.072917
| false
| 0
| 0.03125
| 0
| 0.109375
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0bb44fc60574b7b7aca285bdb61e903b0e936056
| 13,781
|
py
|
Python
|
app3/models.py
|
wtianzi/SARWeb
|
e89d91deec19c2e3ef2ba65edb28b30c3e531f5a
|
[
"MIT"
] | null | null | null |
app3/models.py
|
wtianzi/SARWeb
|
e89d91deec19c2e3ef2ba65edb28b30c3e531f5a
|
[
"MIT"
] | 9
|
2020-02-11T23:43:52.000Z
|
2022-02-10T07:31:33.000Z
|
app3/models.py
|
wtianzi/SARWeb
|
e89d91deec19c2e3ef2ba65edb28b30c3e531f5a
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.utils.text import slugify
# Create your models here.
class Person(models.Model):
    """A person record holding a first and last name."""
    id = models.AutoField(primary_key=True)
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    def __str__(self):
        # Display uses the first name only; last_name is not shown.
        return self.first_name
class Task(models.Model):
    """A task with a free-form polygon description and short notes."""
    id = models.AutoField(primary_key=True)
    # Free-form text; presumably serialized polygon coordinates — TODO confirm.
    taskpolygon = models.TextField(blank=True,null=True)
    # Short label; also used as the display string below.
    notes = models.CharField(max_length=30)
    taskid = models.CharField(max_length=100,blank=True,null=True)
    def __str__(self):
        return self.notes
class GPSData(models.Model):
    """Latest GPS payload per device.

    One row per device: ``deviceid`` is the primary key, so saving with an
    existing device id overwrites that device's row.
    """
    #id = models.AutoField(primary_key=True)
    deviceid = models.CharField(max_length=20,primary_key=True)
    taskid = models.CharField(max_length=100,blank=True,null=True)
    # Free-form payload; format not visible here — TODO confirm (JSON?).
    gpsdata = models.TextField(blank=True,null=True)
    created_at = models.DateTimeField(auto_now_add=True,blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True,blank=True, null=True)
    def __str__(self):
        # gpsdata is nullable; __str__ must return a str (returning None
        # raises TypeError, e.g. in the admin), so fall back to ''.
        return self.gpsdata or ''
class WaypointsData(models.Model):
    """Latest waypoints payload per device (one row per device id)."""
    #id = models.AutoField(primary_key=True)
    deviceid = models.CharField(max_length=20,primary_key=True)
    taskid = models.CharField(max_length=100,blank=True,null=True)
    # Free-form payload; format not visible here — TODO confirm (JSON?).
    waypointsdata = models.TextField(blank=True,null=True)
    created_at = models.DateTimeField(auto_now_add=True,blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True,blank=True, null=True)
    def __str__(self):
        # waypointsdata is nullable; __str__ must always return a str
        # (returning None raises TypeError), so fall back to ''.
        return self.waypointsdata or ''
class GPShistoricalData(models.Model):
    """Historical GPS payload per device (one row per device id)."""
    #id = models.AutoField(primary_key=True)
    deviceid = models.CharField(max_length=20,primary_key=True)
    taskid = models.CharField(max_length=100,blank=True,null=True)
    # Free-form payload; format not visible here — TODO confirm (JSON?).
    gpshistoricaldata = models.TextField(blank=True,null=True)
    created_at = models.DateTimeField(auto_now_add=True,blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True,blank=True, null=True)
    def __str__(self):
        # gpshistoricaldata is nullable; __str__ must always return a str
        # (returning None raises TypeError), so fall back to ''.
        return self.gpshistoricaldata or ''
class DataStorage(models.Model):
    """Generic keyed blob storage, addressed by task and subtask ids."""
    id = models.AutoField(primary_key=True)
    taskid = models.CharField(max_length=100,blank=True,null=True)
    subtaskid = models.CharField(max_length=100,blank=True,null=True)
    # Free-form payload; format not visible here — TODO confirm.
    data = models.TextField(blank=True,null=True)
    created_at = models.DateTimeField(auto_now_add=True,blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True,blank=True, null=True)
    def __str__(self):
        # data is nullable; __str__ must always return a str (returning
        # None raises TypeError), so fall back to ''.
        return self.data or ''
class TaskAssignment(models.Model):
    """A search-and-rescue task assignment form.

    Mirrors a paper SAR task-assignment sheet: identification, status flags,
    briefing checklist items (boolean + optional free-text input pairs) and a
    radio communications plan with four tactical channels.
    """
    id = models.AutoField(primary_key=True)

    # Identification.
    resourcetype = models.CharField(max_length=100, blank=True, null=True)
    planningno = models.CharField(max_length=100, blank=True, null=True)
    priority = models.CharField(max_length=100, blank=True, null=True)

    # Status flags. NOTE(review): these default to True, which marks a new
    # assignment as already complete/urgent — confirm this is intentional.
    task_complete = models.BooleanField(default=True)
    task_partially_finished = models.BooleanField(default=True)
    urgent_follow_up = models.BooleanField(default=True)

    task_number = models.CharField(max_length=100, blank=True, default="0000")
    team_identifier = models.CharField(max_length=100, blank=True, null=True)
    resource_type = models.CharField(max_length=100, blank=True, null=True)
    task_map = models.CharField(max_length=100, blank=True, null=True)
    branch = models.CharField(max_length=100, blank=True, null=True)
    division_group = models.CharField(max_length=100, blank=True, null=True)
    incident_name = models.CharField(max_length=100, blank=True, null=True)
    task_instructions = models.TextField(blank=True, null=True)
    previous_search = models.CharField(max_length=1000, blank=True, null=True)
    transportation = models.CharField(max_length=1000, blank=True, null=True)
    equipment_requirements = models.CharField(max_length=1000, blank=True, null=True)

    # Briefing checklist: each boolean flag may carry a free-text detail.
    expected_time_frame = models.BooleanField(default=True)
    expected_time_frame_input = models.CharField(max_length=100, blank=True, null=True)
    target_pod_subject = models.BooleanField(default=True)
    target_pod_subject_input = models.CharField(max_length=100, blank=True, null=True)
    target_pod_clues = models.BooleanField(default=True)
    target_pod_clues_input = models.CharField(max_length=100, blank=True, null=True)
    team_nearby = models.BooleanField(default=True)
    team_nearby_input = models.CharField(max_length=100, blank=True, null=True)
    applicable_clues = models.BooleanField(default=True)
    # NOTE(review): 'terrain_hazrds' is a typo for 'terrain_hazards', but
    # renaming would change the DB column — left as-is deliberately.
    terrain_hazrds = models.BooleanField(default=True)
    weather_safety_issues = models.BooleanField(default=True)
    press_family_plans = models.BooleanField(default=True)
    subject_information = models.BooleanField(default=True)
    rescue_find_plans = models.BooleanField(default=True)
    others = models.BooleanField(default=True)
    others_input = models.TextField(blank=True, null=True)

    # Communications plan.
    team_call_sign = models.CharField(max_length=100, blank=True, null=True)
    freq_team = models.CharField(max_length=100, blank=True, null=True)
    base_call_sign = models.CharField(max_length=100, blank=True, null=True)
    freq_base = models.CharField(max_length=100, blank=True, null=True)
    pertinent_phone_no = models.CharField(max_length=100, blank=True, null=True)
    base = models.CharField(max_length=100, blank=True, null=True)
    # NOTE(review): 'check_in_feq' looks like a typo for 'check_in_freq';
    # renaming would change the DB column — left as-is deliberately.
    check_in_feq = models.CharField(max_length=100, blank=True, null=True)
    check_in_hour = models.CharField(max_length=100, blank=True, null=True)
    tactical_1_function = models.CharField(max_length=100, blank=True, null=True)
    tactical_1_freq = models.CharField(max_length=100, blank=True, null=True)
    tactical_1_comments = models.CharField(max_length=100, blank=True, null=True)
    tactical_2_function = models.CharField(max_length=100, blank=True, null=True)
    tactical_2_freq = models.CharField(max_length=100, blank=True, null=True)
    tactical_2_comments = models.CharField(max_length=100, blank=True, null=True)
    tactical_3_function = models.CharField(max_length=100, blank=True, null=True)
    tactical_3_freq = models.CharField(max_length=100, blank=True, null=True)
    tactical_3_comments = models.CharField(max_length=100, blank=True, null=True)
    tactical_4_function = models.CharField(max_length=100, blank=True, null=True)
    tactical_4_freq = models.CharField(max_length=100, blank=True, null=True)
    tactical_4_comments = models.CharField(max_length=100, blank=True, null=True)

    note_safety_message = models.TextField(blank=True, null=True)
    prepared_by = models.CharField(max_length=100, blank=True, null=True)
    briefed_by = models.CharField(max_length=100, blank=True, null=True)
    time_out = models.CharField(max_length=100, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True, blank=True, null=True)

    def __str__(self):
        # resourcetype is nullable; __str__ must always return a str
        # (returning None raises TypeError), so fall back to ''.
        return self.resourcetype or ''
class ClueMedia(models.Model):
    """A geolocated photo of a clue with an optional description."""
    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=255, blank=True,null=True)
    # 9 decimal places of lat/long — sub-millimeter precision.
    longitude = models.DecimalField(max_digits=12, decimal_places=9, blank=True,null=True)
    latitude = models.DecimalField(max_digits=12, decimal_places=9, blank=True,null=True)
    # Uploaded image; 'No-img.png' is used as the placeholder when missing.
    photo = models.ImageField(upload_to='uploads/', default='No-img.png', blank=True,null=True)
    description = models.CharField(max_length=100, blank=True,null=True)
    def __str__(self):
        return str(self.id)
class ExperimentDataStorage(models.Model):
    """Timestamped free-form experiment log entries."""
    id = models.AutoField(primary_key=True)
    created_at = models.DateTimeField(auto_now_add=True,blank=True, null=True)
    # Free-form details; format not visible here — TODO confirm.
    details = models.TextField( blank=True,null=True)
    def __str__(self):
        return str(self.id)
class QuestionnaireModel(models.Model):
    """Per-task questionnaire responses for one participant.

    Stores aggregate scores plus individual items of the transparency and
    trust scales, the six NASA-TLX workload items, and twelve generic
    question fields.
    """
    id = models.AutoField(primary_key=True)

    # Session identifiers.
    participantid = models.CharField(max_length=100, blank=True, null=True)
    taskid = models.CharField(max_length=100, blank=True, null=True)
    sceneid = models.CharField(max_length=100, blank=True, null=True)

    # Aggregate scores.
    trust = models.IntegerField(blank=True, null=True)
    transparency = models.IntegerField(blank=True, null=True)
    workload = models.IntegerField(blank=True, null=True)

    # Transparency scale items.
    trans1 = models.IntegerField(blank=True, null=True)
    trans2 = models.IntegerField(blank=True, null=True)
    trans3 = models.IntegerField(blank=True, null=True)
    trans4 = models.IntegerField(blank=True, null=True)
    trans5 = models.IntegerField(blank=True, null=True)

    # Trust scale items.
    trust1 = models.IntegerField(blank=True, null=True)
    trust2 = models.IntegerField(blank=True, null=True)
    trust3 = models.IntegerField(blank=True, null=True)
    trust4 = models.IntegerField(blank=True, null=True)
    trust5 = models.IntegerField(blank=True, null=True)

    # NASA-TLX workload items.
    NASATLX1_mental = models.IntegerField(blank=True, null=True)
    NASATLX2_physical = models.IntegerField(blank=True, null=True)
    NASATLX3_temporal = models.IntegerField(blank=True, null=True)
    NASATLX4_performance = models.IntegerField(blank=True, null=True)
    NASATLX5_effort = models.IntegerField(blank=True, null=True)
    NASATLX6_frustration = models.IntegerField(blank=True, null=True)

    # Generic question items.
    q1 = models.IntegerField(blank=True, null=True)
    q2 = models.IntegerField(blank=True, null=True)
    q3 = models.IntegerField(blank=True, null=True)
    q4 = models.IntegerField(blank=True, null=True)
    q5 = models.IntegerField(blank=True, null=True)
    q6 = models.IntegerField(blank=True, null=True)
    q7 = models.IntegerField(blank=True, null=True)
    q8 = models.IntegerField(blank=True, null=True)
    q9 = models.IntegerField(blank=True, null=True)
    q10 = models.IntegerField(blank=True, null=True)
    q11 = models.IntegerField(blank=True, null=True)
    q12 = models.IntegerField(blank=True, null=True)

    created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True, blank=True, null=True)

    def __str__(self):
        return str(self.id)
class WebapplicationModel(models.Model):
    """Web-application questionnaire answers (13 integer items) per session."""
    id = models.AutoField(primary_key=True)

    # Session identifiers.
    participantid = models.CharField(max_length=100, blank=True, null=True)
    taskid = models.CharField(max_length=100, blank=True, null=True)
    sceneid = models.CharField(max_length=100, blank=True, null=True)

    # Question items.
    q1 = models.IntegerField(blank=True, null=True)
    q2 = models.IntegerField(blank=True, null=True)
    q3 = models.IntegerField(blank=True, null=True)
    q4 = models.IntegerField(blank=True, null=True)
    q5 = models.IntegerField(blank=True, null=True)
    q6 = models.IntegerField(blank=True, null=True)
    q7 = models.IntegerField(blank=True, null=True)
    q8 = models.IntegerField(blank=True, null=True)
    q9 = models.IntegerField(blank=True, null=True)
    q10 = models.IntegerField(blank=True, null=True)
    q11 = models.IntegerField(blank=True, null=True)
    q12 = models.IntegerField(blank=True, null=True)
    q13 = models.IntegerField(blank=True, null=True)

    created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True, blank=True, null=True)

    def __str__(self):
        return str(self.id)
class ParticipantStatusModel(models.Model):
    """Progress tracking for one study participant."""
    id = models.AutoField(primary_key=True)

    participantid = models.CharField(max_length=100, blank=True, null=True)  # a unix time
    participantindex = models.IntegerField(blank=True, null=True)  # a number between 1-122
    participantname = models.CharField(max_length=100, blank=True, null=True)

    # True once the participant has finished the whole task.
    status = models.BooleanField(default=False)
    # JSON text, e.g. {'task1':{'status':0,'experimentsetup':54,'duration':0},}
    taskstatus = models.TextField(blank=True, null=True)

    created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True, blank=True, null=True)

    def __str__(self):
        return str(self.id)
class DemographicsModel(models.Model):
    """Demographic survey answers for one participant."""
    id = models.AutoField(primary_key=True)

    participantid = models.CharField(max_length=100, blank=True, null=True)  # a unix time
    participantindex = models.IntegerField(blank=True, null=True)

    # Coded demographic answers.
    age = models.IntegerField(blank=True, null=True)
    gender = models.IntegerField(blank=True, null=True)
    education = models.IntegerField(blank=True, null=True)
    sart = models.IntegerField(blank=True, null=True)

    # Survey items: q1 is coded, q2/q3 are short answers, q4-q10 free text.
    q1 = models.IntegerField(blank=True, null=True)
    q2 = models.CharField(max_length=100, blank=True, null=True)
    q3 = models.CharField(max_length=100, blank=True, null=True)
    q4 = models.TextField(blank=True, null=True)
    q5 = models.TextField(blank=True, null=True)
    q6 = models.TextField(blank=True, null=True)
    q7 = models.TextField(blank=True, null=True)
    q8 = models.TextField(blank=True, null=True)
    q9 = models.TextField(blank=True, null=True)
    q10 = models.TextField(blank=True, null=True)

    created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True, blank=True, null=True)

    def __str__(self):
        return str(self.id)
class PostExpSurveyModel(models.Model):
    """Post-experiment survey: eleven free-text answers per participant."""
    id = models.AutoField(primary_key=True)

    participantid = models.CharField(max_length=100, blank=True, null=True)  # a unix time

    # Free-text survey items.
    q1 = models.TextField(blank=True, null=True)
    q2 = models.TextField(blank=True, null=True)
    q3 = models.TextField(blank=True, null=True)
    q4 = models.TextField(blank=True, null=True)
    q5 = models.TextField(blank=True, null=True)
    q6 = models.TextField(blank=True, null=True)
    q7 = models.TextField(blank=True, null=True)
    q8 = models.TextField(blank=True, null=True)
    q9 = models.TextField(blank=True, null=True)
    q10 = models.TextField(blank=True, null=True)
    q11 = models.TextField(blank=True, null=True)

    created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True, blank=True, null=True)

    def __str__(self):
        return str(self.id)
| 45.481848
| 124
| 0.761773
| 1,923
| 13,781
| 5.308892
| 0.113365
| 0.143697
| 0.206289
| 0.269762
| 0.848663
| 0.819179
| 0.708101
| 0.704574
| 0.681653
| 0.624547
| 0
| 0.024501
| 0.117408
| 13,781
| 302
| 125
| 45.63245
| 0.814848
| 0.022567
| 0
| 0.464435
| 0
| 0
| 0.001635
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058577
| false
| 0
| 0.008368
| 0.058577
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
f018a2fed7d2d7860feb20f52014c563220af02c
| 1,436
|
py
|
Python
|
pyluna-pathology/tests/luna/pathology/cli/test_load_slide.py
|
msk-mind/data-processing
|
c016d218da2eca003d06b96f2c03f16b3ce97873
|
[
"Apache-2.0"
] | 1
|
2022-03-29T03:48:00.000Z
|
2022-03-29T03:48:00.000Z
|
pyluna-pathology/tests/luna/pathology/cli/test_load_slide.py
|
msk-mind/data-processing
|
c016d218da2eca003d06b96f2c03f16b3ce97873
|
[
"Apache-2.0"
] | 96
|
2020-11-15T01:39:12.000Z
|
2021-08-24T14:37:49.000Z
|
pyluna-pathology/tests/luna/pathology/cli/test_load_slide.py
|
msk-mind/luna
|
282b5bd594cb5bf1ef2a7fdf56fca9bea5ad7102
|
[
"Apache-2.0"
] | 1
|
2021-01-04T15:14:23.000Z
|
2021-01-04T15:14:23.000Z
|
from click.testing import CliRunner
import os, shutil
from luna.pathology.cli.load_slide import cli
def test_cli():
    """Run the load_slide CLI and verify its WholeSlideImage output tree."""
    app_config = 'pyluna-pathology/tests/luna/pathology/cli/testdata/test_config.yml'
    method_config = 'pyluna-pathology/tests/luna/pathology/cli/testdata/load_slide.yml'
    output_dir = "pyluna-pathology/tests/luna/pathology/cli/testdata/data/test/slides/123/pathology.etl/WholeSlideImage"

    runner = CliRunner()
    result = runner.invoke(
        cli, ['-a', app_config, '-s', '123', '-m', method_config]
    )

    assert result.exit_code == 0
    # 'data' is checked with lexists because it may be a symlink.
    assert os.path.lexists(output_dir + '/data')
    assert os.path.exists(output_dir + '/metadata.json')

    # clean up
    shutil.rmtree(output_dir)
def test_cli_with_patientid():
    """Run the load_slide CLI with a metadata file that includes a patient id.

    Verifies a clean exit and that the WholeSlideImage 'data' link and
    'metadata.json' are produced, then removes the generated tree so the
    test leaves no artifacts behind (mirrors the cleanup in test_cli, which
    this test previously omitted).
    """
    runner = CliRunner()
    result = runner.invoke(cli, [
        '-a', 'pyluna-pathology/tests/luna/pathology/cli/testdata/test_config.yml',
        '-s', '123',
        '-m', 'pyluna-pathology/tests/luna/pathology/cli/testdata/load_slide_with_patientid.yml'])
    assert result.exit_code == 0
    # 'data' is checked with lexists because it may be a symlink.
    assert os.path.lexists('pyluna-pathology/tests/luna/pathology/cli/testdata/data/test/slides/123/pathology.etl/WholeSlideImage/data')
    assert os.path.exists('pyluna-pathology/tests/luna/pathology/cli/testdata/data/test/slides/123/pathology.etl/WholeSlideImage/metadata.json')
    # clean up — previously missing, leaving stale output between runs
    shutil.rmtree("pyluna-pathology/tests/luna/pathology/cli/testdata/data/test/slides/123/pathology.etl/WholeSlideImage")
| 42.235294
| 144
| 0.737465
| 192
| 1,436
| 5.447917
| 0.229167
| 0.124283
| 0.152964
| 0.206501
| 0.858509
| 0.858509
| 0.858509
| 0.858509
| 0.858509
| 0.858509
| 0
| 0.018125
| 0.116295
| 1,436
| 33
| 145
| 43.515152
| 0.806147
| 0.005571
| 0
| 0.636364
| 0
| 0.227273
| 0.587658
| 0.575035
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.090909
| false
| 0
| 0.136364
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f0259ee9fcde004d487cac02c9c405b878a56fad
| 57,358
|
py
|
Python
|
model.py
|
LeeGungun/LqxSleepStageNet
|
f2c3209af4047ede2bc59c95606c82f566a9f1d8
|
[
"Apache-2.0"
] | null | null | null |
model.py
|
LeeGungun/LqxSleepStageNet
|
f2c3209af4047ede2bc59c95606c82f566a9f1d8
|
[
"Apache-2.0"
] | null | null | null |
model.py
|
LeeGungun/LqxSleepStageNet
|
f2c3209af4047ede2bc59c95606c82f566a9f1d8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import tensorflow as tf
from tensorflow import keras
import nn
import gc
class TimeConv(keras.Model):
    def __init__(self, name, config):
        """Build the time-domain convolution sub-network from *config*.

        All hyper-parameters (filter sizes, strides, dropout, weight decay,
        etc.) are read from the config object; nothing is hard-coded here.
        """
        super(TimeConv, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = config.data_format
        # Pattern-extraction convolution stack, parameterized entirely by
        # the config's pattern_* fields.
        self.pattern_conv = nn.PatternConv('pattern_convolution', self.config.seed, self.config.pattern_layers,
                                           self.config.pattern_filters, self.config.pattern_stride,
                                           self.config.pattern_act, self.config.pattern_bias,
                                           self.config.pattern_padding, self.config.pattern_dropout,
                                           self.config.pattern_max_pool, self.config.pattern_wd,
                                           self.config.pattern_he_scale, self.data_format, self.dtype)
        # One depthwise-convolution layer per entry in time_dc_dfilters;
        # each layer i takes its own per-index hyper-parameters.
        self.depth_conv = list(nn.DepthConv('No.{}_depth_convolution'.format(i + 1), config.seed,
                                            config.time_dc_dfilters[i], config.time_dc_pfilter_num[i],
                                            config.time_dc_strides[i], config.time_dc_act[i], config.time_dc_bias[i],
                                            config.wd, config.time_dc_pd[i], self.data_format,
                                            self.dtype, config.time_dc_act_filter[i], config.time_dc_he_scale[i])
                               for i in range(len(config.time_dc_dfilters)))
        # Variance-based squeeze-and-excitation block (used in call()).
        self.var_se = nn.VarianceSE('variance_squeeze_excitation', self.config.se_trans_chs, self.config.seed,
                                    self.config.se_he_scale, self.config.se_bias, self.config.se_wd, self.dtype)
        # Lazily-populated cache of sub-layers exposing regu(); see regu().
        self.regulate_layers = None
def regu(self):
if self.regulate_layers is None:
regulate_layers = []
for l in self.layers:
if hasattr(l, 'regu'):
regulate_layers.append(l)
self.regulate_layers = regulate_layers
r = []
for l in self.regulate_layers:
r += l.regu()
return r
def get_config(self):
config = {'name': self.name, 'config': self.config}
return config
@classmethod
def from_config(cls, kwargs):
return cls(**kwargs)
def get_input(self):
prod = self.config.epoch_second * self.config.fs
inputs = tf.zeros((self.config.batch_size, self.config.chs, prod, 1), dtype=self.dtype)
return inputs, self.config.batch_size # need to append 'training'
# inputs --- T --- (epochs, chs, epoch_second * fs, 1)
def call(self, inputs, training=None, heap=None):
inputs = tf.cast(inputs, self.dtype)
chs_axis = 3 - self.data_format.find('H')
ori_chs = inputs.shape[chs_axis]
inputs = self.pattern_conv(inputs, training, heap, self.config.relu_leaky, self.config.relu_threshold)
# output: (epochs, chs * pfilter_depth_prod, h', 3 * 1)
inputs = tf.concat(inputs, axis=-1)
for dc in self.depth_conv:
inputs = dc(inputs, training, heap, relu=self.config.relu_leaky, threshold=self.config.relu_threshold)
# output: (epochs, wins, chs * pfilter_depth_prod, h' // wins, 3 * 1)
split_into_wins = tf.stack(tf.split(inputs, self.config.wins, axis=(3 - chs_axis)), axis=1)
# output: (epochs, wins, chs, pfilter_depth_prod, h' // wins, 3 * 1)
split_into_wins = tf.stack(tf.split(split_into_wins, ori_chs, axis=(chs_axis + 1)), axis=2)
del inputs
gc.collect()
# track out pattern: 3 patterns integrate into one
temp = tf.transpose(tf.reduce_mean(split_into_wins, axis=(5 - chs_axis)), (4, 0, 2, 3, 1))
# (epochs, chs, 3)
var = tf.transpose(self.var_se(temp, self.config.relu_leaky, self.config.relu_threshold, heap), (1, 2, 0))
var = tf.reshape(var, (var.shape[0], 1, var.shape[1], 1, var.shape[2]))
split_into_wins = tf.reduce_max(split_into_wins, axis=(5 - chs_axis))
# output shape: (epochs. wins, chs, pfilter_depth_prod, 1)
split_into_wins = tf.reduce_sum(tf.multiply(split_into_wins, var), axis=-1, keepdims=True)
return split_into_wins
class STimeConv(keras.Model):
    """Simplified time-domain convolutional front-end.

    Skips the pattern convolution and squeeze-excitation used by
    TimeConv: it stacks DepthConv layers with optional interleaved max
    pooling and dropout, then max-pools each window.
    """
    def __init__(self, name, config):
        super(STimeConv, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = config.data_format
        # one DepthConv per entry in config.time_dc_dfilters
        self.depth_conv = list(nn.DepthConv('No.{}_depth_convolution'.format(i + 1), config.seed,
                                            config.time_dc_dfilters[i], config.time_dc_pfilter_num[i],
                                            config.time_dc_strides[i], config.time_dc_act[i], config.time_dc_bias[i],
                                            config.wd, config.time_dc_pd[i], self.data_format,
                                            self.dtype, config.time_dc_act_filter[i], config.time_dc_he_scale[i])
                               for i in range(len(config.time_dc_dfilters)))
        # max-pool layers inserted between selected DepthConv layers
        # (which ones is controlled by config.time_dc_insert_max in call())
        self.insert_max = list(nn.MaxPool('No.{}_max_pooling'.format(i + 1), e[0], e[1], e[2], self.data_format)
                               for i, e in enumerate(config.time_dc_insert_max_info))
        # lazily-built cache of sub-layers that expose a regu() method
        self.regulate_layers = None
    def regu(self):
        """Collect regularization terms from every sub-layer that defines regu()."""
        if self.regulate_layers is None:
            regulate_layers = []
            for l in self.layers:
                if hasattr(l, 'regu'):
                    regulate_layers.append(l)
            self.regulate_layers = regulate_layers
        r = []
        for l in self.regulate_layers:
            r += l.regu()
        return r
    def get_config(self):
        """Return kwargs for from_config()."""
        config = {'name': self.name, 'config': self.config}
        return config
    @classmethod
    def from_config(cls, kwargs):
        return cls(**kwargs)
    def get_input(self):
        """Build an all-zero dummy batch shaped like the real input; returns (inputs, batch_size)."""
        prod = self.config.epoch_second * self.config.fs
        inputs = tf.zeros((self.config.batch_size, self.config.chs, prod, 1), dtype=self.dtype)
        return inputs, self.config.batch_size  # need to append 'training'
    # inputs --- T --- (epochs, chs, epoch_second * fs, 1)
    def call(self, inputs, training=None, heap=None):
        """Extract windowed time features; returns (epochs, wins, chs, pfilter_depth_prod, 1)."""
        inputs = tf.cast(inputs, self.dtype)
        # axis carrying the channels, derived from the data format string
        chs_axis = 3 - self.data_format.find('H')
        ori_chs = inputs.shape[chs_axis]
        #inputs = self.pattern_conv(inputs, training, heap, self.config.relu_leaky, self.config.relu_threshold)
        # output: (epochs, chs * pfilter_depth_prod, h', 3 * 1)
        #inputs = tf.concat(inputs, axis=-1)
        max_flag = 0
        for dc, flag, d in zip(self.depth_conv, self.config.time_dc_insert_max, self.config.time_dc_dropout):
            inputs = dc(inputs, training, heap, relu=self.config.relu_leaky, threshold=self.config.relu_threshold)
            if flag == 1:
                # consume the next pre-built max-pool layer
                inputs = self.insert_max[max_flag](inputs, heap)
                max_flag += 1
            if training and d:
                # spatial dropout: the same mask is shared across batch and last axis
                inputs = tf.nn.dropout(inputs, d, noise_shape=[1, None, None, 1])
        # output: (epochs, wins, chs * pfilter_depth_prod, h' // wins, 1)
        split_into_wins = tf.stack(tf.split(inputs, self.config.wins, axis=(3 - chs_axis)), axis=1)
        # output: (epochs, wins, chs, pfilter_depth_prod, h' // wins, 1)
        split_into_wins = tf.stack(tf.split(split_into_wins, ori_chs, axis=(chs_axis + 1)), axis=2)
        # drop the large intermediate before the final reduction
        del inputs
        gc.collect()
        # output shape: (epochs, wins, chs, pfilter_depth_prod, 1)
        return tf.reduce_max(split_into_wins, axis=(5 - chs_axis))
class SVDTimeConv(keras.Model):
    """Time-domain convolutional front-end with SVD squeeze-excitation.

    Same pipeline as TimeConv (pattern convolution -> depthwise stack ->
    window split -> pattern fusion), except the per-pattern weights come
    from nn.SVDSE instead of a variance-based module.
    """

    def __init__(self, name, config):
        super(SVDTimeConv, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = config.data_format
        cfg = self.config
        # multi-branch pattern convolution over the raw time signal
        self.pattern_conv = nn.PatternConv(
            'pattern_convolution', cfg.seed, cfg.pattern_layers, cfg.pattern_filters,
            cfg.pattern_stride, cfg.pattern_act, cfg.pattern_bias, cfg.pattern_padding,
            cfg.pattern_dropout, cfg.pattern_max_pool, cfg.pattern_wd,
            cfg.pattern_he_scale, self.data_format, self.dtype)
        # one DepthConv per configured depthwise filter spec
        self.depth_conv = [
            nn.DepthConv('No.{}_depth_convolution'.format(idx + 1), config.seed,
                         config.time_dc_dfilters[idx], config.time_dc_pfilter_num[idx],
                         config.time_dc_strides[idx], config.time_dc_act[idx],
                         config.time_dc_bias[idx], config.wd, config.time_dc_pd[idx],
                         self.data_format, self.dtype, config.time_dc_act_filter[idx],
                         config.time_dc_he_scale[idx])
            for idx in range(len(config.time_dc_dfilters))]
        self.svd_se = nn.SVDSE('svd_squeeze_excitation', cfg.seed, cfg.se_he_scale,
                               cfg.se_bias, cfg.se_wd, self.dtype)
        # cache of regularizable sub-layers, filled on first regu() call
        self.regulate_layers = None

    def regu(self):
        """Return the concatenated regularization terms of all capable sub-layers."""
        if self.regulate_layers is None:
            self.regulate_layers = [lyr for lyr in self.layers if hasattr(lyr, 'regu')]
        terms = []
        for lyr in self.regulate_layers:
            terms.extend(lyr.regu())
        return terms

    def get_config(self):
        """Return kwargs for from_config()."""
        return {'name': self.name, 'config': self.config}

    @classmethod
    def from_config(cls, kwargs):
        return cls(**kwargs)

    def get_input(self):
        """Produce an all-zero dummy batch shaped like the real input."""
        points = self.config.epoch_second * self.config.fs
        dummy = tf.zeros((self.config.batch_size, self.config.chs, points, 1), dtype=self.dtype)
        return dummy, self.config.batch_size  # need to append 'training'

    # inputs --- T --- (epochs, chs, epoch_second * fs, 1)
    def call(self, inputs, training=None, heap=None):
        """Extract windowed time features re-weighted by SVD squeeze-excitation."""
        inputs = tf.cast(inputs, self.dtype)
        # axis carrying the channels, derived from the data format string
        chs_axis = 3 - self.data_format.find('H')
        ori_chs = inputs.shape[chs_axis]
        # pattern_conv returns a tuple, one tensor per pattern branch
        inputs = self.pattern_conv(inputs, training, heap, self.config.relu_leaky, self.config.relu_threshold)
        # merge branches on the last axis: (epochs, chs * pfilter_depth_prod, h', 3 * 1)
        inputs = tf.concat(inputs, axis=-1)
        for dc in self.depth_conv:
            inputs = dc(inputs, training, heap, relu=self.config.relu_leaky, threshold=self.config.relu_threshold)
        # (epochs, wins, chs * pfilter_depth_prod, h' // wins, 3 * 1)
        windows = tf.stack(tf.split(inputs, self.config.wins, axis=(3 - chs_axis)), axis=1)
        # (epochs, wins, chs, pfilter_depth_prod, h' // wins, 3 * 1)
        windows = tf.stack(tf.split(windows, ori_chs, axis=(chs_axis + 1)), axis=2)
        # free the pre-split intermediate to lower peak memory
        del inputs
        gc.collect()
        # integrate the 3 patterns: mean-pool feeds the SE weight computation ...
        pooled_mean = tf.reduce_mean(windows, axis=(5 - chs_axis))
        # (epochs, wins, chs, pfilter_depth_prod, 3 * 1)
        weights = self.svd_se(pooled_mean, self.config.relu_leaky, self.config.relu_threshold, heap)
        # ... while max-pool produces the features themselves
        windows = tf.reduce_max(windows, axis=(5 - chs_axis))
        # output shape: (epochs, wins, chs, pfilter_depth_prod, 1)
        return tf.reduce_sum(tf.multiply(windows, weights), axis=-1, keepdims=True)
class NTimeConv(keras.Model):
    """Time-domain convolutional front-end without squeeze-excitation.

    Same pipeline as TimeConv up to the window split; instead of
    re-weighting the 3 pattern branches it max-pools each window and
    concatenates the branches along the filter axis.
    """

    def __init__(self, name, config):
        super(NTimeConv, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = config.data_format
        cfg = self.config
        # multi-branch pattern convolution over the raw time signal
        self.pattern_conv = nn.PatternConv(
            'pattern_convolution', cfg.seed, cfg.pattern_layers, cfg.pattern_filters,
            cfg.pattern_stride, cfg.pattern_act, cfg.pattern_bias, cfg.pattern_padding,
            cfg.pattern_dropout, cfg.pattern_max_pool, cfg.pattern_wd,
            cfg.pattern_he_scale, self.data_format, self.dtype)
        # one DepthConv per configured depthwise filter spec
        self.depth_conv = [
            nn.DepthConv('No.{}_depth_convolution'.format(idx + 1), config.seed,
                         config.time_dc_dfilters[idx], config.time_dc_pfilter_num[idx],
                         config.time_dc_strides[idx], config.time_dc_act[idx],
                         config.time_dc_bias[idx], config.wd, config.time_dc_pd[idx],
                         self.data_format, self.dtype, config.time_dc_act_filter[idx],
                         config.time_dc_he_scale[idx])
            for idx in range(len(config.time_dc_dfilters))]
        # cache of regularizable sub-layers, filled on first regu() call
        self.regulate_layers = None

    def regu(self):
        """Return the concatenated regularization terms of all capable sub-layers."""
        if self.regulate_layers is None:
            self.regulate_layers = [lyr for lyr in self.layers if hasattr(lyr, 'regu')]
        terms = []
        for lyr in self.regulate_layers:
            terms.extend(lyr.regu())
        return terms

    def get_config(self):
        """Return kwargs for from_config()."""
        return {'name': self.name, 'config': self.config}

    @classmethod
    def from_config(cls, kwargs):
        return cls(**kwargs)

    def get_input(self):
        """Produce an all-zero dummy batch shaped like the real input."""
        points = self.config.epoch_second * self.config.fs
        dummy = tf.zeros((self.config.batch_size, self.config.chs, points, 1), dtype=self.dtype)
        return dummy, self.config.batch_size  # need to append 'training'

    # inputs --- T --- (epochs, chs, epoch_second * fs, 1)
    def call(self, inputs, training=None, heap=None):
        """Extract windowed time features with patterns kept as extra filters."""
        inputs = tf.cast(inputs, self.dtype)
        # axis carrying the channels, derived from the data format string
        chs_axis = 3 - self.data_format.find('H')
        ori_chs = inputs.shape[chs_axis]
        # pattern_conv returns a tuple, one tensor per pattern branch
        inputs = self.pattern_conv(inputs, training, heap, self.config.relu_leaky, self.config.relu_threshold)
        # merge branches on the last axis: (epochs, chs * pfilter_depth_prod, h', 3 * 1)
        inputs = tf.concat(inputs, axis=-1)
        for dc in self.depth_conv:
            inputs = dc(inputs, training, heap, relu=self.config.relu_leaky, threshold=self.config.relu_threshold)
        # (epochs, wins, chs * pfilter_depth_prod, h' // wins, 3 * 1)
        windows = tf.stack(tf.split(inputs, self.config.wins, axis=(3 - chs_axis)), axis=1)
        # (epochs, wins, chs, pfilter_depth_prod, h' // wins, 3 * 1)
        windows = tf.stack(tf.split(windows, ori_chs, axis=(chs_axis + 1)), axis=2)
        # free the pre-split intermediate to lower peak memory
        del inputs
        gc.collect()
        # max-pool each window, keeping the 3 pattern branches separate
        pooled = tf.reduce_max(windows, axis=(5 - chs_axis))
        # output shape: (epochs, wins, chs, 3 * pfilter_depth_prod, 1)
        return tf.concat(tf.split(pooled, 3, axis=-1), axis=-2)
class ABiGRUBaseForStackBi(keras.Model):
    """Sleep-stage model: conv front-ends + stacked bidirectional GRU with attention.

    Depending on config.inputs, consumes a time branch ('t', via TimeConv),
    a frequency branch ('f', via nn.SelectF) and optional frequency
    dynamics ('df'). Epochs are disassembled into fixed-length sequences
    and fed through nn.StackBiGRUPlusAttention; recurrent state is carried
    across calls so consecutive batches of one recording stay connected.
    """
    def __init__(self, name, config):
        super(ABiGRUBaseForStackBi, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = self.config.data_format
        # whether frequency-dynamics input ('df') is expected alongside 'f'
        self.has_dynamics = 'df' in config.inputs
        if 't' in config.inputs:
            self.select_t = TimeConv('time_conv_net', self.config)
            # dropout mask shared across the batch axis and all feature axes
            self.dropout_t = keras.layers.Dropout(self.config.dropout_t, noise_shape=(1, None, None, None, None),
                                                  name='time_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_t = None
            self.dropout_t = None
        if 'f' in config.inputs:
            self.select_fre = nn.SelectF('select_fre', self.config.f_low_num, self.config.f_mid_num,
                                         self.config.select_f, self.config.seed, self.has_dynamics, self.config.f_wd,
                                         self.config.f_he_scale, self.dtype, self.data_format)
            self.dropout_fre = keras.layers.Dropout(self.config.dropout_f, noise_shape=(1, None, None, None, None),
                                                    name='fre_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_fre = None
            self.dropout_fre = None
        # splits the epoch stream into (seqs, steps, ...) sequences plus a mask
        self.disassemble = nn.Disassemble('disassemble', self.config.seq_len, self.config.seq_batch_size,
                                          self.config.padding_mode)
        self.stack_bi_gru = nn.StackBiGRUPlusAttention('stack_bi_gru', self.config.seq_len, self.config.seed,
                                                       self.config.rnn_h_units, self.config.focus_units,
                                                       self.config.rnn_mmode, self.config.rnn_layers,
                                                       self.config.focus_bias, self.config.focus_wd, self.dtype,
                                                       self.config.rnn_h_wd, self.config.rnn_input_dropout,
                                                       self.config.rnn_hidden_dropout)
        self.classification = nn.Classification('classification', self.config.classes, self.config.seed,
                                                self.config.classifier_dr_rate, self.dtype, self.config.classifier_bias,
                                                self.config.wd)
        # lazily-built cache of sub-layers that expose a regu() method
        self.regulate_layers = None
    def regu(self):
        """Collect regularization terms from every sub-layer that defines regu()."""
        if self.regulate_layers is None:
            regulate_layers = []
            for l in self.layers:
                if hasattr(l, 'regu'):
                    regulate_layers.append(l)
            self.regulate_layers = regulate_layers
        r = []
        for l in self.regulate_layers:
            r += l.regu()
        return r
    def get_config(self):
        """Return kwargs for from_config()."""
        config = {'name': self.name, 'config': self.config}
        return config
    @classmethod
    def from_config(cls, kwargs):
        return cls(**kwargs)
    def get_input(self):
        """Build zero dummy inputs matching config.inputs; unused branches are None."""
        if 't' in self.config.inputs:
            t = tf.zeros((self.config.batch_size, self.config.chs, self.config.epoch_second * self.config.fs),
                         dtype=self.dtype)
        else:
            t = None
        if 'f' in self.config.inputs:
            f = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft), dtype=self.dtype)
        else:
            f = None
        if self.has_dynamics:
            df = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft, 2),
                          dtype=self.dtype)
        else:
            df = None
        inputs = (t, f, df)
        initial_states = None
        last_output = None
        no_begin = tf.constant([True], dtype='bool')
        return inputs, initial_states, last_output, no_begin  # when use please append 'training' & 'classifier'
    # inputs --- (T, F) or (T, F, DF)    per fre element --- (epochs, chs, wins, points)
    #            time element --- (epochs, chs, epoch_sampling_points)
    # initial_states --- (((1, h_units, f) or None, None) or None) * layers or None
    # last_outputs --- ((1, bottom_h_units, f) or None, None) or None
    # no_begin --- None or 1-D Tensor (bool)   True indicates needs to use self-attention
    def call(self, inputs, initial_states=None, last_outputs=None, no_begin=None, training=None,
             classifier=True):
        """Run the full pipeline; returns ([outputs], next_ini, next_last, [ch_att], [win_att])."""
        if self.config.heap is not None:
            time_heap = self.config.heap
            # frequency branch processes wins-times more rows, so scale its heap budget
            fre_heap = int(self.config.heap * self.config.wins)
        else:
            time_heap = None
            fre_heap = None
        if inputs[0] is not None and self.select_t is not None:
            t = tf.expand_dims(inputs[0], axis=-1)
            t = self.select_t(t, training, time_heap)
            t = self.dropout_t(t, training)
        else:
            t = None
        if inputs[1] is not None and self.select_fre is not None:
            epochs = inputs[1].shape[0]
            f = tf.expand_dims(inputs[1], axis=-1)
            if inputs[2] is not None and self.has_dynamics:
                f = tf.concat([f, inputs[2]], axis=-1)
            del inputs
            gc.collect()
            # fold the epoch axis into the batch axis so SelectF sees one big batch
            f = tf.transpose(f, (0, 2, 1, 3, 4))
            f = tf.concat(tf.unstack(f, f.shape[0], axis=0), axis=0)
            f = self.select_fre(f, training, fre_heap)
            f = tf.stack(tf.split(f, epochs, axis=0), axis=0)
            f = self.dropout_fre(f, training)
        else:
            del inputs
            gc.collect()
            f = None
        # NOTE(review): if both t and f are None, all_in is unbound (NameError) —
        # assumes config.inputs always selects at least one modality; confirm upstream.
        if t is not None and f is not None:
            all_in = tf.concat((t, f), axis=-1)
        elif t is not None:
            all_in = t
        elif f is not None:
            all_in = f
        del t, f
        gc.collect()
        _f = all_in.shape[-1]
        # (seqs, steps, 1, wins, chs, xxx, _f)   mask output shape: (seqs, steps, 1)
        all_in, mask = self.disassemble(all_in)
        ini_s = initial_states
        last_o = last_outputs
        seqs = len(all_in)
        # the last seq may have different steps
        outputs = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        ch_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        win_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        # only the first sequence gets the caller-provided no_begin flag
        exp_no_begin = (no_begin,) + (None,) * (seqs - 1)  # can't use tf.range
        if classifier:
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.stack_bi_gru(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                          self.config.relu_leaky, self.config.relu_threshold)
                result = self.classification(tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1), temp_m, training)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                # carry forward only the forward-direction state; backward is rebuilt
                ini_s = tuple((s_i[0], None) for s_i in gru_o[1])
                last_o = (gru_o[2][0], None)  # bottom layer
        else:
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.stack_bi_gru(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                          self.config.relu_leaky, self.config.relu_threshold)
                result = tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                ini_s = tuple((s_i[0], None) for s_i in gru_o[1])
                last_o = (gru_o[2][0], None)  # bottom layer
        next_ini = ini_s
        next_last = last_o
        outputs = outputs.concat()
        # (batch, n_class/top_units)
        outputs = tf.reshape(outputs, (-1, outputs.shape[-1]))
        ch_attention = tf.concat(tuple(ch_attention.read(i) for i in range(seqs)), axis=1)
        ch_attention = tf.transpose(ch_attention, (1, 2, 0, 4, 3))
        # (batch, 2, f, chs)
        ch_attention = tf.reshape(ch_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                           tf.constant(ch_attention.shape[-3:])], axis=0))
        win_attention = tf.concat(tuple(win_attention.read(i) for i in range(seqs)), axis=1)
        win_attention = tf.transpose(win_attention, (1, 2, 0, 5, 3, 4))
        # (batch, 2, f, wins, chs)
        win_attention = tf.reshape(win_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                             tf.constant(win_attention.shape[-4:])], axis=0))
        # all results have already been aligned using the mask
        # output --- (epochs, n_class/top_units)   ch_att --- (epochs, 2, 1/3, chs)
        # win_att --- (epochs, 2, 1/3, wins, chs)
        # ini_s returned and last_o returned need tackling furthermore (should reset zeros or None when new_file starts)
        return [outputs], next_ini, next_last, [ch_attention], [win_attention]
    def reset_ele(self, ch_info, ini, o, refer=None):
        """Mask recurrent state per channel (ch_info broadcasts as a 0/1 multiplier)."""
        ch_info = tf.cast(ch_info, self.dtype)
        exp_dims = o[0].shape.ndims - ch_info.shape.ndims
        exp_ch_info = tf.reshape(ch_info, ch_info.shape + (1,) * exp_dims)
        new_o = (tf.multiply(o[0], exp_ch_info), None)
        new_ini = tuple((tf.multiply(e[0], exp_ch_info), None) for e in ini)
        if refer is not None:
            new_refer = tf.multiply(refer, tf.reshape(ch_info, ch_info.shape + (1,) * (refer.shape.ndims -
                                                                                       ch_info.shape.ndims)))
        else:
            new_refer = None
        return new_ini, new_o, new_refer
class ABiGRUBaseForBiWrapper(keras.Model):
    """Sleep-stage model variant using nn.BidirectionalWrapper instead of a stacked Bi-GRU.

    Front-end branches and sequence handling mirror ABiGRUBaseForStackBi;
    only the recurrent core and its state layout differ (a single wrapped
    state tuple rather than per-layer tuples).
    """
    def __init__(self, name, config):
        super(ABiGRUBaseForBiWrapper, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = self.config.data_format
        # whether frequency-dynamics input ('df') is expected alongside 'f'
        self.has_dynamics = 'df' in config.inputs
        if 't' in config.inputs:
            self.select_t = TimeConv('time_conv_net', self.config)
            self.dropout_t = keras.layers.Dropout(self.config.dropout_t, noise_shape=(1, None, None, None, None),
                                                  name='time_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_t = None
            self.dropout_t = None
        if 'f' in config.inputs:
            self.select_fre = nn.SelectF('select_fre', self.config.f_low_num, self.config.f_mid_num,
                                         self.config.select_f, self.config.seed, self.has_dynamics, self.config.f_wd,
                                         self.config.f_he_scale, self.dtype, self.data_format)
            self.dropout_fre = keras.layers.Dropout(self.config.dropout_f, noise_shape=(1, None, None, None, None),
                                                    name='fre_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_fre = None
            self.dropout_fre = None
        # splits the epoch stream into (seqs, steps, ...) sequences plus a mask
        self.disassemble = nn.Disassemble('disassemble', self.config.seq_len, self.config.seq_batch_size,
                                          self.config.padding_mode)
        self.bi_wrapper = nn.BidirectionalWrapper('bi_wrapper_for_gru_plus_attention', self.config.seq_len,
                                                  self.config.seed, self.config.rnn_h_units, self.config.focus_units,
                                                  self.config.rnn_layers, self.config.bi_wrapper_mmode,
                                                  self.config.focus_bias, self.config.focus_wd, self.dtype,
                                                  self.config.rnn_h_wd, self.config.rnn_input_dropout,
                                                  self.config.rnn_hidden_dropout)
        self.classification = nn.Classification('classification', self.config.classes, self.config.seed,
                                                self.config.classifier_dr_rate, self.dtype, self.config.classifier_bias,
                                                self.config.wd)
        # lazily-built cache of sub-layers that expose a regu() method
        self.regulate_layers = None
    def regu(self):
        """Collect regularization terms from every sub-layer that defines regu()."""
        if self.regulate_layers is None:
            regulate_layers = []
            for l in self.layers:
                if hasattr(l, 'regu'):
                    regulate_layers.append(l)
            self.regulate_layers = regulate_layers
        r = []
        for l in self.regulate_layers:
            r += l.regu()
        return r
    def get_config(self):
        """Return kwargs for from_config()."""
        config = {'name': self.name, 'config': self.config}
        return config
    @classmethod
    def from_config(cls, kwargs):
        return cls(**kwargs)
    def get_input(self):
        """Build zero dummy inputs matching config.inputs; unused branches are None."""
        if 't' in self.config.inputs:
            t = tf.zeros((self.config.batch_size, self.config.chs, self.config.epoch_second * self.config.fs),
                         dtype=self.dtype)
        else:
            t = None
        if 'f' in self.config.inputs:
            f = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft), dtype=self.dtype)
        else:
            f = None
        if self.has_dynamics:
            df = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft, 2),
                          dtype=self.dtype)
        else:
            df = None
        inputs = (t, f, df)
        initial_states = None
        last_output = None
        no_begin = tf.constant([True], dtype='bool')
        return inputs, initial_states, last_output, no_begin  # when use please append 'training' & 'classifier'
    # inputs --- (T, F) or (T, F, DF)    per fre element --- (epochs, chs, wins, points)
    #            time element --- (epochs, chs, epoch_sampling_points)
    # initial_states --- ((((1, h_units, f) or None) * layers or None), None) or None
    # last_outputs --- ((1, top_h_units, f) or None, None) or None
    # no_begin --- None or 1-D Tensor (bool)   True indicates needs to use self-attention
    def call(self, inputs, initial_states=None, last_outputs=None, no_begin=None, training=None,
             classifier=True):
        """Run the full pipeline; returns ([outputs], next_ini, next_last, [ch_att], [win_att])."""
        if self.config.heap is not None:
            time_heap = self.config.heap
            # frequency branch processes wins-times more rows, so scale its heap budget
            fre_heap = int(self.config.heap * self.config.wins)
        else:
            time_heap = None
            fre_heap = None
        if inputs[0] is not None and self.select_t is not None:
            t = tf.expand_dims(inputs[0], axis=-1)
            t = self.select_t(t, training, time_heap)
            t = self.dropout_t(t, training)
        else:
            t = None
        if inputs[1] is not None and self.select_fre is not None:
            epochs = inputs[1].shape[0]
            f = tf.expand_dims(inputs[1], axis=-1)
            if inputs[2] is not None and self.has_dynamics:
                f = tf.concat([f, inputs[2]], axis=-1)
            del inputs
            gc.collect()
            # fold the epoch axis into the batch axis so SelectF sees one big batch
            f = tf.transpose(f, (0, 2, 1, 3, 4))
            f = tf.concat(tf.unstack(f, f.shape[0], axis=0), axis=0)
            f = self.select_fre(f, training, fre_heap)
            f = tf.stack(tf.split(f, epochs, axis=0), axis=0)
            f = self.dropout_fre(f, training)
        else:
            del inputs
            gc.collect()
            f = None
        # NOTE(review): if both t and f are None, all_in is unbound (NameError) —
        # assumes config.inputs always selects at least one modality; confirm upstream.
        if t is not None and f is not None:
            all_in = tf.concat((t, f), axis=-1)
        elif t is not None:
            all_in = t
        elif f is not None:
            all_in = f
        del t, f
        gc.collect()
        _f = all_in.shape[-1]
        # (seqs, steps, 1, wins, chs, xxx, _f)   mask output shape: (seqs, steps, 1)
        all_in, mask = self.disassemble(all_in)
        ini_s = initial_states
        last_o = last_outputs
        seqs = len(all_in)
        # the last seq may have different steps
        outputs = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        ch_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        win_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        # only the first sequence gets the caller-provided no_begin flag
        exp_no_begin = (no_begin,) + (None,) * (seqs - 1)  # can't use tf.range
        if classifier:
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.bi_wrapper(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                        self.config.relu_leaky, self.config.relu_threshold)
                result = self.classification(tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1), temp_m, training)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                # carry forward only the forward-direction state; backward is rebuilt
                ini_s = (gru_o[1][0], None)
                last_o = (gru_o[2][0], None)  # top layer
        else:
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.bi_wrapper(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                        self.config.relu_leaky, self.config.relu_threshold)
                result = tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                ini_s = (gru_o[1][0], None)
                last_o = (gru_o[2][0], None)  # top layer
        next_ini = ini_s
        next_last = last_o
        outputs = outputs.concat()
        # (batch, n_class/top_units)
        outputs = tf.reshape(outputs, (-1, outputs.shape[-1]))
        ch_attention = tf.transpose(ch_attention.concat(), (0, 1, 2, 4, 3))
        # (batch, 2, f, chs)
        ch_attention = tf.reshape(ch_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                           tf.constant(ch_attention.shape[-3:])], axis=0))
        win_attention = tf.transpose(win_attention.concat(), (0, 1, 2, 5, 3, 4))
        # (batch, 2, f, wins, chs)
        win_attention = tf.reshape(win_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                             tf.constant(win_attention.shape[-4:])], axis=0))
        # all results have already been aligned using the mask
        # output --- (epochs, n_class/top_units)   ch_att --- (epochs, 2, 1/3, chs)
        # win_att --- (epochs, 2, 1/3, wins, chs)
        # ini_s returned and last_o returned need tackling furthermore (should reset zeros or None when new_file starts)
        return [outputs], next_ini, next_last, [ch_attention], [win_attention]
    def reset_ele(self, ch_info, ini, o, refer=None):
        """Mask recurrent state per channel (ch_info broadcasts as a 0/1 multiplier)."""
        ch_info = tf.cast(ch_info, self.dtype)
        exp_dims = o[0].shape.ndims - ch_info.shape.ndims
        exp_ch_info = tf.reshape(ch_info, ch_info.shape + (1,) * exp_dims)
        new_o = (tf.multiply(o[0], exp_ch_info), None)
        # wrapper state layout: (tuple_of_layer_states, None)
        new_ini = (tuple(tf.multiply(e, exp_ch_info) for e in ini[0]), None)
        if refer is not None:
            new_refer = tf.multiply(refer, tf.reshape(ch_info, ch_info.shape + (1,) * (refer.shape.ndims -
                                                                                       ch_info.shape.ndims)))
        else:
            new_refer = None
        return new_ini, new_o, new_refer
class ABiGRURectifyForStackBi(keras.Model):
    """Multi-task variant of ABiGRUBaseForStackBi.

    Uses nn.RectifyClassification, which emits the n_class logits plus 2
    auxiliary outputs and a 'refer' vector that is fed back into the next
    sequence's classification step.
    """
    def __init__(self, name, config):
        super(ABiGRURectifyForStackBi, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = self.config.data_format
        # whether frequency-dynamics input ('df') is expected alongside 'f'
        self.has_dynamics = 'df' in config.inputs
        if 't' in config.inputs:
            self.select_t = TimeConv('time_conv_net', self.config)
            self.dropout_t = keras.layers.Dropout(self.config.dropout_t, noise_shape=(1, None, None, None, None),
                                                  name='time_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_t = None
            self.dropout_t = None
        if 'f' in config.inputs:
            self.select_fre = nn.SelectF('select_fre', self.config.f_low_num, self.config.f_mid_num,
                                         self.config.select_f, self.config.seed, self.has_dynamics, self.config.f_wd,
                                         self.config.f_he_scale, self.dtype, self.data_format)
            self.dropout_fre = keras.layers.Dropout(self.config.dropout_f, noise_shape=(1, None, None, None, None),
                                                    name='fre_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_fre = None
            self.dropout_fre = None
        # splits the epoch stream into (seqs, steps, ...) sequences plus a mask
        self.disassemble = nn.Disassemble('disassemble', self.config.seq_len, self.config.seq_batch_size,
                                          self.config.padding_mode)
        self.stack_bi_gru = nn.StackBiGRUPlusAttention('stack_bi_gru', self.config.seq_len, self.config.seed,
                                                       self.config.rnn_h_units, self.config.focus_units,
                                                       self.config.rnn_mmode, self.config.rnn_layers,
                                                       self.config.focus_bias, self.config.focus_wd, self.dtype,
                                                       self.config.rnn_h_wd, self.config.rnn_input_dropout,
                                                       self.config.rnn_hidden_dropout)
        self.classification = nn.RectifyClassification('multi_task_classification', self.config.classes,
                                                       self.config.classifier_dr_rate, self.config.seed,
                                                       self.config.classifier_bias, self.dtype, self.config.wd)
        # lazily-built cache of sub-layers that expose a regu() method
        self.regulate_layers = None
    def regu(self):
        """Collect regularization terms from every sub-layer that defines regu()."""
        if self.regulate_layers is None:
            regulate_layers = []
            for l in self.layers:
                if hasattr(l, 'regu'):
                    regulate_layers.append(l)
            self.regulate_layers = regulate_layers
        r = []
        for l in self.regulate_layers:
            r += l.regu()
        return r
    def get_config(self):
        """Return kwargs for from_config()."""
        config = {'name': self.name, 'config': self.config}
        return config
    @classmethod
    def from_config(cls, kwargs):
        return cls(**kwargs)
    def get_input(self):
        """Build zero dummy inputs matching config.inputs; unused branches are None."""
        if 't' in self.config.inputs:
            t = tf.zeros((self.config.batch_size, self.config.chs, self.config.epoch_second * self.config.fs),
                         dtype=self.dtype)
        else:
            t = None
        if 'f' in self.config.inputs:
            f = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft), dtype=self.dtype)
        else:
            f = None
        if self.has_dynamics:
            df = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft, 2),
                          dtype=self.dtype)
        else:
            df = None
        inputs = (t, f, df)
        initial_states = None
        last_output = None
        refer = None
        no_begin = tf.constant([True], dtype='bool')
        return inputs, initial_states, last_output, no_begin, refer  # when use please append 'training' & 'classifier'
    # inputs --- (T, F) or (T, F, DF)    per fre element --- (epochs, chs, wins, points)
    #            time element --- (epochs, chs, epoch_sampling_points)
    # initial_states --- (((1, h_units, f) or None, None) or None) * layers or None
    # last_outputs --- ((1, bottom_h_units, f) or None, None) or None
    # refer --- (1, integrated_top_h_units) or None
    # no_begin --- None or 1-D Tensor (bool)   True indicates needs to use self-attention
    def call(self, inputs, initial_states=None, last_outputs=None, no_begin=None, training=None, refer=None,
             classifier=True):
        """Run the pipeline; returns (out1, out2, next_ini, next_last, [ch_att], [win_att], next_refer)."""
        n_class = self.config.classes
        if self.config.heap is not None:
            time_heap = self.config.heap
            # frequency branch processes wins-times more rows, so scale its heap budget
            fre_heap = int(self.config.heap * self.config.wins)
        else:
            time_heap = None
            fre_heap = None
        if inputs[0] is not None and self.select_t is not None:
            t = tf.expand_dims(inputs[0], axis=-1)
            t = self.select_t(t, training, time_heap)
            t = self.dropout_t(t, training)
        else:
            t = None
        if inputs[1] is not None and self.select_fre is not None:
            epochs = inputs[1].shape[0]
            f = tf.expand_dims(inputs[1], axis=-1)
            if inputs[2] is not None and self.has_dynamics:
                f = tf.concat([f, inputs[2]], axis=-1)
            del inputs
            gc.collect()
            # fold the epoch axis into the batch axis so SelectF sees one big batch
            f = tf.transpose(f, (0, 2, 1, 3, 4))
            f = tf.concat(tf.unstack(f, f.shape[0], axis=0), axis=0)
            f = self.select_fre(f, training, fre_heap)
            f = tf.stack(tf.split(f, epochs, axis=0), axis=0)
            f = self.dropout_fre(f, training)
        else:
            del inputs
            gc.collect()
            f = None
        # NOTE(review): if both t and f are None, all_in is unbound (NameError) —
        # assumes config.inputs always selects at least one modality; confirm upstream.
        if t is not None and f is not None:
            all_in = tf.concat((t, f), axis=-1)
        elif t is not None:
            all_in = t
        elif f is not None:
            all_in = f
        del t, f
        gc.collect()
        _f = all_in.shape[-1]
        # (seqs, steps, 1, wins, chs, xxx, _f)   mask output shape: (seqs, steps, 1)
        all_in, mask = self.disassemble(all_in)
        ini_s = initial_states
        last_o = last_outputs
        last_refer = refer
        seqs = len(all_in)
        # the last seq may have different steps
        outputs = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        ch_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        win_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        # only the first sequence gets the caller-provided no_begin flag
        exp_no_begin = (no_begin,) + (None,) * (seqs - 1)  # can't use tf.range
        if classifier:
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.stack_bi_gru(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                          self.config.relu_leaky, self.config.relu_threshold)
                result = self.classification(tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1), last_refer, temp_m,
                                             training)
                # last element of the classification output is the refer vector for the next seq
                last_refer = result[-1]
                result = tf.concat(result[:-1], axis=-1)  # (steps, 1, n_class + 2)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                # carry forward only the forward-direction state; backward is rebuilt
                ini_s = tuple((s_i[0], None) for s_i in gru_o[1])
                last_o = (gru_o[2][0], None)  # bottom layer
            outputs = outputs.concat()  # (steps * seqs, 1, n_class + 2)
            # (batch, n_class + 2)
            outputs = tf.reshape(outputs, (-1, outputs.shape[-1]))
            # split main-task logits from the 2 auxiliary outputs
            outputs = tf.split(outputs, n_class + 2, axis=-1)
            out1 = [tf.concat(outputs[:n_class], axis=-1)]
            out2 = [tf.concat(outputs[-2:], axis=-1)]
        else:
            last_refer = None
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.stack_bi_gru(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                          self.config.relu_leaky, self.config.relu_threshold)
                result = tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1)  # (steps, 1, integrated_top_units)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                ini_s = tuple((s_i[0], None) for s_i in gru_o[1])
                last_o = (gru_o[2][0], None)  # bottom layer
            outputs = outputs.concat()  # (steps * seqs, 1, integrated_top_units)
            # (batch, integrated_top_units)
            outputs = tf.reshape(outputs, (-1, outputs.shape[-1]))
            out1 = [outputs]
            out2 = None
        next_ini = ini_s
        next_last = last_o
        next_refer = last_refer
        ch_attention = tf.concat(tuple(ch_attention.read(i) for i in range(seqs)), axis=1)
        ch_attention = tf.transpose(ch_attention, (1, 2, 0, 4, 3))
        # (batch, 2, f, chs)
        ch_attention = tf.reshape(ch_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                           tf.constant(ch_attention.shape[-3:])], axis=0))
        win_attention = tf.concat(tuple(win_attention.read(i) for i in range(seqs)), axis=1)
        win_attention = tf.transpose(win_attention, (1, 2, 0, 5, 3, 4))
        # (batch, 2, f, wins, chs)
        win_attention = tf.reshape(win_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                             tf.constant(win_attention.shape[-4:])], axis=0))
        # all results have already been aligned using the mask
        # output --- (epochs, n_class/top_units)   ch_att --- (epochs, 2, 1/3, chs)
        # win_att --- (epochs, 2, 1/3, wins, chs)
        # ini_s returned and last_o returned need tackling furthermore (should reset zeros or None when new_file starts)
        return out1, out2, next_ini, next_last, [ch_attention], [win_attention], next_refer
    def reset_ele(self, ch_info, ini, o, refer=None):
        """Mask recurrent state (and the refer vector) per channel via a 0/1 multiplier."""
        ch_info = tf.cast(ch_info, self.dtype)
        exp_dims = o[0].shape.ndims - ch_info.shape.ndims
        exp_ch_info = tf.reshape(ch_info, ch_info.shape + (1,) * exp_dims)
        new_o = (tf.multiply(o[0], exp_ch_info), None)
        new_ini = tuple((tf.multiply(e[0], exp_ch_info), None) for e in ini)
        if refer is not None:
            new_refer = tf.multiply(refer, tf.reshape(ch_info, ch_info.shape + (1,) * (refer.shape.ndims -
                                                                                       ch_info.shape.ndims)))
        else:
            new_refer = None
        return new_ini, new_o, new_refer
class ABiGRURectifyForBiWrapper(keras.Model):
    """Attention bi-GRU model (BidirectionalWrapper variant) with a rectify classifier.

    Per-epoch time ('t') and/or frequency ('f') features are extracted, fused
    along the last axis, cut into fixed-length sub-sequences, and run through a
    bidirectional GRU-plus-attention wrapper. Recurrent states, the last
    output, and the classifier's reference vector are returned so one long
    recording can be processed incrementally across multiple calls.
    """
    def __init__(self, name, config):
        super(ABiGRURectifyForBiWrapper, self).__init__(name=name, dtype=config.dtype)
        self.config = config
        self.data_format = self.config.data_format
        # 'df' in config.inputs means dynamic (delta) features accompany the spectrum.
        self.has_dynamics = 'df' in config.inputs
        if 't' in config.inputs:
            # Time-domain branch: conv extractor + dropout whose noise mask is
            # shared across axis 0 (noise_shape leading 1).
            self.select_t = TimeConv('time_conv_net', self.config)
            self.dropout_t = keras.layers.Dropout(self.config.dropout_t, noise_shape=(1, None, None, None, None),
                                                  name='time_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_t = None
            self.dropout_t = None
        if 'f' in config.inputs:
            # Frequency-domain branch.
            self.select_fre = nn.SelectF('select_fre', self.config.f_low_num, self.config.f_mid_num,
                                         self.config.select_f, self.config.seed, self.has_dynamics, self.config.f_wd,
                                         self.config.f_he_scale, self.dtype, self.data_format)
            self.dropout_fre = keras.layers.Dropout(self.config.dropout_f, noise_shape=(1, None, None, None, None),
                                                    name='fre_dropout_before_attention', dtype=self.dtype)
        else:
            self.select_fre = None
            self.dropout_fre = None
        # Splits the epoch sequence into sub-sequences of seq_len steps.
        self.disassemble = nn.Disassemble('disassemble', self.config.seq_len, self.config.seq_batch_size,
                                          self.config.padding_mode)
        self.bi_wrapper = nn.BidirectionalWrapper('bi_wrapper_for_gru_plus_attention', self.config.seq_len,
                                                  self.config.seed, self.config.rnn_h_units, self.config.focus_units,
                                                  self.config.rnn_layers, self.config.bi_wrapper_mmode,
                                                  self.config.focus_bias, self.config.focus_wd, self.dtype,
                                                  self.config.rnn_h_wd, self.config.rnn_input_dropout,
                                                  self.config.rnn_hidden_dropout)
        self.classification = nn.RectifyClassification('multi_task_classification', self.config.classes,
                                                       self.config.classifier_dr_rate, self.config.seed,
                                                       self.config.classifier_bias, self.dtype, self.config.wd)
        # Lazily-built cache of sub-layers exposing a regu() method (see regu()).
        self.regulate_layers = None
    def regu(self):
        """Collect and concatenate regularization terms from all sub-layers
        that define a ``regu()`` method; the layer list is cached on first use."""
        if self.regulate_layers is None:
            regulate_layers = []
            for l in self.layers:
                if hasattr(l, 'regu'):
                    regulate_layers.append(l)
            self.regulate_layers = regulate_layers
        r = []
        for l in self.regulate_layers:
            r += l.regu()
        return r
    def get_config(self):
        # Keras serialization hook: the whole model is re-creatable from (name, config).
        config = {'name': self.name, 'config': self.config}
        return config
    @classmethod
    def from_config(cls, kwargs):
        return cls(**kwargs)
    def get_input(self):
        """Return zero-valued dummy inputs and initial carry-over values,
        e.g. for building/tracing the model before real data arrives."""
        if 't' in self.config.inputs:
            t = tf.zeros((self.config.batch_size, self.config.chs, self.config.epoch_second * self.config.fs),
                         dtype=self.dtype)
        else:
            t = None
        if 'f' in self.config.inputs:
            f = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft), dtype=self.dtype)
        else:
            f = None
        if self.has_dynamics:
            df = tf.zeros((self.config.batch_size, self.config.chs, self.config.wins, self.config.fft, 2),
                          dtype=self.dtype)
        else:
            df = None
        inputs = (t, f, df)
        initial_states = None
        last_output = None
        no_begin = tf.constant([True], dtype='bool')
        refer = None
        return inputs, initial_states, last_output, no_begin, refer  # when use please append 'training' & 'classifier'
    # inputs --- (T, F) or (T, F, DF) per fre element --- (epochs, chs, wins, points)
    #            time element --- (epochs, chs, epoch_sampling_points)
    # initial_states --- ((((1, h_units, f) or None) * layers or None), None) or None
    # last_outputs --- ((1, top_h_units, f) or None, None) or None
    # refer --- (1, integrated_top_h_unit) or None
    # no_begin --- None or 1-D Tensor (bool) True indicates needs to use self-attention
    def call(self, inputs, initial_states=None, last_outputs=None, no_begin=None, training=None, refer=None,
             classifier=True):
        """Run feature extraction + bi-GRU wrapper (+ optional classifier head).

        Returns (out1, out2, next_ini, next_last, [ch_attention],
        [win_attention], next_refer); with ``classifier=True`` out1 holds the
        n_class logits and out2 the two extra rectify columns, otherwise out1
        holds the raw integrated top-layer features and out2 is None.
        """
        n_class = self.config.classes
        if self.config.heap is not None:
            # heap limits time steps; the frequency equivalent scales by wins.
            time_heap = self.config.heap
            fre_heap = int(self.config.heap * self.config.wins)
        else:
            time_heap = None
            fre_heap = None
        # for time
        if inputs[0] is not None and self.select_t is not None:
            t = tf.expand_dims(inputs[0], axis=-1)
            t = self.select_t(t, training, time_heap)
            t = self.dropout_t(t, training)
        else:
            t = None
        # for frequency
        if inputs[1] is not None and self.select_fre is not None:
            epochs = inputs[1].shape[0]
            f = tf.expand_dims(inputs[1], axis=-1)
            if inputs[2] is not None and self.has_dynamics:
                # Append the dynamic (delta) features along the channel axis.
                f = tf.concat([f, inputs[2]], axis=-1)
            # Drop the raw inputs eagerly to lower peak memory.
            del inputs
            gc.collect()
            f = tf.transpose(f, (0, 2, 1, 3, 4))
            # Fold the epoch axis into axis 0 so SelectF sees one flat batch,
            # then restore the per-epoch grouping afterwards.
            f = tf.concat(tf.unstack(f, f.shape[0], axis=0), axis=0)
            f = self.select_fre(f, training, fre_heap)
            f = tf.stack(tf.split(f, epochs, axis=0), axis=0)
            f = self.dropout_fre(f, training)
        else:
            del inputs
            gc.collect()
            f = None
        # NOTE(review): if both branches are disabled, all_in is never bound and
        # the code below raises NameError — presumably config guarantees at
        # least one of 't'/'f' is requested; confirm against config validation.
        if t is not None and f is not None:
            all_in = tf.concat((t, f), axis=-1)
        elif t is not None:
            all_in = t
        elif f is not None:
            all_in = f
        del t, f
        gc.collect()
        _f = all_in.shape[-1]
        # (seqs, steps, 1, wins, chs, xxx, _f)   mask output shape: (seqs, steps, 1)
        all_in, mask = self.disassemble(all_in)
        ini_s = initial_states
        last_o = last_outputs
        last_refer = refer
        seqs = len(all_in)
        # the last seq may have different steps
        outputs = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        ch_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        win_attention = tf.TensorArray(self.dtype, size=seqs, infer_shape=False)
        # Only the first sub-sequence receives the no_begin flag.
        exp_no_begin = (no_begin,) + (None,) * (seqs - 1)  # can't use tf.range
        if classifier:
            # Run wrapper + classification per sub-sequence, threading the
            # recurrent state and the rectify reference forward.
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.bi_wrapper(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                        self.config.relu_leaky, self.config.relu_threshold)
                result = self.classification(tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1), last_refer,
                                             temp_m, training)
                # The classifier's final element is the reference carried to the next seq.
                last_refer = result[-1]
                result = tf.concat(result[:-1], axis=-1)  # (steps, 1, n_class + 2)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                ini_s = (gru_o[1][0], None)
                last_o = (gru_o[2][0], None)  # top layer
            outputs = outputs.concat()  # (steps * seqs, 1, n_class + 2)
            # (batch, n_class + 2)
            outputs = tf.reshape(outputs, (-1, outputs.shape[-1]))
            # out1: the n_class logits; out2: the two auxiliary rectify columns.
            outputs = tf.split(outputs, n_class + 2, axis=-1)
            out1 = [tf.concat(outputs[:n_class], axis=-1)]
            out2 = [tf.concat(outputs[-2:], axis=-1)]
        else:
            # Feature-extraction mode: no classifier, no reference threading.
            last_refer = None
            for i in range(seqs):
                if mask is not None:
                    temp_m = mask[i]
                else:
                    temp_m = None
                gru_o = self.bi_wrapper(all_in[i], ini_s, last_o, temp_m, exp_no_begin[i], training,
                                        self.config.relu_leaky, self.config.relu_threshold)
                result = tf.concat(tf.unstack(gru_o[0], _f, axis=-1), axis=-1)  # (steps, 1, integrated_top_units)
                outputs = outputs.write(i, result)
                ch_attention = ch_attention.write(i, gru_o[3])
                win_attention = win_attention.write(i, gru_o[4])
                ini_s = (gru_o[1][0], None)
                last_o = (gru_o[2][0], None)  # top layer
            outputs = outputs.concat()  # (steps * seqs, 1, integrated_top_units)
            # (batch, integrated_top_units)
            outputs = tf.reshape(outputs, (-1, outputs.shape[-1]))
            out1 = [outputs]
            out2 = None
        next_ini = ini_s
        next_last = last_o
        next_refer = last_refer
        # Flatten the attention records over sub-sequences and move the epoch
        # axes forward before collapsing them into one batch dimension.
        ch_attention = tf.transpose(ch_attention.concat(), (0, 1, 2, 4, 3))
        # (batch, 2, f, chs)
        ch_attention = tf.reshape(ch_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                           tf.constant(ch_attention.shape[-3:])], axis=0))
        win_attention = tf.transpose(win_attention.concat(), (0, 1, 2, 5, 3, 4))
        # (batch, 2, f, wins, chs)
        win_attention = tf.reshape(win_attention, tf.concat([tf.constant((-1,), dtype='int32'),
                                                             tf.constant(win_attention.shape[-4:])], axis=0))
        # All results above have already been aligned using the mask.
        # output --- (epochs, n_class/top_units)   ch_att --- (epochs, 2, 1/3, chs)
        # win_att --- (epochs, 2, 1/3, wins, chs)
        # ini_s returned and last_o returned need tackling furthermore (should reset zeros or None when new_file starts)
        return out1, out2, next_ini, next_last, [ch_attention], [win_attention], next_refer
    def reset_ele(self, ch_info, ini, o, refer=None):
        """Zero the carried states of channels flagged 0 in ``ch_info``
        (broadcast with trailing singleton dims), e.g. at a new-file boundary."""
        ch_info = tf.cast(ch_info, self.dtype)
        exp_dims = o[0].shape.ndims - ch_info.shape.ndims
        exp_ch_info = tf.reshape(ch_info, ch_info.shape + (1,) * exp_dims)
        new_o = (tf.multiply(o[0], exp_ch_info), None)
        new_ini = (tuple(tf.multiply(e, exp_ch_info) for e in ini[0]), None)
        if refer is not None:
            new_refer = tf.multiply(refer, tf.reshape(ch_info, ch_info.shape + (1,) * (refer.shape.ndims -
                                                                                       ch_info.shape.ndims)))
        else:
            new_refer = None
        return new_ini, new_o, new_refer
| 51.95471
| 120
| 0.554535
| 7,454
| 57,358
| 4.072444
| 0.035551
| 0.104757
| 0.015417
| 0.008565
| 0.971999
| 0.969199
| 0.968244
| 0.968244
| 0.957702
| 0.95365
| 0
| 0.013051
| 0.333415
| 57,358
| 1,103
| 121
| 52.001813
| 0.780902
| 0.104031
| 0
| 0.94984
| 0
| 0
| 0.018393
| 0.00946
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055496
| false
| 0
| 0.004269
| 0.008538
| 0.115261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2cda356183fda295c48edd5cd5f9afaedd10edd
| 9,177
|
py
|
Python
|
test/test_objects.py
|
robertcragie/openleadr-python
|
c430b54f46328afd76181add72dcb49f7387b6cd
|
[
"Apache-2.0"
] | 75
|
2020-10-02T18:35:36.000Z
|
2022-03-26T06:46:42.000Z
|
test/test_objects.py
|
robertcragie/openleadr-python
|
c430b54f46328afd76181add72dcb49f7387b6cd
|
[
"Apache-2.0"
] | 93
|
2020-09-21T16:48:28.000Z
|
2022-03-24T13:35:31.000Z
|
test/test_objects.py
|
robertcragie/openleadr-python
|
c430b54f46328afd76181add72dcb49f7387b6cd
|
[
"Apache-2.0"
] | 27
|
2020-11-21T05:36:58.000Z
|
2022-03-22T23:41:49.000Z
|
from openleadr import objects, enums
from datetime import datetime, timedelta, timezone
from openleadr.utils import ensure_bytes
from openleadr.messaging import create_message, parse_message, validate_xml_schema
from pprint import pprint
import pytest
def test_oadr_event():
    """Round-trip an oadrDistributeEvent with one fully-populated event and a response."""
    intervals = [objects.Interval(dtstart=datetime.now(),
                                  duration=timedelta(minutes=5),
                                  uid=0,
                                  signal_payload=1),
                 objects.Interval(dtstart=datetime.now(),
                                  duration=timedelta(minutes=5),
                                  uid=1,
                                  signal_payload=2)]
    signal = objects.EventSignal(intervals=intervals,
                                 targets=[objects.Target(ven_id='1234')],
                                 signal_name=enums.SIGNAL_NAME.LOAD_CONTROL,
                                 signal_type=enums.SIGNAL_TYPE.LEVEL,
                                 signal_id=1,
                                 current_value=0)
    event = objects.Event(event_descriptor=objects.EventDescriptor(event_id=1,
                                                                   modification_number=0,
                                                                   market_context='MarketContext1',
                                                                   event_status=enums.EVENT_STATUS.NEAR),
                          active_period=objects.ActivePeriod(dtstart=datetime.now(),
                                                             duration=timedelta(minutes=10)),
                          event_signals=[signal],
                          targets=[objects.Target(ven_id='1234')])
    response = objects.Response(response_code=200,
                                response_description='OK',
                                request_id='1234')
    # Serialize, validate against the OADR schema, then parse back.
    msg = create_message('oadrDistributeEvent', response=response, events=[event])
    validate_xml_schema(ensure_bytes(msg))
    message_type, message_payload = parse_message(msg)
def test_oadr_event_targets_by_type():
    """Event-level VEN targets supplied via the grouped targets_by_type form."""
    intervals = [objects.Interval(dtstart=datetime.now(),
                                  duration=timedelta(minutes=5),
                                  uid=0,
                                  signal_payload=1),
                 objects.Interval(dtstart=datetime.now(),
                                  duration=timedelta(minutes=5),
                                  uid=1,
                                  signal_payload=2)]
    signal = objects.EventSignal(intervals=intervals,
                                 targets=[objects.Target(ven_id='1234')],
                                 signal_name=enums.SIGNAL_NAME.LOAD_CONTROL,
                                 signal_type=enums.SIGNAL_TYPE.LEVEL,
                                 signal_id=1,
                                 current_value=0)
    event = objects.Event(event_descriptor=objects.EventDescriptor(event_id=1,
                                                                   modification_number=0,
                                                                   market_context='MarketContext1',
                                                                   event_status=enums.EVENT_STATUS.NEAR),
                          active_period=objects.ActivePeriod(dtstart=datetime.now(),
                                                             duration=timedelta(minutes=10)),
                          event_signals=[signal],
                          targets_by_type={'ven_id': ['ven123']})
    # Serialize, validate against the OADR schema, then parse back.
    msg = create_message('oadrDistributeEvent', events=[event])
    validate_xml_schema(ensure_bytes(msg))
    message_type, message_payload = parse_message(msg)
def test_oadr_event_targets_and_targets_by_type():
    """targets and targets_by_type may coexist when they describe the same VENs."""
    intervals = [objects.Interval(dtstart=datetime.now(),
                                  duration=timedelta(minutes=5),
                                  uid=0,
                                  signal_payload=1),
                 objects.Interval(dtstart=datetime.now(),
                                  duration=timedelta(minutes=5),
                                  uid=1,
                                  signal_payload=2)]
    signal = objects.EventSignal(intervals=intervals,
                                 targets=[objects.Target(ven_id='1234')],
                                 signal_name=enums.SIGNAL_NAME.LOAD_CONTROL,
                                 signal_type=enums.SIGNAL_TYPE.LEVEL,
                                 signal_id=1,
                                 current_value=0)
    event = objects.Event(event_descriptor=objects.EventDescriptor(event_id=1,
                                                                   modification_number=0,
                                                                   market_context='MarketContext1',
                                                                   event_status=enums.EVENT_STATUS.NEAR),
                          active_period=objects.ActivePeriod(dtstart=datetime.now(),
                                                             duration=timedelta(minutes=10)),
                          event_signals=[signal],
                          targets=[{'ven_id': 'ven123'}],
                          targets_by_type={'ven_id': ['ven123']})
    # Serialize, validate against the OADR schema, then parse back.
    msg = create_message('oadrDistributeEvent', events=[event])
    validate_xml_schema(ensure_bytes(msg))
    message_type, message_payload = parse_message(msg)
def test_oadr_event_targets_and_targets_by_type_invalid():
    """Contradictory targets vs. targets_by_type must raise ValueError."""
    with pytest.raises(ValueError):
        intervals = [objects.Interval(dtstart=datetime.now(),
                                      duration=timedelta(minutes=5),
                                      uid=0,
                                      signal_payload=1),
                     objects.Interval(dtstart=datetime.now(),
                                      duration=timedelta(minutes=5),
                                      uid=1,
                                      signal_payload=2)]
        signal = objects.EventSignal(intervals=intervals,
                                     targets=[objects.Target(ven_id='1234')],
                                     signal_name=enums.SIGNAL_NAME.LOAD_CONTROL,
                                     signal_type=enums.SIGNAL_TYPE.LEVEL,
                                     signal_id=1,
                                     current_value=0)
        event = objects.Event(event_descriptor=objects.EventDescriptor(event_id=1,
                                                                       modification_number=0,
                                                                       market_context='MarketContext1',
                                                                       event_status=enums.EVENT_STATUS.NEAR),
                              active_period=objects.ActivePeriod(dtstart=datetime.now(),
                                                                 duration=timedelta(minutes=10)),
                              event_signals=[signal],
                              targets=[objects.Target(ven_id='ven456')],
                              targets_by_type={'ven_id': ['ven123']})
        # Not reached when the constructor raises, as expected; kept from the
        # original test body.
        msg = create_message('oadrDistributeEvent', events=[event])
        validate_xml_schema(ensure_bytes(msg))
        message_type, message_payload = parse_message(msg)
def test_oadr_event_no_targets():
    """An Event without event-level targets (or targets_by_type) is rejected."""
    with pytest.raises(ValueError):
        intervals = [objects.Interval(dtstart=datetime.now(),
                                      duration=timedelta(minutes=5),
                                      uid=0,
                                      signal_payload=1),
                     objects.Interval(dtstart=datetime.now(),
                                      duration=timedelta(minutes=5),
                                      uid=1,
                                      signal_payload=2)]
        signal = objects.EventSignal(intervals=intervals,
                                     targets=[objects.Target(ven_id='1234')],
                                     signal_name=enums.SIGNAL_NAME.LOAD_CONTROL,
                                     signal_type=enums.SIGNAL_TYPE.LEVEL,
                                     signal_id=1,
                                     current_value=0)
        objects.Event(event_descriptor=objects.EventDescriptor(event_id=1,
                                                               modification_number=0,
                                                               market_context='MarketContext1',
                                                               event_status=enums.EVENT_STATUS.NEAR),
                      active_period=objects.ActivePeriod(dtstart=datetime.now(),
                                                         duration=timedelta(minutes=10)),
                      event_signals=[signal])
def test_event_signal_with_grouped_targets():
    """targets_by_type on an EventSignal expands into a flat list of Targets."""
    signal = objects.EventSignal(intervals=[objects.Interval(dtstart=datetime.now(timezone.utc),
                                                             duration=timedelta(minutes=10),
                                                             signal_payload=1)],
                                 signal_name='simple',
                                 signal_type='level',
                                 signal_id='signal123',
                                 targets_by_type={'ven_id': ['ven123', 'ven456']})
    expected = [objects.Target(ven_id='ven123'), objects.Target(ven_id='ven456')]
    assert signal.targets == expected
def test_event_signal_with_incongruent_targets():
    """Signal-level targets that disagree with targets_by_type must raise ValueError."""
    with pytest.raises(ValueError):
        objects.EventSignal(intervals=[objects.Interval(dtstart=datetime.now(timezone.utc),
                                                        duration=timedelta(minutes=10),
                                                        signal_payload=1)],
                            signal_name='simple',
                            signal_type='level',
                            signal_id='signal123',
                            targets=[objects.Target(ven_id='ven123')],
                            targets_by_type={'ven_id': ['ven123', 'ven456']})
def test_event_descriptor_modification_number():
    """A modification_number of None defaults to 0 on the descriptor."""
    descriptor = objects.EventDescriptor(event_id='event123',
                                         modification_number=None,
                                         market_context='http://marketcontext01',
                                         event_status='near')
    assert descriptor.modification_number == 0
| 39.9
| 106
| 0.517271
| 794
| 9,177
| 5.720403
| 0.115869
| 0.056143
| 0.067371
| 0.085865
| 0.876706
| 0.860414
| 0.831352
| 0.824967
| 0.824967
| 0.814839
| 0
| 0.026911
| 0.392612
| 9,177
| 229
| 107
| 40.074236
| 0.787944
| 0
| 0
| 0.848341
| 0
| 0
| 0.039011
| 0
| 0
| 0
| 0
| 0
| 0.009479
| 1
| 0.037915
| false
| 0
| 0.028436
| 0
| 0.066351
| 0.004739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2e35c14deeafbacd351d0639d5f9083e4f3cfcc
| 68,577
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_pinned/cmp_zeusmp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_pinned/cmp_zeusmp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_pinned/cmp_zeusmp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.241504,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.392377,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.18174,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.659379,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.14181,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.654858,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.45604,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.47059,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 8.01159,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.223256,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.023903,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.268243,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.176777,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.491499,
'Execution Unit/Register Files/Runtime Dynamic': 0.20068,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.714347,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.64154,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 5.09601,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00125527,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00125527,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00108777,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000418048,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00253942,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00613773,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0122344,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.169941,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.373805,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.577195,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.13931,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.233263,
'L2/Runtime Dynamic': 0.0590567,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.95857,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.93882,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.120398,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.120398,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.52943,
'Load Store Unit/Runtime Dynamic': 2.65297,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.296881,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.593761,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.105364,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.108854,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0613199,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.740647,
'Memory Management Unit/Runtime Dynamic': 0.170174,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 28.0454,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.778892,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0430896,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.332859,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.15484,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 10.2724,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.118287,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.295597,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.578812,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.279053,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.450102,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.227196,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.956351,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.230414,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.32808,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.10935,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0117047,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.131363,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0865636,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.240713,
'Execution Unit/Register Files/Runtime Dynamic': 0.0982684,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.306324,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.706696,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.46229,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000624109,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000624109,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00054343,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000210279,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00124349,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00303514,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0059899,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0832158,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.29324,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.183042,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.282638,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.76865,
'Instruction Fetch Unit/Runtime Dynamic': 0.557921,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.114535,
'L2/Runtime Dynamic': 0.029192,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.05909,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.949667,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0589452,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0589451,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.33745,
'Load Store Unit/Runtime Dynamic': 1.29931,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.145349,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.290697,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0515848,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0532983,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.329114,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0300266,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.573837,
'Memory Management Unit/Runtime Dynamic': 0.0833249,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.712,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.28765,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0160907,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.138358,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.442098,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.87413,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.118279,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.29559,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.578774,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.279038,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.450078,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.227184,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.9563,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.230405,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.32799,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.109343,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0117041,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.131356,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.086559,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.240698,
'Execution Unit/Register Files/Runtime Dynamic': 0.0982631,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.306306,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.706658,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.46219,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000624084,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000624084,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000543408,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00021027,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00124343,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.003035,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00598966,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0832114,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.29296,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.183032,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.282623,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.76835,
'Instruction Fetch Unit/Runtime Dynamic': 0.557892,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.11453,
'L2/Runtime Dynamic': 0.0291911,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.05901,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.94963,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0589425,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0589425,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.33735,
'Load Store Unit/Runtime Dynamic': 1.29926,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.145342,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.290685,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0515824,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.053296,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.329097,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0300251,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.573815,
'Memory Management Unit/Runtime Dynamic': 0.0833211,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.7115,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.287631,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0160898,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.13835,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.442071,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.87392,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.118319,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.295621,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.578963,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.279115,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.450202,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.227247,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.956564,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.230463,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.32846,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.109379,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0117073,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.131395,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0865829,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.240773,
'Execution Unit/Register Files/Runtime Dynamic': 0.0982902,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.306398,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.706853,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.46271,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00062422,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00062422,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000543527,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000210316,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00124377,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00303574,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00599098,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0832344,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.29442,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.183081,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.282701,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.76988,
'Instruction Fetch Unit/Runtime Dynamic': 0.558043,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.11457,
'L2/Runtime Dynamic': 0.0291984,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.05943,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.94985,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0589561,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.058956,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.33784,
'Load Store Unit/Runtime Dynamic': 1.29956,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.145376,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.290751,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0515943,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0533084,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.329188,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.030033,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.573926,
'Memory Management Unit/Runtime Dynamic': 0.0833414,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.7141,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.287726,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0160945,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.138388,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.442208,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.87505,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.9189973378443441,
'Runtime Dynamic': 1.9189973378443441,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.211798,
'Runtime Dynamic': 0.110577,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 90.3948,
'Peak Power': 123.507,
'Runtime Dynamic': 25.0061,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 90.183,
'Total Cores/Runtime Dynamic': 24.8955,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.211798,
'Total L3s/Runtime Dynamic': 0.110577,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.02954
| 124
| 0.681934
| 8,082
| 68,577
| 5.780376
| 0.067558
| 0.123638
| 0.113021
| 0.093499
| 0.939401
| 0.931374
| 0.918723
| 0.886936
| 0.86232
| 0.843055
| 0
| 0.131426
| 0.224434
| 68,577
| 914
| 125
| 75.02954
| 0.746945
| 0
| 0
| 0.642232
| 0
| 0
| 0.657718
| 0.04812
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2f18f0b82703e3a65b991395ae3ffadb3244314
| 15,900
|
py
|
Python
|
pyapm/tools/trim.py
|
Xero64/pyapm
|
a50321503a13faf27c10b8413d8d2dea5cd185f6
|
[
"MIT"
] | 1
|
2021-03-17T04:40:46.000Z
|
2021-03-17T04:40:46.000Z
|
pyapm/tools/trim.py
|
Xero64/pyapm
|
a50321503a13faf27c10b8413d8d2dea5cd185f6
|
[
"MIT"
] | null | null | null |
pyapm/tools/trim.py
|
Xero64/pyapm
|
a50321503a13faf27c10b8413d8d2dea5cd185f6
|
[
"MIT"
] | null | null | null |
from pygeom.geom3d import Vector
from .mass import Mass, MassCollection
class LoopingTrim():
name = None
sys = None
gravacc = None
speed = None
density = None
mass = None
loadfac = None
_weight = None
_lift = None
_dynpres = None
_acc = None
_rad = None
_CL = None
_prate = None
_qco2V = None
def __init__(self, name: str, sys: object):
self.name = name
self.sys = sys
self.gravacc = 9.80665
def reset(self):
for attr in self.__dict__:
if attr[0] == '_':
self.__dict__[attr] = None
def set_gravitational_acceleration(self, gravacc: float):
self.gravacc = gravacc
self.reset()
def set_speed_and_density(self, speed: float, density: float):
self.speed = speed
self.density = density
self.reset()
def set_mass(self, mass):
if isinstance(mass, str):
self.mass = self.sys.masses[mass]
elif isinstance(mass, float):
self.mass = Mass(self.name + ' Mass', mass,
self.sys.rref.x, self.sys.rref.y, self.sys.rref.z)
elif isinstance(mass, (Mass, MassCollection)):
self.mass = mass
self.reset()
def set_load_factor(self, loadfac: float):
self.loadfac = loadfac
self.reset()
def create_trim_result(self):
from ..classes import PanelTrim
ltrm = PanelTrim(self.name, self.sys)
ltrm.set_density(rho=self.density)
ltrm.set_state(speed=self.speed, qco2V=self.qco2V)
ltrm.set_targets(CLt=self.CL)
rcg = Vector(self.mass.xcm, self.mass.ycm, self.mass.zcm)
ltrm.set_cg(rcg)
return ltrm
@property
def weight(self):
if self._weight is None:
self._weight = self.mass.mass*self.gravacc
return self._weight
@property
def lift(self):
if self._lift is None:
self._lift = self.loadfac*self.weight
return self._lift
@property
def dynpres(self):
if self._dynpres is None:
self._dynpres = self.density*self.speed**2/2
return self._dynpres
@property
def CL(self):
if self._CL is None:
self._CL = self.lift/self.dynpres/self.sys.sref
return self._CL
@property
def acc(self):
if self._acc is None:
self._acc = (self.loadfac-1)*self.gravacc
return self._acc
@property
def rad(self):
if self._rad is None:
if self.acc == 0.0:
self._rad = float('inf')
else:
self._rad = self.speed**2/self.acc
return self._rad
@property
def prate(self):
if self._prate is None:
self._prate = self.acc/self.speed
return self._prate
@property
def qco2V(self):
if self._qco2V is None:
self._qco2V = self.prate*self.sys.cref/2/self.speed
return self._qco2V
def __str__(self):
from py2md.classes import MDTable
outstr = '# Looping Trim State '+self.name+' for '+self.sys.name+'\n'
table = MDTable()
table.add_column('Speed', '.3f', data=[self.speed])
table.add_column('Density', '.3f', data=[self.density])
table.add_column('Dyn. Press.', '.3f', data=[self.dynpres])
outstr += str(table)
table = MDTable()
table.add_column('Mass', '.3f', data=[self.mass.mass])
table.add_column('Grav. Acc.', '.5f', data=[self.gravacc])
table.add_column('Weight', '.3f', data=[self.weight])
outstr += str(table)
table = MDTable()
table.add_column('Load Factor', '.3f', data=[self.loadfac])
table.add_column('Lift', '.3f', data=[self.lift])
table.add_column('CL', '.5f', data=[self.CL])
outstr += str(table)
table = MDTable()
table.add_column('Acceleration', '.3f', data=[self.acc])
table.add_column('Radius', '.3f', data=[self.rad])
table.add_column('Pitch Rate', '.5f', data=[self.prate])
outstr += str(table)
return outstr
def _repr_markdown_(self):
return self.__str__()
class TurningTrim():
name = None
sys = None
gravacc = None
speed = None
density = None
mass = None
bankang = None
_loadfac = None
_weight = None
_lift = None
_dynpres = None
_acc = None
_rad = None
_CL = None
_prate = None
_rrate = None
_qco2V = None
_rbo2V = None
def __init__(self, name: str, sys: object):
self.name = name
self.sys = sys
self.gravacc = 9.80665
def reset(self):
for attr in self.__dict__:
if attr[0] == '_':
self.__dict__[attr] = None
def set_gravitational_acceleration(self, gravacc: float):
self.gravacc = gravacc
self.reset()
def set_speed_and_density(self, speed: float, density: float):
self.speed = speed
self.density = density
self.reset()
def set_mass(self, mass):
if isinstance(mass, str):
self.mass = self.sys.masses[mass]
elif isinstance(mass, float):
self.mass = Mass(self.name + ' Mass', mass,
self.sys.rref.x, self.sys.rref.y, self.sys.rref.z)
elif isinstance(mass, (Mass, MassCollection)):
self.mass = mass
self.reset()
def set_bank_angle(self, bankang: float):
self.bankang = bankang
self.reset()
def create_trim_result(self):
from ..classes import PanelTrim
ltrm = PanelTrim(self.name, self.sys)
ltrm.set_density(rho=self.density)
ltrm.set_state(speed=self.speed, qco2V=self.qco2V, rbo2V=self.rbo2V)
ltrm.set_targets(CLt=self.CL)
rcg = Vector(self.mass.xcm, self.mass.ycm, self.mass.zcm)
ltrm.set_cg(rcg)
return ltrm
@property
def loadfac(self):
if self._loadfac is None:
from math import radians, cos
brad = radians(self.bankang)
self._loadfac = 1.0/cos(brad)
return self._loadfac
@property
def weight(self):
if self._weight is None:
self._weight = self.mass.mass*self.gravacc
return self._weight
@property
def lift(self):
if self._lift is None:
self._lift = self.loadfac*self.weight
return self._lift
@property
def dynpres(self):
if self._dynpres is None:
self._dynpres = self.density*self.speed**2/2
return self._dynpres
@property
def CL(self):
if self._CL is None:
self._CL = self.lift/self.dynpres/self.sys.sref
return self._CL
@property
def acc(self):
if self._acc is None:
self._acc = (self.loadfac**2-1.0)**0.5*self.gravacc
return self._acc
@property
def rad(self):
if self._rad is None:
if self.acc == 0.0:
self._rad = float('inf')
else:
self._rad = self.speed**2/self.acc
return self._rad
@property
def prate(self):
if self._prate is None:
if self.acc != 0.0:
fac = (self.loadfac**2-1.0)/self.loadfac
self._prate = self.gravacc/self.speed*fac
else:
self._prate = 0.0
return self._prate
@property
def rrate(self):
if self._rrate is None:
if self.acc != 0.0:
self._rrate = self.acc/self.speed/self.loadfac
else:
self._rrate = 0.0
return self._rrate
@property
def qco2V(self):
if self._qco2V is None:
self._qco2V = self.prate*self.sys.cref/2/self.speed
return self._qco2V
@property
def rbo2V(self):
if self._rbo2V is None:
self._rbo2V = self.rrate*self.sys.bref/2/self.speed
return self._rbo2V
def __str__(self):
from py2md.classes import MDTable
outstr = '# Turning Trim State '+self.name+' for '+self.sys.name+'\n'
table = MDTable()
table.add_column('Speed', '.3f', data=[self.speed])
table.add_column('Density', '.3f', data=[self.density])
table.add_column('Dyn. Press.', '.3f', data=[self.dynpres])
outstr += str(table)
table = MDTable()
table.add_column('Mass', '.3f', data=[self.mass.mass])
table.add_column('Grav. Acc.', '.5f', data=[self.gravacc])
table.add_column('Weight', '.3f', data=[self.weight])
outstr += str(table)
table = MDTable()
table.add_column('Bank Angle (deg)', '.1f', data=[self.bankang])
table.add_column('Load Factor', '.3f', data=[self.loadfac])
table.add_column('Lift', '.3f', data=[self.lift])
table.add_column('CL', '.5f', data=[self.CL])
outstr += str(table)
table = MDTable()
table.add_column('Acceleration', '.3f', data=[self.acc])
table.add_column('Turn Radius', '.3f', data=[self.rad])
table.add_column('Pitch Rate', '.5f', data=[self.prate])
table.add_column('Roll Rate', '.5f', data=[self.rrate])
outstr += str(table)
return outstr
def _repr_markdown_(self):
return self.__str__()
class LevelTrim():
name = None
sys = None
gravacc = None
speed = None
density = None
mass = None
_weight = None
_lift = None
_dynpres = None
_CL = None
def __init__(self, name: str, sys: object):
self.name = name
self.sys = sys
self.gravacc = 9.80665
def reset(self):
for attr in self.__dict__:
if attr[0] == '_':
self.__dict__[attr] = None
def set_gravitational_acceleration(self, gravacc: float):
self.gravacc = gravacc
self.reset()
def set_density(self, density: float):
self.density = density
self.reset()
def set_speed(self, speed: float):
self.speed = speed
self.reset()
def set_mass(self, mass):
if isinstance(mass, str):
self.mass = self.sys.masses[mass]
elif isinstance(mass, float):
self.mass = Mass(self.name + ' Mass', mass,
self.sys.rref.x, self.sys.rref.y, self.sys.rref.z)
elif isinstance(mass, (Mass, MassCollection)):
self.mass = mass
self.reset()
def create_trim_result(self):
from ..classes import PanelTrim
lres = PanelTrim(self.name, self.sys)
lres.set_density(rho=self.density)
lres.set_state(speed=self.speed)
return lres
@property
def weight(self):
if self._weight is None:
self._weight = self.mass.mass*self.gravacc
return self._weight
@property
def lift(self):
if self._lift is None:
self._lift = self.weight
return self._lift
@property
def dynpres(self):
if self._dynpres is None:
self._dynpres = self.density*self.speed**2/2
return self._dynpres
@property
def CL(self):
if self._CL is None:
self._CL = self.lift/self.dynpres/self.sys.sref
return self._CL
def trim_speed_from_CL(self, CL: float):
if self.mass is not None and self.density is not None:
W = self.weight
S = self.sys.sref
rho = self.density
self.speed = (W/S/rho/CL*2)**0.5
self._CL = CL
def __str__(self):
from py2md.classes import MDTable
outstr = '# Level Trim State '+self.name+' for '+self.sys.name+'\n'
table = MDTable()
table.add_column('Speed', '.3f', data=[self.speed])
table.add_column('Density', '.3f', data=[self.density])
table.add_column('Dyn. Press.', '.3f', data=[self.dynpres])
outstr += str(table)
table = MDTable()
table.add_column('Mass', '.3f', data=[self.mass.mass])
table.add_column('Grav. Acc.', '.5f', data=[self.gravacc])
table.add_column('Weight', '.3f', data=[self.weight])
outstr += str(table)
table = MDTable()
table.add_column('Lift', '.3f', data=[self.lift])
table.add_column('CL', '.5f', data=[self.CL])
outstr += str(table)
return outstr
def _repr_markdown_(self):
return self.__str__()
class LoadTrim():
    """Trim condition defined directly by force and moment targets.

    Loads (lift L, side force Y, and rolling/pitching/yawing moments
    l, m, n) are converted to the corresponding non-dimensional
    coefficient targets; any load left as ``None`` produces a ``None``
    coefficient.  Coefficients are computed lazily and cached in
    underscore-prefixed attributes, which ``reset`` invalidates.
    """
    name: str = None        # trim case label
    sys: object = None      # panel system; provides sref, bref and cref
    speed: float = None     # true airspeed
    density: float = None   # air density
    L: float = None         # lift force target
    Y: float = None         # side force target
    l: float = None         # rolling moment target
    m: float = None         # pitching moment target
    n: float = None         # yawing moment target
    _dynpres: float = None
    _CL: float = None
    _CY: float = None
    _Cl: float = None
    _Cm: float = None
    _Cn: float = None

    def __init__(self, name: str, sys: object):
        self.name = name
        self.sys = sys

    def reset(self):
        """Invalidate all cached (underscore-prefixed) derived values."""
        for attr in self.__dict__:
            if attr[0] == '_':
                self.__dict__[attr] = None

    def set_speed_and_density(self, speed: float, density: float):
        """Set airspeed and air density and clear cached values."""
        self.speed = speed
        self.density = density
        self.reset()

    def set_loads(self, L: float=None, Y: float=None,
                  l: float=None, m: float=None, n: float=None):
        """Set the target loads and clear cached coefficient values."""
        self.L = L
        self.Y = Y
        self.l = l
        self.m = m
        self.n = n
        # Bug fix: previously the cached coefficients were not cleared
        # here, so changing the loads after reading any coefficient left
        # stale values in _CL/_CY/_Cl/_Cm/_Cn.
        self.reset()

    def create_trim_result(self):
        """Build a ``PanelTrim`` targeting this trim's coefficients."""
        from ..classes import PanelTrim
        ltrm = PanelTrim(self.name, self.sys)
        ltrm.set_density(rho=self.density)
        ltrm.set_state(speed=self.speed)
        ltrm.set_targets(CLt=self.CL, CYt=self.CY,
                         Clt=self.Cl, Cmt=self.Cm, Cnt=self.Cn)
        return ltrm

    @property
    def dynpres(self):
        """Dynamic pressure: rho*V**2/2."""
        if self._dynpres is None:
            self._dynpres = self.density*self.speed**2/2
        return self._dynpres

    @property
    def CL(self):
        """Lift coefficient target, or None if L is unset."""
        if self._CL is None:
            if self.L is not None:
                self._CL = self.L/self.dynpres/self.sys.sref
        return self._CL

    @property
    def CY(self):
        """Side force coefficient target, or None if Y is unset."""
        if self._CY is None:
            if self.Y is not None:
                self._CY = self.Y/self.dynpres/self.sys.sref
        return self._CY

    @property
    def Cl(self):
        """Rolling moment coefficient target, or None if l is unset."""
        if self._Cl is None:
            if self.l is not None:
                self._Cl = self.l/self.dynpres/self.sys.sref/self.sys.bref
        return self._Cl

    @property
    def Cm(self):
        """Pitching moment coefficient target, or None if m is unset."""
        if self._Cm is None:
            if self.m is not None:
                self._Cm = self.m/self.dynpres/self.sys.sref/self.sys.cref
        return self._Cm

    @property
    def Cn(self):
        """Yawing moment coefficient target, or None if n is unset."""
        if self._Cn is None:
            if self.n is not None:
                self._Cn = self.n/self.dynpres/self.sys.sref/self.sys.bref
        return self._Cn

    def __str__(self):
        from py2md.classes import MDTable
        outstr = '# Load Trim State '+self.name+' for '+self.sys.name+'\n'
        table = MDTable()
        table.add_column('Speed', '.3f', data=[self.speed])
        table.add_column('Density', '.3f', data=[self.density])
        table.add_column('Dyn. Press.', '.3f', data=[self.dynpres])
        outstr += str(table)
        table = MDTable()
        table.add_column('L', '.3f', data=[self.L])
        table.add_column('Y', '.3f', data=[self.Y])
        table.add_column('l', '.3f', data=[self.l])
        table.add_column('m', '.3f', data=[self.m])
        table.add_column('n', '.3f', data=[self.n])
        outstr += str(table)
        table = MDTable()
        table.add_column('CL', '.5f', data=[self.CL])
        table.add_column('CY', '.5f', data=[self.CY])
        table.add_column('Cl', '.5f', data=[self.Cl])
        table.add_column('Cm', '.5f', data=[self.Cm])
        table.add_column('Cn', '.5f', data=[self.Cn])
        outstr += str(table)
        return outstr

    def _repr_markdown_(self):
        return self.__str__()
| 33.829787
| 79
| 0.563648
| 2,064
| 15,900
| 4.190407
| 0.0625
| 0.043473
| 0.076078
| 0.032374
| 0.848075
| 0.82102
| 0.818361
| 0.802867
| 0.797433
| 0.776622
| 0
| 0.012173
| 0.307673
| 15,900
| 469
| 80
| 33.901919
| 0.773528
| 0
| 0
| 0.750538
| 0
| 0
| 0.035597
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137634
| false
| 0
| 0.023656
| 0.008602
| 0.382796
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e8f3579c1cce388e561b963db53e90e00b5aef1b
| 17,118
|
py
|
Python
|
lithic/resources/account_holders.py
|
lithic-com/lithic-python
|
be19d7195ebdf217b45f1ab59b39021d51330989
|
[
"Apache-2.0"
] | null | null | null |
lithic/resources/account_holders.py
|
lithic-com/lithic-python
|
be19d7195ebdf217b45f1ab59b39021d51330989
|
[
"Apache-2.0"
] | null | null | null |
lithic/resources/account_holders.py
|
lithic-com/lithic-python
|
be19d7195ebdf217b45f1ab59b39021d51330989
|
[
"Apache-2.0"
] | null | null | null |
# File generated from our OpenAPI spec by Stainless.
from typing import Optional, Union, List, Dict
from .._core import Timeout, make_request_options
from .._resource import SyncAPIResource, AsyncAPIResource
from .._models import StringModel, NoneModel
from ..pagination import SyncPage, AsyncPage
from ..types.account_holder import *
from ..types.account_holder_document import *
from ..types.account_holder_create_webhook_response import *
from ..types.account_holder_list_documents_response import *
from ..types.account_holder_create_params import *
from ..types.account_holder_create_webhook_params import *
from ..types.account_holder_resubmit_params import *
from ..types.account_holder_upload_document_params import *
__all__ = ["AccountHolders", "AsyncAccountHolders"]
class AccountHolders(SyncAPIResource):
def create(
self,
body: AccountHolderCreateParams,
*,
headers: Optional[Dict[str, str]] = None,
max_retries: Optional[int] = None,
timeout: Optional[Union[float, Timeout]] = None,
) -> AccountHolder:
"""Run an individual or business's information through the Customer
Identification Program (CIP) and return an `account_token` if the
status is accepted or pending (i.e., further action required).
All calls to this endpoint will return an immediate response - though in some cases, the response may indicate the workflow is under review or further action will be needed to complete the account creation process. This endpoint can only be used on accounts that are part of the program the calling API key manages.
"""
headers = {"Accept": "application/json", **(headers or {})}
options = make_request_options(headers, max_retries, timeout)
return self.post("/account_holders", model=AccountHolder, body=body, options=options)
def retrieve(
self,
id: str,
*,
headers: Optional[Dict[str, str]] = None,
max_retries: Optional[int] = None,
timeout: Optional[Union[float, Timeout]] = None,
) -> AccountHolder:
"""Check the current status of a KYC or KYB evaluation."""
headers = {"Accept": "application/json", **(headers or {})}
options = make_request_options(headers, max_retries, timeout)
return self.get(f"/account_holders/{id}", model=AccountHolder, options=options)
def create_webhook(
self,
body: AccountHolderCreateWebhookParams,
*,
headers: Optional[Dict[str, str]] = None,
max_retries: Optional[int] = None,
timeout: Optional[Union[float, Timeout]] = None,
) -> AccountHolderCreateWebhookResponse:
"""Create a webhook to receive KYC or KYB evaluation events.
There are two types of account holder webhooks: - `verification`: Webhook sent when the status of a KYC or KYB evaluation changes from `PENDING_DOCUMENT` (KYC) or `PENDING` (KYB) to `ACCEPTED` or `REJECTED`. - `document_upload_front`/`document_upload_back`: Webhook sent when a document upload fails. After a webhook has been created, this endpoint can be used to rotate a webhooks HMAC token or modify the registered URL. Only a single webhook is allowed per program.
"""
headers = {"Accept": "application/json", **(headers or {})}
options = make_request_options(headers, max_retries, timeout)
return self.post(
"/webhooks/account_holders", model=AccountHolderCreateWebhookResponse, body=body, options=options
)
def list_documents(
self,
id: str,
*,
headers: Optional[Dict[str, str]] = None,
max_retries: Optional[int] = None,
timeout: Optional[Union[float, Timeout]] = None,
) -> AccountHolderListDocumentsResponse:
"""Retrieve the status of account holder document uploads, or retrieve
the upload URLs to process your image uploads.
Note that this is not equivalent to checking the status of the
KYC evaluation overall (a document may be successfully uploaded
but not be sufficient for KYC to pass). In the event your upload
URLs have expired, calling this endpoint will refresh them.
Similarly, in the event a previous account holder document
upload has failed, you can use this endpoint to get a new upload
URL for the failed image upload. When a new document upload is
generated for a failed attempt, the response will show an
additional entry in the `required_document_uploads` list in a
`PENDING` state for the corresponding `image_type`.
"""
headers = {"Accept": "application/json", **(headers or {})}
options = make_request_options(headers, max_retries, timeout)
return self.get(f"/account_holders/{id}/documents", model=AccountHolderListDocumentsResponse, options=options)
def resubmit(
self,
id: str,
body: AccountHolderResubmitParams,
*,
headers: Optional[Dict[str, str]] = None,
max_retries: Optional[int] = None,
timeout: Optional[Union[float, Timeout]] = None,
) -> AccountHolder:
"""Resubmit a KYC submission.
This endpoint should be used in cases where a KYC submission
returned a `PENDING_RESUBMIT` result, meaning one or more
critical KYC fields may have been mis-entered and the
individual's identity has not yet been successfully verified.
This step must be completed in order to proceed with the KYC
evaluation. Two resubmission attempts are permitted via this
endpoint before a `REJECTED` status is returned and the account
creation process is ended.
"""
headers = {"Accept": "application/json", **(headers or {})}
options = make_request_options(headers, max_retries, timeout)
return self.post(f"/account_holders/{id}/resubmit", model=AccountHolder, body=body, options=options)
def retrieve_document(
self,
account_holder_token: str,
id: str,
*,
headers: Optional[Dict[str, str]] = None,
max_retries: Optional[int] = None,
timeout: Optional[Union[float, Timeout]] = None,
) -> AccountHolderDocument:
"""Check the status of an account holder document upload, or retrieve
the upload URLs to process your image uploads.
Note that this is not equivalent to checking the status of the
KYC evaluation overall (a document may be successfully uploaded
but not be sufficient for KYC to pass). In the event your upload
URLs have expired, calling this endpoint will refresh them.
Similarly, in the event a document upload has failed, you can
use this endpoint to get a new upload URL for the failed image
upload. When a new account holder document upload is generated
for a failed attempt, the response will show an additional entry
in the `required_document_uploads` array in a `PENDING` state
for the corresponding `image_type`.
"""
headers = {"Accept": "application/json", **(headers or {})}
options = make_request_options(headers, max_retries, timeout)
return self.get(
f"/account_holders/{account_holder_token}/documents/{id}", model=AccountHolderDocument, options=options
)
def upload_document(
    self,
    id: str,
    body: AccountHolderUploadDocumentParams,
    *,
    headers: Optional[Dict[str, str]] = None,
    max_retries: Optional[int] = None,
    timeout: Optional[Union[float, Timeout]] = None,
) -> AccountHolderDocument:
    """Declare which supported government-issued document type will be
    uploaded for further verification.

    The response carries two upload URLs — one for the front image and
    one for the back. Only valid while the evaluation is in a
    `PENDING_DOCUMENT` state; images must be `jpg` or `png` and under
    15 MiB each. Once both required uploads complete, the document is
    run through KYC verification; a registered webhook receives
    evaluation updates and failed-upload notices. Two submission
    attempts are permitted before a `REJECTED` status ends the account
    creation process. Only one document type is supported per KYC
    verification.
    """
    # Caller-supplied headers win on key collisions, including "Accept".
    merged_headers = {"Accept": "application/json"}
    if headers:
        merged_headers.update(headers)
    request_options = make_request_options(merged_headers, max_retries, timeout)
    return self.post(
        f"/account_holders/{id}/documents",
        model=AccountHolderDocument,
        body=body,
        options=request_options,
    )
class AsyncAccountHolders(AsyncAPIResource):
    """Asynchronous client for the `/account_holders` family of endpoints."""

    @staticmethod
    def _request_options(
        headers: Optional[Dict[str, str]],
        max_retries: Optional[int],
        timeout: Optional[Union[float, Timeout]],
    ):
        # Every endpoint accepts JSON; caller-supplied headers win on
        # key collisions (including "Accept").
        merged = {"Accept": "application/json"}
        if headers:
            merged.update(headers)
        return make_request_options(merged, max_retries, timeout)

    async def create(
        self,
        body: AccountHolderCreateParams,
        *,
        headers: Optional[Dict[str, str]] = None,
        max_retries: Optional[int] = None,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> AccountHolder:
        """Run an individual's or business's information through the
        Customer Identification Program (CIP).

        Returns an `account_token` if the status is accepted or pending
        (i.e., further action required). Every call returns an immediate
        response, though the workflow may still be under review or need
        further action to finish account creation. Usable only on
        accounts belonging to the program the calling API key manages.
        """
        return await self.post(
            "/account_holders",
            model=AccountHolder,
            body=body,
            options=self._request_options(headers, max_retries, timeout),
        )

    async def retrieve(
        self,
        id: str,
        *,
        headers: Optional[Dict[str, str]] = None,
        max_retries: Optional[int] = None,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> AccountHolder:
        """Check the current status of a KYC or KYB evaluation."""
        return await self.get(
            f"/account_holders/{id}",
            model=AccountHolder,
            options=self._request_options(headers, max_retries, timeout),
        )

    async def create_webhook(
        self,
        body: AccountHolderCreateWebhookParams,
        *,
        headers: Optional[Dict[str, str]] = None,
        max_retries: Optional[int] = None,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> AccountHolderCreateWebhookResponse:
        """Create a webhook to receive KYC or KYB evaluation events.

        Two webhook types exist: `verification` (sent when an
        evaluation moves from `PENDING_DOCUMENT`/`PENDING` to
        `ACCEPTED` or `REJECTED`) and
        `document_upload_front`/`document_upload_back` (sent when a
        document upload fails). After creation, this endpoint can also
        rotate a webhook's HMAC token or change the registered URL.
        Only one webhook is allowed per program.
        """
        return await self.post(
            "/webhooks/account_holders",
            model=AccountHolderCreateWebhookResponse,
            body=body,
            options=self._request_options(headers, max_retries, timeout),
        )

    async def list_documents(
        self,
        id: str,
        *,
        headers: Optional[Dict[str, str]] = None,
        max_retries: Optional[int] = None,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> AccountHolderListDocumentsResponse:
        """Retrieve the status of an account holder's document uploads.

        This reports on the uploads themselves, not the overall KYC
        evaluation (a document may upload successfully yet still be
        insufficient for KYC to pass). Calling this endpoint refreshes
        expired upload URLs; after a failed upload it issues a new URL,
        which appears as an additional `PENDING` entry in
        `required_document_uploads` for the matching `image_type`.
        """
        return await self.get(
            f"/account_holders/{id}/documents",
            model=AccountHolderListDocumentsResponse,
            options=self._request_options(headers, max_retries, timeout),
        )

    async def resubmit(
        self,
        id: str,
        body: AccountHolderResubmitParams,
        *,
        headers: Optional[Dict[str, str]] = None,
        max_retries: Optional[int] = None,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> AccountHolder:
        """Resubmit a KYC submission.

        Use when a submission returned `PENDING_RESUBMIT`, meaning one
        or more critical KYC fields may have been mis-entered and the
        individual's identity is not yet verified. This step is required
        to proceed with the KYC evaluation. Two resubmission attempts
        are permitted before a `REJECTED` status ends the account
        creation process.
        """
        return await self.post(
            f"/account_holders/{id}/resubmit",
            model=AccountHolder,
            body=body,
            options=self._request_options(headers, max_retries, timeout),
        )

    async def retrieve_document(
        self,
        account_holder_token: str,
        id: str,
        *,
        headers: Optional[Dict[str, str]] = None,
        max_retries: Optional[int] = None,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> AccountHolderDocument:
        """Fetch the state of one account holder document upload.

        This reports on the upload itself, not the overall KYC
        evaluation. Calling this endpoint refreshes expired upload URLs;
        after a failed upload it issues a new URL, which appears as an
        additional `PENDING` entry in `required_document_uploads` for
        the matching `image_type`.
        """
        return await self.get(
            f"/account_holders/{account_holder_token}/documents/{id}",
            model=AccountHolderDocument,
            options=self._request_options(headers, max_retries, timeout),
        )

    async def upload_document(
        self,
        id: str,
        body: AccountHolderUploadDocumentParams,
        *,
        headers: Optional[Dict[str, str]] = None,
        max_retries: Optional[int] = None,
        timeout: Optional[Union[float, Timeout]] = None,
    ) -> AccountHolderDocument:
        """Declare which supported government-issued document type will
        be uploaded for further verification.

        The response carries two upload URLs (front and back image).
        Only valid while the evaluation is in a `PENDING_DOCUMENT`
        state; images must be `jpg` or `png` and under 15 MiB each.
        Once both uploads complete, the document is run through KYC
        verification; registered webhooks receive evaluation updates
        and failed-upload notices. Two submission attempts are
        permitted before `REJECTED` ends account creation; only one
        document type is supported per KYC verification.
        """
        return await self.post(
            f"/account_holders/{id}/documents",
            model=AccountHolderDocument,
            body=body,
            options=self._request_options(headers, max_retries, timeout),
        )
| 53.830189
| 784
| 0.686821
| 2,127
| 17,118
| 5.457922
| 0.117066
| 0.024119
| 0.023258
| 0.026531
| 0.961495
| 0.959083
| 0.945559
| 0.938496
| 0.938496
| 0.937635
| 0
| 0.000306
| 0.236126
| 17,118
| 317
| 785
| 54
| 0.887504
| 0.231978
| 0
| 0.744681
| 1
| 0
| 0.089353
| 0.045326
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037234
| false
| 0
| 0.069149
| 0
| 0.191489
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
334db08e11c83565aad2f90d0e862731f3e0861a
| 4,609
|
py
|
Python
|
app/schemas/operator_bot.py
|
germainlefebvre4/cryptobot-controller
|
4f511d27ac94c0abdb5188a1b0cc133640da852f
|
[
"MIT"
] | null | null | null |
app/schemas/operator_bot.py
|
germainlefebvre4/cryptobot-controller
|
4f511d27ac94c0abdb5188a1b0cc133640da852f
|
[
"MIT"
] | 1
|
2021-09-29T12:59:02.000Z
|
2021-09-29T12:59:02.000Z
|
app/schemas/operator_bot.py
|
germainlefebvre4/cryptobot-controller
|
4f511d27ac94c0abdb5188a1b0cc133640da852f
|
[
"MIT"
] | null | null | null |
from typing import Optional
from datetime import date, datetime
from pydantic import BaseModel
class OperatorBotBase(BaseModel):
    """Base schema holding the full operator-bot configuration.

    Covers Binance API credentials, trading-strategy toggles, logging
    settings, and optional Telegram notification credentials. Fields
    typed `Optional[...]` without a default are treated as optional by
    pydantic and default to ``None``.
    """
    user_id: int
    # Binance API access
    binance_api_url: str = "https://api.binance.com"
    binance_api_key: str
    binance_api_secret: str
    # Trading pair and run-mode configuration
    binance_config_base_currency: str
    binance_config_quote_currency: str
    binance_config_granularity: str = "15m"
    binance_config_live: bool = False
    binance_config_verbose: bool = True
    binance_config_graphs: bool = False
    binance_config_buymaxsize: float
    binance_config_sellupperpcnt: float
    binance_config_selllowerpcnt: float
    # Strategy-signal disable switches
    binance_config_disablebullonly: bool
    binance_config_disablebuynearhigh: bool
    binance_config_disablebuymacd: bool
    binance_config_disablebuyema: bool
    binance_config_disablebuyobv: bool
    binance_config_disablebuyelderray: bool
    binance_config_disablefailsafefibonaccilow: bool
    binance_config_disablefailsafelowerpcnt: bool
    binance_config_disableprofitbankupperpcnt: bool
    binance_config_disableprofitbankfibonaccihigh: bool
    binance_config_disableprofitbankreversal: bool
    # Logging
    logger_filelog: bool = False
    logger_logfile: str = "pycryptobot.log"
    logger_fileloglevel: str = "INFO"
    logger_consolelog: bool = True
    logger_consoleloglevel: str
    # Telegram notifications (optional)
    telegram_client_id: Optional[str]
    telegram_token: Optional[str]
class OperatorBotCreate(BaseModel):
    """Request schema for creating an operator bot.

    Mirrors `OperatorBotBase` field-for-field; kept as a separate model
    so the create payload can evolve independently.
    """
    user_id: int
    # Binance API access
    binance_api_url: str = "https://api.binance.com"
    binance_api_key: str
    binance_api_secret: str
    # Trading pair and run-mode configuration
    binance_config_base_currency: str
    binance_config_quote_currency: str
    binance_config_granularity: str = "15m"
    binance_config_live: bool = False
    binance_config_verbose: bool = True
    binance_config_graphs: bool = False
    binance_config_buymaxsize: float
    binance_config_sellupperpcnt: float
    binance_config_selllowerpcnt: float
    # Strategy-signal disable switches
    binance_config_disablebullonly: bool
    binance_config_disablebuynearhigh: bool
    binance_config_disablebuymacd: bool
    binance_config_disablebuyema: bool
    binance_config_disablebuyobv: bool
    binance_config_disablebuyelderray: bool
    binance_config_disablefailsafefibonaccilow: bool
    binance_config_disablefailsafelowerpcnt: bool
    binance_config_disableprofitbankupperpcnt: bool
    binance_config_disableprofitbankfibonaccihigh: bool
    binance_config_disableprofitbankreversal: bool
    # Logging
    logger_filelog: bool = False
    logger_logfile: str = "pycryptobot.log"
    logger_fileloglevel: str = "INFO"
    logger_consolelog: bool = True
    logger_consoleloglevel: str
    # Telegram notifications (optional)
    telegram_client_id: Optional[str]
    telegram_token: Optional[str]
class OperatorBotUpdate(BaseModel):
    """Request schema for updating an operator bot.

    Omits `user_id` and the currency-pair fields (immutable after
    creation); the strategy disable switches become optional with a
    ``False`` default so partial updates are possible.
    """
    # Binance API access
    binance_api_url: str = "https://api.binance.com"
    binance_api_key: str
    binance_api_secret: str
    # Run-mode configuration
    binance_config_granularity: str = "15m"
    binance_config_live: bool = False
    binance_config_verbose: bool = True
    binance_config_graphs: bool = False
    binance_config_buymaxsize: float
    binance_config_sellupperpcnt: float
    binance_config_selllowerpcnt: float
    # Strategy-signal disable switches (optional on update)
    binance_config_disablebullonly: Optional[bool] = False
    binance_config_disablebuynearhigh: Optional[bool] = False
    binance_config_disablebuymacd: Optional[bool] = False
    binance_config_disablebuyema: Optional[bool] = False
    binance_config_disablebuyobv: Optional[bool] = False
    binance_config_disablebuyelderray: Optional[bool] = False
    binance_config_disablefailsafefibonaccilow: Optional[bool] = False
    binance_config_disablefailsafelowerpcnt: Optional[bool] = False
    binance_config_disableprofitbankupperpcnt: Optional[bool] = False
    binance_config_disableprofitbankfibonaccihigh: Optional[bool] = False
    binance_config_disableprofitbankreversal: Optional[bool] = False
    # Logging
    logger_filelog: bool = False
    logger_logfile: str = "pycryptobot.log"
    logger_fileloglevel: str = "INFO"
    logger_consolelog: bool = True
    logger_consoleloglevel: str
    # Telegram notifications (optional)
    telegram_client_id: Optional[str]
    telegram_token: Optional[str]
class OperatorBotDelete(BaseModel):
    """Request schema identifying the operator bot to delete by name."""
    name: str
class OperatorBot(BaseModel):
    """Response schema exposing an operator bot's public configuration.

    Deliberately excludes API credentials and user identifiers.
    """
    name: str
    # Run-mode configuration
    binance_config_granularity: str = "15m"
    binance_config_live: bool = False
    binance_config_verbose: bool = True
    binance_config_graphs: bool = False
    binance_config_buymaxsize: float
    binance_config_sellupperpcnt: float
    binance_config_selllowerpcnt: float
    # Logging
    logger_filelog: bool = False
    logger_logfile: str = "pycryptobot.log"
    logger_fileloglevel: str = "INFO"
    logger_consolelog: bool = True
    logger_consoleloglevel: str
| 37.169355
| 73
| 0.781514
| 496
| 4,609
| 6.893145
| 0.133065
| 0.247148
| 0.099444
| 0.115823
| 0.87277
| 0.785025
| 0.785025
| 0.785025
| 0.785025
| 0.785025
| 0
| 0.002077
| 0.164461
| 4,609
| 123
| 74
| 37.471545
| 0.885744
| 0
| 0
| 0.823009
| 0
| 0
| 0.034064
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.00885
| 0.026549
| 0
| 0.99115
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
336659651c31eaf4d3d4b709a6a8de2113a798ca
| 5,561
|
py
|
Python
|
venv/Lib/site-packages/_soundfile.py
|
Terrathaw/ba21_loma_2_py
|
eebf5104dd054cef1ab61f0b257933ff679e75ec
|
[
"MIT"
] | 2
|
2019-09-06T05:52:08.000Z
|
2019-11-20T02:26:33.000Z
|
venv/Lib/site-packages/_soundfile.py
|
Terrathaw/ba21_loma_2_py
|
eebf5104dd054cef1ab61f0b257933ff679e75ec
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/_soundfile.py
|
Terrathaw/ba21_loma_2_py
|
eebf5104dd054cef1ab61f0b257933ff679e75ec
|
[
"MIT"
] | null | null | null |
# auto-generated file
import _cffi_backend
ffi = _cffi_backend.FFI('_soundfile',
_version = 0x2601,
_types = b'\x00\x00\x17\x0D\x00\x00\x6D\x03\x00\x00\x07\x01\x00\x00\x6C\x03\x00\x00\x7A\x03\x00\x00\x00\x0F\x00\x00\x17\x0D\x00\x00\x6F\x03\x00\x00\x07\x01\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x17\x0D\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x03\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x17\x0D\x00\x00\x7B\x03\x00\x00\x07\x01\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x6E\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x17\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x6C\x03\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x17\x11\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x17\x11\x00\x00\x6F\x03\x00\x00\x1C\x01\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x17\x11\x00\x00\x07\x01\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x17\x11\x00\x00\x07\x01\x00\x00\x04\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x17\x11\x00\x00\x70\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x17\x11\x00\x00\x74\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x17\x11\x00\x00\x02\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x17\x11\x00\x00\x17\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x17\x11\x00\x00\x79\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x17\x11\x00\x00\x04\x11\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x17\x01\x00\x00\x07\x01\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x04\x11\x00\x00\x17\x01\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x3B\x0D\x00\x00\x7A\x03\x00\x00\x17\x01\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x7A\x0D\x00\x00\x17\x11\x00\x00\x00\x0F\x00\x00\x00\x09\x00\x00\x01\x09\x00\x00\x02\x09\x00\x00\x03\x09\x00\x00\x02\x01\x00\x00\x0E\x01\x00\x00\x00\x0B\x00\x00\x01\x0B\x00\x00\x02\x0B\x00\x00\x0D\x01\x00\x00\x56\x03\x00\x00\x5B\x03\x00\x00\x5E\x03\x00\x00\x63\x03\x00\x00\x05\x01\x00\x00\x00\x01\x00\x00\x10\x01',
_globals = (b'\xFF\xFF\xFF\x0BSFC_FILE_TRUNCATE',4224,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_INFO',4136,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_MAJOR',4145,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_MAJOR_COUNT',4144,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_SUBTYPE',4147,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_SUBTYPE_COUNT',4146,b'\xFF\xFF\xFF\x0BSFC_GET_LIB_VERSION',4096,b'\xFF\xFF\xFF\x0BSFC_GET_LOG_INFO',4097,b'\xFF\xFF\xFF\x0BSFC_SET_CLIPPING',4288,b'\xFF\xFF\xFF\x0BSFC_SET_SCALE_FLOAT_INT_READ',4116,b'\xFF\xFF\xFF\x0BSFC_SET_SCALE_INT_FLOAT_WRITE',4117,b'\xFF\xFF\xFF\x0BSFM_RDWR',48,b'\xFF\xFF\xFF\x0BSFM_READ',16,b'\xFF\xFF\xFF\x0BSFM_WRITE',32,b'\xFF\xFF\xFF\x0BSF_FALSE',0,b'\xFF\xFF\xFF\x0BSF_FORMAT_ENDMASK',805306368,b'\xFF\xFF\xFF\x0BSF_FORMAT_SUBMASK',65535,b'\xFF\xFF\xFF\x0BSF_FORMAT_TYPEMASK',268369920,b'\xFF\xFF\xFF\x0BSF_TRUE',1,b'\x00\x00\x25\x23sf_close',0,b'\x00\x00\x32\x23sf_command',0,b'\x00\x00\x25\x23sf_error',0,b'\x00\x00\x1D\x23sf_error_number',0,b'\x00\x00\x28\x23sf_error_str',0,b'\x00\x00\x22\x23sf_format_check',0,b'\x00\x00\x19\x23sf_get_string',0,b'\x00\x00\x06\x23sf_open',0,b'\x00\x00\x0B\x23sf_open_fd',0,b'\x00\x00\x00\x23sf_open_virtual',0,b'\x00\x00\x25\x23sf_perror',0,b'\x00\x00\x38\x23sf_read_double',0,b'\x00\x00\x3D\x23sf_read_float',0,b'\x00\x00\x42\x23sf_read_int',0,b'\x00\x00\x51\x23sf_read_raw',0,b'\x00\x00\x4C\x23sf_read_short',0,b'\x00\x00\x51\x23sf_readf_double',0,b'\x00\x00\x51\x23sf_readf_float',0,b'\x00\x00\x51\x23sf_readf_int',0,b'\x00\x00\x51\x23sf_readf_short',0,b'\x00\x00\x47\x23sf_seek',0,b'\x00\x00\x2D\x23sf_set_string',0,b'\x00\x00\x16\x23sf_strerror',0,b'\x00\x00\x20\x23sf_version_string',0,b'\x00\x00\x11\x23sf_wchar_open',0,b'\x00\x00\x38\x23sf_write_double',0,b'\x00\x00\x3D\x23sf_write_float',0,b'\x00\x00\x42\x23sf_write_int',0,b'\x00\x00\x51\x23sf_write_raw',0,b'\x00\x00\x4C\x23sf_write_short',0,b'\x00\x00\x68\x23sf_write_sync',0,b'\x00\x00\x51\x23sf_writef_double',0,b'\x00\x00\x51\x23sf_writef_float',0,b'\x00\x00\x51\x23sf_writef_int',0,b'\x00\x00
\x51\x23sf_writef_short',0),
_struct_unions = ((b'\x00\x00\x00\x6B\x00\x00\x00\x02SF_FORMAT_INFO',b'\x00\x00\x02\x11format',b'\x00\x00\x07\x11name',b'\x00\x00\x07\x11extension'),(b'\x00\x00\x00\x6C\x00\x00\x00\x02SF_INFO',b'\x00\x00\x3B\x11frames',b'\x00\x00\x02\x11samplerate',b'\x00\x00\x02\x11channels',b'\x00\x00\x02\x11format',b'\x00\x00\x02\x11sections',b'\x00\x00\x02\x11seekable'),(b'\x00\x00\x00\x6D\x00\x00\x00\x02SF_VIRTUAL_IO',b'\x00\x00\x76\x11get_filelen',b'\x00\x00\x75\x11seek',b'\x00\x00\x77\x11read',b'\x00\x00\x78\x11write',b'\x00\x00\x76\x11tell'),(b'\x00\x00\x00\x6E\x00\x00\x00\x10SNDFILE_tag',)),
_enums = (b'\x00\x00\x00\x71\x00\x00\x00\x16$1\x00SF_FORMAT_SUBMASK,SF_FORMAT_TYPEMASK,SF_FORMAT_ENDMASK',b'\x00\x00\x00\x72\x00\x00\x00\x16$2\x00SFC_GET_LIB_VERSION,SFC_GET_LOG_INFO,SFC_GET_FORMAT_INFO,SFC_GET_FORMAT_MAJOR_COUNT,SFC_GET_FORMAT_MAJOR,SFC_GET_FORMAT_SUBTYPE_COUNT,SFC_GET_FORMAT_SUBTYPE,SFC_FILE_TRUNCATE,SFC_SET_CLIPPING,SFC_SET_SCALE_FLOAT_INT_READ,SFC_SET_SCALE_INT_FLOAT_WRITE',b'\x00\x00\x00\x73\x00\x00\x00\x16$3\x00SF_FALSE,SF_TRUE,SFM_READ,SFM_WRITE,SFM_RDWR'),
_typenames = (b'\x00\x00\x00\x6BSF_FORMAT_INFO',b'\x00\x00\x00\x6CSF_INFO',b'\x00\x00\x00\x6DSF_VIRTUAL_IO',b'\x00\x00\x00\x6ESNDFILE',b'\x00\x00\x00\x3Bsf_count_t',b'\x00\x00\x00\x76sf_vio_get_filelen',b'\x00\x00\x00\x77sf_vio_read',b'\x00\x00\x00\x75sf_vio_seek',b'\x00\x00\x00\x76sf_vio_tell',b'\x00\x00\x00\x78sf_vio_write'),
)
| 463.416667
| 2,032
| 0.769286
| 1,229
| 5,561
| 3.3214
| 0.142392
| 0.365997
| 0.114895
| 0.066634
| 0.596276
| 0.512984
| 0.44586
| 0.32827
| 0.285154
| 0.285154
| 0
| 0.295153
| 0.009351
| 5,561
| 11
| 2,033
| 505.545455
| 0.445816
| 0.003417
| 0
| 0
| 1
| 0.444444
| 0.876354
| 0.860108
| 0
| 1
| 0.001083
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6870f58fa5a4098dd97c957e5fe0e9d08383e6eb
| 47
|
py
|
Python
|
rerldo/command_line.py
|
mark-solo/python_packaging_practice
|
93619ee22f0ae0b8a648e9695d114e5b856cdf73
|
[
"MIT"
] | null | null | null |
rerldo/command_line.py
|
mark-solo/python_packaging_practice
|
93619ee22f0ae0b8a648e9695d114e5b856cdf73
|
[
"MIT"
] | null | null | null |
rerldo/command_line.py
|
mark-solo/python_packaging_practice
|
93619ee22f0ae0b8a648e9695d114e5b856cdf73
|
[
"MIT"
] | null | null | null |
import rerldo
def main():
    """Console-script entry point: print the result of ``rerldo.yup()``."""
    output = rerldo.yup()
    print(output)
| 11.75
| 20
| 0.702128
| 7
| 47
| 4.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 47
| 4
| 20
| 11.75
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d7a4d0e7f452ccd3c777fc365f6a86b15e073d01
| 44,323
|
py
|
Python
|
loose_seal/filtersQC_vgat_ctrl.py
|
opavon/PAG_ephys_analysis
|
a9a27ad359afacb497b14ff5698421b2816f38a2
|
[
"MIT"
] | null | null | null |
loose_seal/filtersQC_vgat_ctrl.py
|
opavon/PAG_ephys_analysis
|
a9a27ad359afacb497b14ff5698421b2816f38a2
|
[
"MIT"
] | null | null | null |
loose_seal/filtersQC_vgat_ctrl.py
|
opavon/PAG_ephys_analysis
|
a9a27ad359afacb497b14ff5698421b2816f38a2
|
[
"MIT"
] | null | null | null |
# Defaults
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
##### dlpag #####
# dlpag_vgat_171113_c5_LIAI_OP_clear_VC_1
# prominence (75-250), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = 20, QC_wh_max = float('inf'),
QC_pw_min = 2, QC_pw_max = 5,
QC_ph_min = 50, QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -85,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_171114_c4_LIAM_OP_clear_VC_1
# prominence (75-200), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = 0, QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_171117_c1_LIAR_OP_clear_VC_2
# prominence (75-200), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = 20,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -80,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = 12
)
# dlpag_vgat_171123_c2_LIBA_OP_clear_VC_1
# prominence (50-200), peak (NA)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = -50, QC_pb_max = 0,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_171123_c8_LIBG_OP_clear_VC_1
# prominence (45-250), peak (-250-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -100,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_171214_c3_LIBR_OP_clear_VC_2
# prominence (100-300), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_180208_c1_LICB_OP_clear_VC_2
# prominence (42, 150), peak (-100-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = 20,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = 3, QC_rb_max = float('inf')
)
# dlpag_vgat_180208_c2_LICC_OP_clear_VC_1
# prominence (26-150), peak (-100-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = 25,
QC_ph_min = 0, QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -17,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_180212_c1_LICF_OP_clear_VC_1
# prominence (100-400), peak (-400-0), baselined -75 to -25 (instead of -100 to -25)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_180212_c2_LICG_OP_clear_VC_1
# prominence (100-400), peak (-400-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_190201_c1_LICN_OP_VC_clear_nointerval_2
# prominence (80-200), peak (-400-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = 0,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_190204_c1_LICT_OP_VC_clear_nointerval_3
# prominence (50-105), peak (-85-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = 57, QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = 8,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -45,
QC_lb_min = 5, QC_lb_max = float('inf'),
QC_rb_min = 5, QC_rb_max = float('inf')
)
# dlpag_vgat_200717_c1_LICX_OP_VC_clear_nointerval_2
# prominence (35-150), peak (NA)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 8, QC_pw_max = 15,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dlpag_vgat_200717_c1_LICX_OP_VC_clear_nointerval_3
# prominence (100-400), peak (-330-0), had to use 8 sample points to find spike end
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 8, QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = 20
)
# dlpag_vgat_200720_c2_LIDG_OP_VC_clear_nointerval_1
# prominence (50-125), peak (-200-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = 4, QC_rb_max = 20
)
# dlpag_vgat_201029_c1_LIEB_OP_VC_clear_nointerval_1
# prominence (50-150), peak (-125-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = 5, QC_wh_max = float('inf'),
QC_pw_min = 8.5, QC_pw_max = 20,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 12, QC_lb_max = float('inf'),
QC_rb_min = 8, QC_rb_max = float('inf')
)
# dlpag_vgat_201105_c1_LIEF_OP_VC_clear_nointerval_1
# prominence (30-70), peak (10-100)
# use -sweep_IB_concatenated instead of sweep_IB_concatenated in findPeaks as spikes are larger in the positive side and otherwise they don't get detected
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 7, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = 10, QC_pb_max = float('inf'),
QC_lb_min = 5, QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
##### dmpag #####
# dmpag_vgat_171114_c2_LIAK_OP_clear_VC_1
# prominence (35-200), peak (-140-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_171117_c4_LIAU_OP_clear_VC_1
# prominence (75-500), peak (-420-320)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 2.5, QC_pw_max = 10,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_171117_c5_LIAV_OP_clear_VC_1
# prominence (40-300), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 2, QC_pw_max = 8,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_171123_c4_LIBC_OP_clear_VC_1
# prominence (100-300), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_171123_c5_LIBD_OP_clear_VC_1
# prominence (50-350), peak (-400-225)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = 25,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_171130_c3_LIBJ_OP_clear_VC_2
# prominence (40-150), peak (-200-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = 40,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_171218_c3_LIBV_OP_clear_VC_2
# prominence (75-300), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = 10,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_171218_c7_LIBZ_OP_clear_VC_2
# prominence (150-350), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_190124_c1_LICI_OP_VC_clear_nointerval_2
# prominence (100-160), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 8, QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_190201_c4_LICQ_OP_VC_clear_nointerval_1
# prominence (100-350), peak (-400-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = 10,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = 4, QC_rb_max = float('inf')
)
# dmpag_vgat_190201_c4_LICQ_OP_VC_clear_nointerval_2
# prominence (40-250), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = 10,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 5, QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = 10
)
# dmpag_vgat_200717_c8_LIDB_OP_VC_clear_nointerval_1
# prominence (38-95), peak (-100-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3.35, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 5.5, QC_lb_max = float('inf'),
QC_rb_min = 4, QC_rb_max = 20
)
# dmpag_vgat_201008_c1_LIDT_OP_VC_clear_nointerval_1
# prominence (100-350), peak (-350-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_201008_c1_LIDT_OP_VC_clear_nointerval_2
# prominence (50-150), peak (-100-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = 11.5
)
# dmpag_vgat_201029_c1_LIEB_OP_VC_clear_nointerval_2
# prominence (40-105), peak (-74-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = 50,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 4, QC_lb_max = float('inf'),
QC_rb_min = 5, QC_rb_max = float('inf')
)
# dmpag_vgat_201029_c3_LIED_OP_VC_clear_nointerval_1
# prominence (39-100), peak (-100-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 5, QC_pw_max = 10,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 5, QC_lb_max = float('inf'),
QC_rb_min = 4, QC_rb_max = float('inf')
)
# dmpag_vgat_201029_c3_LIED_OP_VC_clear_nointerval_2
# prominence (50-100), peak (-100-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = 51.8, QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = 20,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 5, QC_lb_max = float('inf'),
QC_rb_min = 5, QC_rb_max = float('inf')
)
# dmpag_vgat_201102_c1_LIEE_OP_VC_clear_nointerval_1
# prominence (100-300), peak (-300-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = 100,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 10, QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# dmpag_vgat_201126_c1_LIEI_OP_VC_clear_nointerval_1
# prominence (39-100), peak (-200-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3.9, QC_pw_max = 20,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 6.1, QC_lb_max = float('inf'),
QC_rb_min = 4, QC_rb_max = 65
)
# dmpag_vgat_201202_c1_LIEM_OP_VC_clear_nointerval_1
# prominence (60-125), peak (-150-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 4, QC_lb_max = float('inf'),
QC_rb_min = 4, QC_rb_max = float('inf')
)
# dmpag_vgat_201202_c2_LIEN_OP_VC_clear_nointerval_2
# prominence (280-500), peak (-500-100)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = 50,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = 5, QC_lb_max = float('inf'),
QC_rb_min = 4, QC_rb_max = float('inf')
)
# dmpag_vgat_201202_c3_LIEO_OP_VC_clear_nointerval_1
# prominence (40-92), peak (-100-0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4.5, QC_pw_max = 15,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
##### lpag #####
# lpag_vgat_201125_c4_LDIBL_OP_VC_clear_nointerval_1
# prominence (50, 150), peak (-200, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = 8,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# lpag_vgat_171130_c5_LIBL_OP_clear_VC_1
# prominence (75, 175), peak (-200, -75)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -20,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# lpag_vgat_171211_c2_LIBN_OP_clear_VC_1
# prominence (60, 150), peak (-100, -54)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# lpag_vgat_171214_c2_LIBQ_OP_clear_VC_1
# prominence (75, 150), peak (-200, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# lpag_vgat_171218_c1_LIBT_OP_clear_VC_1
# prominence (95, 225), peak (-160, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = 7.5,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = 70
)
# lpag_vgat_171218_c4_LIBW_OP_clear_VC_2
# prominence (100, 260), peak (-208, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = 20,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# lpag_vgat_190201_c3_LICP_OP_VC_clear_nointerval_1
# prominence (75, 165), peak (-200, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3.4, QC_pw_max = 5.8,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -73,
QC_lb_min = 5, QC_lb_max = float('inf'),
QC_rb_min = 5, QC_rb_max = float('inf')
)
# lpag_vgat_190204_c2_LICU_OP_VC_clear_nointerval_3
# prominence (50, 150), peak (-200, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3.5, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# lpag_vgat_190204_c4_LICW_OP_VC_clear_nointerval_1
# prominence (100, 300), peak (-300, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# lpag_vgat_201126_c2_LIEJ_OP_VC_clear_nointerval_1
# prominence (25, 82), peak (-58, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 5, QC_pw_max = 22,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -10,
QC_lb_min = 7, QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
##### vlpag #####
# vlpag_vgat_171027_c4_LIAD_OP_clear_VC_1
# prominence (225, 375), peak (-400, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = 25,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171113_c3_LIAG_OP_clear_VC_1
# prominence (30, 100), peak (-100, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 4, QC_pw_max = 25,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171114_c1_LIAJ_OP_clear_VC_2
# prominence (100, 250), peak (-250, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171114_c3_LIAL_OP_clear_VC_1
# prominence (20, 100), peak (NA)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 6, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = -20,
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171123_c3_LIBB_OP_clear_VC_1
# prominence (100, 350), peak (-350, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171123_c7_LIBF_OP_clear_VC_2
# prominence (180, 350), peak (-300, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = 20,
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171130_c1_LIBH_OP_clear_VC_1
# prominence (200, 400), peak (-400, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171130_c1_LIBH_OP_clear_VC_2
# prominence (100, 200), peak (-200, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171218_c5_LIBX_OP_clear_VC_1
# prominence (220, 325), peak (-300, -150)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_171218_c8_LICA_OP_clear_VC_1
# prominence (150, 300), peak (-300, -100)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = float('-inf'), QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
# vlpag_vgat_190124_c2_LICJ_OP_VC_clear_nointerval_1
# prominence (85, 200), peak (-150, 0)
peaks_QC, cut_spikes_QC, cut_spikes_holding_QC, cut_spikes_baselined_QC, parameters_QC = spikesQC(
file_name, peaks, peaks_properties,
cut_spikes, cut_spikes_holding, cut_spikes_baselined,
filter_by = ['p', 'wh', 'pw', 'ph', 'pb', 'lb', 'rb'],
QC_p_min = float('-inf'), QC_p_max = float('inf'),
QC_wh_min = float('-inf'), QC_wh_max = float('inf'),
QC_pw_min = 3.7, QC_pw_max = float('inf'),
QC_ph_min = float('-inf'), QC_ph_max = float('inf'),
QC_pb_min = float('-inf'), QC_pb_max = float('inf'),
QC_lb_min = float('-inf'), QC_lb_max = float('inf'),
QC_rb_min = float('-inf'), QC_rb_max = float('inf')
)
| 47.865011
| 154
| 0.659229
| 7,456
| 44,323
| 3.473981
| 0.02897
| 0.226701
| 0.262914
| 0.178172
| 0.957957
| 0.94595
| 0.922786
| 0.916609
| 0.911628
| 0.903174
| 0
| 0.033165
| 0.163256
| 44,323
| 926
| 155
| 47.865011
| 0.665247
| 0.115719
| 0
| 0.838798
| 0
| 0
| 0.085827
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
d7f5a22d04cde240f31cda4bae69a0e94594f49a
| 123
|
py
|
Python
|
dummy1.py
|
amirostadi/dummy-rep
|
ce8ea553faa72935fd9e8e3207e6760a1ca4d95c
|
[
"MIT"
] | null | null | null |
dummy1.py
|
amirostadi/dummy-rep
|
ce8ea553faa72935fd9e8e3207e6760a1ca4d95c
|
[
"MIT"
] | null | null | null |
dummy1.py
|
amirostadi/dummy-rep
|
ce8ea553faa72935fd9e8e3207e6760a1ca4d95c
|
[
"MIT"
] | null | null | null |
# This is a test file to check my GitHub is working.
def print_dummy():
    """Print two dummy lines to stdout.

    Fixed: the original `def print_dummy()` line was missing the trailing
    colon, which made the file a SyntaxError.
    """
    print("dummy")
    print("dummy dummy")


print_dummy()
| 15.375
| 50
| 0.723577
| 21
| 123
| 4.142857
| 0.619048
| 0.45977
| 0.517241
| 0.45977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 123
| 7
| 51
| 17.571429
| 0.852941
| 0.398374
| 0
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
0be1e6042b6414574d83afb22ca653a85f7d585d
| 105,924
|
py
|
Python
|
dlkit/abstract_osid/resource/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/resource/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/resource/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of resource abstract base class managers."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class ResourceProfile:
    """The resource profile describes interoperability among resource services."""
    # NOTE(review): `__metaclass__` is Python 2 syntax. Under Python 3 this
    # assignment has no effect, so the class is not actually abstract there;
    # changing it to `metaclass=abc.ABCMeta` would alter instantiation
    # behavior for subclasses -- confirm intent before changing.
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def supports_visible_federation(self):
        """Tests if federation is visible.
        :return: ``true`` if visible federation is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_lookup(self):
        """Tests if resource lookup is supported.
        :return: ``true`` if resource lookup is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_query(self):
        """Tests if resource query is supported.
        :return: ``true`` if resource query is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_search(self):
        """Tests if resource search is supported.
        :return: ``true`` if resource search is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_admin(self):
        """Tests if resource administration is supported.
        :return: ``true`` if resource administration is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_notification(self):
        """Tests if resource notification is supported.
        Messages may be sent when resources are created, modified, or
        deleted.
        :return: ``true`` if resource notification is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_bin(self):
        """Tests if retrieving mappings of resource and bins is supported.
        :return: ``true`` if resource bin mapping retrieval is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_bin_assignment(self):
        """Tests if managing mappings of resource and bins is supported.
        :return: ``true`` if resource bin assignment is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_smart_bin(self):
        """Tests if resource smart bins are available.
        :return: ``true`` if resource smart bins are supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_membership(self):
        """Tests if membership queries are supported.
        :return: ``true`` if membership queries are supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_group(self):
        """Tests if group resources are supported.
        :return: ``true`` if group resources are supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_group_assignment(self):
        """Tests if group resource assignment is supported.
        :return: ``true`` if group resource assignment is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_group_notification(self):
        """Tests if group resource notification is supported.
        :return: ``true`` if group resource notification is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_group_hierarchy(self):
        """Tests if a group resource hierarchy service is supported.
        :return: ``true`` if group resource hierarchy is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_agent(self):
        """Tests if retrieving mappings of resource and agents is supported.
        :return: ``true`` if resource agent mapping retrieval is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_agent_assignment(self):
        """Tests if managing mappings of resources and agents is supported.
        :return: ``true`` if resource agent assignment is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_lookup(self):
        """Tests if looking up resource relationships is supported.
        :return: ``true`` if resource relationships lookup is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_query(self):
        """Tests if querying resource relationships is supported.
        :return: ``true`` if resource relationships query is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_search(self):
        """Tests if searching resource relationships is supported.
        :return: ``true`` if resource relationships search is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_admin(self):
        """Tests if a resource relationships administrative service is supported.
        :return: ``true`` if resource relationships administration is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_notification(self):
        """Tests if a resource relationships notification service is supported.
        :return: ``true`` if resource relationships notification is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_bin(self):
        """Tests if retrieving mappings of resource relationships and bins is supported.
        :return: ``true`` if resource relationship bin mapping retrieval is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_bin_assignment(self):
        """Tests if managing mappings of resource relationships and bins is supported.
        :return: ``true`` if resource relationship bin assignment is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_relationship_smart_bin(self):
        """Tests if resource relationship smart bins are available.
        :return: ``true`` if resource relationship smart bins are supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_bin_lookup(self):
        """Tests if bin lookup is supported.
        :return: ``true`` if bin lookup is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_bin_query(self):
        """Tests if bin query is supported.
        :return: ``true`` if bin query is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_bin_search(self):
        """Tests if bin search is supported.
        :return: ``true`` if bin search is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_bin_admin(self):
        """Tests if bin administration is supported.
        :return: ``true`` if bin administration is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_bin_notification(self):
        """Tests if bin notification is supported.
        Messages may be sent when ``Bin`` objects are created, deleted
        or updated. Notifications for resources within bins are sent via
        the resource notification session.
        :return: ``true`` if bin notification is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_bin_hierarchy(self):
        """Tests if a bin hierarchy traversal is supported.
        :return: ``true`` if a bin hierarchy traversal is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_bin_hierarchy_design(self):
        """Tests if a bin hierarchy design is supported.
        :return: ``true`` if a bin hierarchy design is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_batch(self):
        """Tests if a resource batch service is available.
        :return: ``true`` if a resource batch service is available, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def supports_resource_demographic(self):
        """Tests if a resource demographic service is available.
        :return: ``true`` if a resource demographic service is available, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def get_resource_record_types(self):
        """Gets all the resource record types supported.
        :return: the list of supported resource record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # osid.type.TypeList
    # Property alias so implementations can expose the getter as an attribute.
    resource_record_types = property(fget=get_resource_record_types)
    @abc.abstractmethod
    def supports_resource_record_type(self, resource_record_type):
        """Tests if a given resource record type is supported.
        :param resource_record_type: the resource type
        :type resource_record_type: ``osid.type.Type``
        :return: ``true`` if the resource record type is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def get_resource_search_record_types(self):
        """Gets all the resource search record types supported.
        :return: the list of supported resource search record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # osid.type.TypeList
    resource_search_record_types = property(fget=get_resource_search_record_types)
    @abc.abstractmethod
    def supports_resource_search_record_type(self, resource_search_record_type):
        """Tests if a given resource search type is supported.
        :param resource_search_record_type: the resource search type
        :type resource_search_record_type: ``osid.type.Type``
        :return: ``true`` if the resource search record type is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_search_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def get_resource_relationship_record_types(self):
        """Gets the supported ``ResourceRelationship`` record types.
        :return: a list containing the supported ``ResourceRelationship`` record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # osid.type.TypeList
    resource_relationship_record_types = property(fget=get_resource_relationship_record_types)
    @abc.abstractmethod
    def supports_resource_relationship_record_type(self, resource_relationship_record_type):
        """Tests if the given ``ResourceRelationship`` record type is supported.
        :param resource_relationship_record_type: a ``Type`` indicating a ``ResourceRelationship`` record type
        :type resource_relationship_record_type: ``osid.type.Type``
        :return: ``true`` if the given type is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_relationship_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def get_resource_relationship_search_record_types(self):
        """Gets the supported ``ResourceRelationship`` search record types.
        :return: a list containing the supported ``ResourceRelationship`` search record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # osid.type.TypeList
    resource_relationship_search_record_types = property(fget=get_resource_relationship_search_record_types)
    @abc.abstractmethod
    def supports_resource_relationship_search_record_type(self, resource_relationship_search_record_type):
        """Tests if the given ``ResourceRelationship`` search record type is supported.
        :param resource_relationship_search_record_type: a ``Type`` indicating a ``ResourceRelationship`` search record type
        :type resource_relationship_search_record_type: ``osid.type.Type``
        :return: ``true`` if the given Type is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_relationship_search_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def get_bin_record_types(self):
        """Gets all the bin record types supported.
        :return: the list of supported bin record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # osid.type.TypeList
    bin_record_types = property(fget=get_bin_record_types)
    @abc.abstractmethod
    def supports_bin_record_type(self, bin_record_type):
        """Tests if a given bin record type is supported.
        :param bin_record_type: the bin record type
        :type bin_record_type: ``osid.type.Type``
        :return: ``true`` if the bin record type is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``bin_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
    @abc.abstractmethod
    def get_bin_search_record_types(self):
        """Gets all the bin search record types supported.
        :return: the list of supported bin search record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # osid.type.TypeList
    bin_search_record_types = property(fget=get_bin_search_record_types)
    @abc.abstractmethod
    def supports_bin_search_record_type(self, bin_search_record_type):
        """Tests if a given bin search record type is supported.
        :param bin_search_record_type: the bin search record type
        :type bin_search_record_type: ``osid.type.Type``
        :return: ``true`` if the bin search record type is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``bin_search_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return # boolean
class ResourceManager:
"""The resource manager provides access to resource lookup and creation sessions and provides interoperability tests for various aspects of this service.
The sessions included in this manager are:
* ``ResourceLookupSession:`` a session to retrieve resources
* ``ResourceQuerySession:`` a session to query resources
* ``ResourceSearchSession:`` a session to search for resources
* ``ResourceAdminSession:`` a session to create and delete
resources
* ``ResourceNotificationSession:`` a session to receive
notifications pertaining to resource changes
* ``ResourceBinSession:`` a session to look up resource to bin
mappings
* ``ResourceBinAssignmentSession:`` a session to manage resource
to bin mappings
* ``ResourceSmartBinSession:`` a session to manage smart resource
bins
* ``MembershipSession:`` a session to query memberships
* ``GroupSession:`` a session to retrieve group memberships
* ``GroupAssignmentSession:`` a session to manage groups
* ``GroupNotificationSession:`` a session to retrieve
notifications on changes to group membership
* ``GroupHierarchySession:`` a session to view a group hierarchy
* ``RsourceAgentSession:`` a session to retrieve ``Resource`` and
``Agent`` mappings
* ``ResourceAgentAssignmentSession:`` a session to manage
``Resource`` and ``Agent`` mappings
* ``ResourceRelationshipLookupSession:`` a session to retrieve
resource relationships
* ``ResourceRelationshipQuerySession:`` a session to query for
resource relationships
* ``ResourceRelationshipSearchSession:`` a session to search for
resource relationships
* ``ResourceRelationshipAdminSession:`` a session to create and
delete resource relationships
* ``ResourceRelationshipNotificationSession:`` a session to
receive notifications pertaining to resource relationshipchanges
* ``ResourceRelationshipBinSession:`` a session to look up
resource relationship to bin mappings
* ``ResourceRelationshipBinAssignmentSession:`` a session to
manage resource relationship to bin mappings
* ``ResourceRelationshipSmartBinSession:`` a session to manage
smart resource relationship bins
* ``BinLookupSession: a`` session to retrieve bins
* ``BinQuerySession:`` a session to query bins
* ``BinSearchSession:`` a session to search for bins
* ``BinAdminSession:`` a session to create, update and delete bins
* ``BinNotificationSession:`` a session to receive notifications
pertaining to changes in bins
* ``BinHierarchySession:`` a session to traverse bin hierarchies
* ``BinHierarchyDesignSession:`` a session to manage bin
hierarchies
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_resource_lookup_session(self):
"""Gets the ``OsidSession`` associated with the resource lookup service.
:return: ``a ResourceLookupSession``
:rtype: ``osid.resource.ResourceLookupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_lookup()`` is ``true``.*
"""
return # osid.resource.ResourceLookupSession
resource_lookup_session = property(fget=get_resource_lookup_session)
@abc.abstractmethod
def get_resource_lookup_session_for_bin(self, bin_id):
"""Gets the ``OsidSession`` associated with the resource lookup service for the given bin.
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: ``a ResourceLookupSession``
:rtype: ``osid.resource.ResourceLookupSession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_resource_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.resource.ResourceLookupSession
@abc.abstractmethod
def get_resource_query_session(self):
"""Gets a resource query session.
:return: ``a ResourceQuerySession``
:rtype: ``osid.resource.ResourceQuerySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_query()`` is ``true``.*
"""
return # osid.resource.ResourceQuerySession
resource_query_session = property(fget=get_resource_query_session)
@abc.abstractmethod
def get_resource_query_session_for_bin(self, bin_id):
"""Gets a resource query session for the given bin.
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: ``a ResourceQuerySession``
:rtype: ``osid.resource.ResourceQuerySession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_resource_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.resource.ResourceQuerySession
@abc.abstractmethod
def get_resource_search_session(self):
"""Gets a resource search session.
:return: ``a ResourceSearchSession``
:rtype: ``osid.resource.ResourceSearchSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_search()`` is ``true``.*
"""
return # osid.resource.ResourceSearchSession
resource_search_session = property(fget=get_resource_search_session)
@abc.abstractmethod
def get_resource_search_session_for_bin(self, bin_id):
"""Gets a resource search session for the given bin.
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: ``a ResourceSearchSession``
:rtype: ``osid.resource.ResourceSearchSession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_resource_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.resource.ResourceSearchSession
@abc.abstractmethod
def get_resource_admin_session(self):
"""Gets a resource administration session for creating, updating and deleting resources.
:return: ``a ResourceAdminSession``
:rtype: ``osid.resource.ResourceAdminSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_admin()`` is ``true``.*
"""
return # osid.resource.ResourceAdminSession
resource_admin_session = property(fget=get_resource_admin_session)
@abc.abstractmethod
def get_resource_admin_session_for_bin(self, bin_id):
"""Gets a resource administration session for the given bin.
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: ``a ResourceAdminSession``
:rtype: ``osid.resource.ResourceAdminSession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.resource.ResourceAdminSession
@abc.abstractmethod
def get_resource_notification_session(self, resource_receiver):
"""Gets the notification session for notifications pertaining to resource changes.
:param resource_receiver: the notification callback
:type resource_receiver: ``osid.resource.ResourceReceiver``
:return: ``a ResourceNotificationSession``
:rtype: ``osid.resource.ResourceNotificationSession``
:raise: ``NullArgument`` -- ``resource_receiver`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_notification()`` is ``true``.*
"""
return # osid.resource.ResourceNotificationSession
@abc.abstractmethod
def get_resource_notification_session_for_bin(self, resource_receiver, bin_id):
"""Gets the resource notification session for the given bin.
:param resource_receiver: the notification callback
:type resource_receiver: ``osid.resource.ResourceReceiver``
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: ``a ResourceNotificationSession``
:rtype: ``osid.resource.ResourceNotificationSession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``resource_receiver`` or ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_resource_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_notfication()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.resource.ResourceNotificationSession
@abc.abstractmethod
def get_resource_bin_session(self):
"""Gets the session for retrieving resource to bin mappings.
:return: a ``ResourceBinSession``
:rtype: ``osid.resource.ResourceBinSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_bin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_bin()`` is ``true``.*
"""
return # osid.resource.ResourceBinSession
resource_bin_session = property(fget=get_resource_bin_session)
@abc.abstractmethod
def get_resource_bin_assignment_session(self):
"""Gets the session for assigning resource to bin mappings.
:return: a ``ResourceBinAssignmentSession``
:rtype: ``osid.resource.ResourceBinAssignmentSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_bin_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_bin_assignment()`` is ``true``.*
"""
return # osid.resource.ResourceBinAssignmentSession
resource_bin_assignment_session = property(fget=get_resource_bin_assignment_session)
@abc.abstractmethod
def get_resource_smart_bin_session(self, bin_id):
"""Gets the session for managing dynamic resource bins.
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: a ``ResourceSmartBinSession``
:rtype: ``osid.resource.ResourceSmartBinSession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_resource_smart_bin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_smart_bin()`` is ``true``.*
"""
return # osid.resource.ResourceSmartBinSession
@abc.abstractmethod
def get_membership_session(self):
"""Gets the session for querying memberships.
:return: a ``MembershipSession``
:rtype: ``osid.resource.MembershipSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_membership()`` is ``false``
*compliance: optional -- This method must be implemented if
``support_membership()`` is ``true``.*
"""
return # osid.resource.MembershipSession
membership_session = property(fget=get_membership_session)
@abc.abstractmethod
def get_membership_session_for_bin(self, bin_id):
"""Gets a resource membership session for the given bin.
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: ``a MembershipSession``
:rtype: ``osid.resource.MembershipSession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_membership()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_membership()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.resource.MembershipSession
@abc.abstractmethod
def get_group_session(self):
"""Gets the session for retrieving gropup memberships.
:return: a ``GroupSession``
:rtype: ``osid.resource.GroupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_group()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_group()`` is ``true``.*
"""
return # osid.resource.GroupSession
group_session = property(fget=get_group_session)
@abc.abstractmethod
def get_group_session_for_bin(self, bin_id):
"""Gets a group session for the given bin.
:param bin_id: the ``Id`` of the bin
:type bin_id: ``osid.id.Id``
:return: a ``GroupSession``
:rtype: ``osid.resource.GroupSession``
:raise: ``NotFound`` -- ``bin_id`` not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_group()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_group()`` and ``supports_visible_federation()`` are
``true``.*
"""
return # osid.resource.GroupSession
@abc.abstractmethod
def get_group_assignment_session(self):
"""Gets the session for assigning resources to groups.
:return: a ``GroupAssignmentSession``
:rtype: ``osid.resource.GroupAssignmentSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_group_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_group_assignment()`` is ``true``.*
"""
return # osid.resource.GroupAssignmentSession
group_assignment_session = property(fget=get_group_assignment_session)
    @abc.abstractmethod
    def get_group_assignment_session_for_bin(self, bin_id):
        """Gets a group assignment session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :return: a ``GroupAssignmentSession``
        :rtype: ``osid.resource.GroupAssignmentSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_assignment()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_assignment()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.GroupAssignmentSession
    @abc.abstractmethod
    def get_group_notification_session(self, group_rceeiver):
        # NOTE(review): the parameter name carries a typo ('rceeiver' for
        # 'receiver'); renaming it would break keyword-argument callers, so it
        # is kept and documented as-is.
        """Gets the notification session for notifications pertaining to resource changes.

        :param group_rceeiver: the notification callback
        :type group_rceeiver: ``osid.resource.GroupReceiver``
        :return: a ``GroupNotificationSession``
        :rtype: ``osid.resource.GroupNotificationSession``
        :raise: ``NullArgument`` -- ``group_rceeiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_notification()`` is ``true``.*

        """
        return # osid.resource.GroupNotificationSession
    @abc.abstractmethod
    def get_group_notification_session_for_bin(self, group_rceeiver, bin_id):
        # NOTE(review): the parameter name carries a typo ('rceeiver' for
        # 'receiver'); renaming it would break keyword-argument callers, so it
        # is kept and documented as-is.
        """Gets the group notification session for the given bin.

        :param group_rceeiver: the notification callback
        :type group_rceeiver: ``osid.resource.GroupReceiver``
        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :return: a ``GroupNotificationSession``
        :rtype: ``osid.resource.GroupNotificationSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``group_rceeiver`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_notification()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.GroupNotificationSession
    @abc.abstractmethod
    def get_group_hierarchy_session(self):
        """Gets a session for retrieving group hierarchies.

        :return: a ``GroupHierarchySession``
        :rtype: ``osid.resource.GroupHierarchySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_hierarchy()`` is ``true``.*

        """
        return # osid.resource.GroupHierarchySession
    group_hierarchy_session = property(fget=get_group_hierarchy_session)
    @abc.abstractmethod
    def get_group_hierarchy_session_for_bin(self, bin_id):
        """Gets a group hierarchy session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :return: a ``GroupHierarchySession``
        :rtype: ``osid.resource.GroupHierarchySession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_hierarchy()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_hierarchy()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.GroupHierarchySession
    @abc.abstractmethod
    def get_resource_agent_session(self):
        """Gets the session for retrieving resource agent mappings.

        :return: a ``ResourceAgentSession``
        :rtype: ``osid.resource.ResourceAgentSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agent()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agent()`` is ``true``.*

        """
        return # osid.resource.ResourceAgentSession
    resource_agent_session = property(fget=get_resource_agent_session)
    @abc.abstractmethod
    def get_resource_agent_session_for_bin(self, bin_id):
        """Gets a resource agent session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceAgentSession``
        :rtype: ``osid.resource.ResourceAgentSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agent()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agent()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceAgentSession
    @abc.abstractmethod
    def get_resource_agent_assignment_session(self):
        """Gets the session for assigning agents to resources.

        :return: a ``ResourceAgentAssignmentSession``
        :rtype: ``osid.resource.ResourceAgentAssignmentSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agent_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agent_assignment()`` is ``true``.*

        """
        return # osid.resource.ResourceAgentAssignmentSession
    resource_agent_assignment_session = property(fget=get_resource_agent_assignment_session)
    @abc.abstractmethod
    def get_resource_agent_assignment_session_for_bin(self, bin_id):
        """Gets a resource agent assignment session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceAgentAssignmentSession``
        :rtype: ``osid.resource.ResourceAgentAssignmentSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agent_assignment()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agent_assignment()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceAgentAssignmentSession
    @abc.abstractmethod
    def get_resource_relationship_lookup_session(self):
        """Gets the ``OsidSession`` associated with the resource relationship lookup service.

        :return: a ``ResourceRelationshipLookupSession``
        :rtype: ``osid.resource.ResourceRelationshipLookupSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_lookup()`` is ``true``.*

        """
        return # osid.resource.ResourceRelationshipLookupSession
    resource_relationship_lookup_session = property(fget=get_resource_relationship_lookup_session)
    @abc.abstractmethod
    def get_resource_relationship_lookup_session_for_bin(self, bin_id):
        """Gets the ``OsidSession`` associated with the resource relationship lookup service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceRelationshipLookupSession``
        :rtype: ``osid.resource.ResourceRelationshipLookupSession``
        :raise: ``NotFound`` -- no ``Bin`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_lookup()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceRelationshipLookupSession
    @abc.abstractmethod
    def get_resource_relationship_query_session(self):
        """Gets the ``OsidSession`` associated with the resource relationship query service.

        :return: a ``ResourceRelationshipQuerySession``
        :rtype: ``osid.resource.ResourceRelationshipQuerySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_query()`` is ``true``.*

        """
        return # osid.resource.ResourceRelationshipQuerySession
    resource_relationship_query_session = property(fget=get_resource_relationship_query_session)
    @abc.abstractmethod
    def get_resource_relationship_query_session_for_bin(self, bin_id):
        """Gets the ``OsidSession`` associated with the resource relationship query service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceRelationshipQuerySession``
        :rtype: ``osid.resource.ResourceRelationshipQuerySession``
        :raise: ``NotFound`` -- no ``Bin`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_query()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_query()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceRelationshipQuerySession
    @abc.abstractmethod
    def get_resource_relationship_search_session(self):
        """Gets the ``OsidSession`` associated with the resource relationship search service.

        :return: a ``ResourceRelationshipSearchSession``
        :rtype: ``osid.resource.ResourceRelationshipSearchSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_search()`` is ``true``.*

        """
        return # osid.resource.ResourceRelationshipSearchSession
    resource_relationship_search_session = property(fget=get_resource_relationship_search_session)
    @abc.abstractmethod
    def get_resource_relationship_search_session_for_bin(self, bin_id):
        """Gets the ``OsidSession`` associated with the resource relationship search service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceRelationshipSearchSession``
        :rtype: ``osid.resource.ResourceRelationshipSearchSession``
        :raise: ``NotFound`` -- no bin found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_search()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_search()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceRelationshipSearchSession
    @abc.abstractmethod
    def get_resource_relationship_admin_session(self):
        """Gets the ``OsidSession`` associated with the resource relationship administration service.

        :return: a ``ResourceRelationshipAdminSession``
        :rtype: ``osid.resource.ResourceRelationshipAdminSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_admin()`` is ``true``.*

        """
        return # osid.resource.ResourceRelationshipAdminSession
    resource_relationship_admin_session = property(fget=get_resource_relationship_admin_session)
    @abc.abstractmethod
    def get_resource_relationship_admin_session_for_bin(self, bin_id):
        """Gets the ``OsidSession`` associated with the resource relationship administration service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceRelationshipAdminSession``
        :rtype: ``osid.resource.ResourceRelationshipAdminSession``
        :raise: ``NotFound`` -- no bin found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_admin()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_admin()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceRelationshipAdminSession
    @abc.abstractmethod
    def get_resource_relationship_notification_session(self, resource_relationship_receiver):
        """Gets the ``OsidSession`` associated with the resource relationship notification service.

        :param resource_relationship_receiver: the notification callback
        :type resource_relationship_receiver: ``osid.resource.ResourceRelationshipReceiver``
        :return: a ``ResourceRelationshipNotificationSession``
        :rtype: ``osid.resource.ResourceRelationshipNotificationSession``
        :raise: ``NullArgument`` -- ``resource_relationship_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_notification()`` is ``true``.*

        """
        return # osid.resource.ResourceRelationshipNotificationSession
    @abc.abstractmethod
    def get_resource_relationship_notification_session_for_bin(self, resource_relationship_receiver, bin_id):
        """Gets the ``OsidSession`` associated with the resource relationship notification service for the given bin.

        :param resource_relationship_receiver: the notification callback
        :type resource_relationship_receiver: ``osid.resource.ResourceRelationshipReceiver``
        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceRelationshipNotificationSession``
        :rtype: ``osid.resource.ResourceRelationshipNotificationSession``
        :raise: ``NotFound`` -- no bin found by the given ``Id``
        :raise: ``NullArgument`` -- ``resource_relationship_receiver`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_notification()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceRelationshipNotificationSession
    @abc.abstractmethod
    def get_resource_relationship_bin_session(self):
        """Gets the session for retrieving resource relationship to bin mappings.

        :return: a ``ResourceRelationshipBinSession``
        :rtype: ``osid.resource.ResourceRelationshipBinSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_bin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_bin()`` is ``true``.*

        """
        return # osid.resource.ResourceRelationshipBinSession
    resource_relationship_bin_session = property(fget=get_resource_relationship_bin_session)
    @abc.abstractmethod
    def get_resource_relationship_bin_assignment_session(self):
        """Gets the session for assigning resource relationships to bin mappings.

        :return: a ``ResourceRelationshipBinAssignmentSession``
        :rtype: ``osid.resource.ResourceRelationshipBinAssignmentSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_bin_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_bin_assignment()`` is
        ``true``.*

        """
        return # osid.resource.ResourceRelationshipBinAssignmentSession
    resource_relationship_bin_assignment_session = property(fget=get_resource_relationship_bin_assignment_session)
    @abc.abstractmethod
    def get_resource_relationship_smart_bin_session(self, bin_id):
        """Gets the session for managing dynamic resource relationship bins.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :return: a ``ResourceRelationshipSmartBinSession``
        :rtype: ``osid.resource.ResourceRelationshipSmartBinSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_smart_bin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_smart_bin()`` is ``true``.*

        """
        return # osid.resource.ResourceRelationshipSmartBinSession
    @abc.abstractmethod
    def get_bin_lookup_session(self):
        """Gets the bin lookup session.

        :return: a ``BinLookupSession``
        :rtype: ``osid.resource.BinLookupSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_lookup()`` is ``true``.*

        """
        return # osid.resource.BinLookupSession
    bin_lookup_session = property(fget=get_bin_lookup_session)
    @abc.abstractmethod
    def get_bin_query_session(self):
        """Gets the bin query session.

        :return: a ``BinQuerySession``
        :rtype: ``osid.resource.BinQuerySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_query()`` is ``true``.*

        """
        return # osid.resource.BinQuerySession
    bin_query_session = property(fget=get_bin_query_session)
    @abc.abstractmethod
    def get_bin_search_session(self):
        """Gets the bin search session.

        :return: a ``BinSearchSession``
        :rtype: ``osid.resource.BinSearchSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_search()`` is ``true``.*

        """
        return # osid.resource.BinSearchSession
    bin_search_session = property(fget=get_bin_search_session)
    @abc.abstractmethod
    def get_bin_admin_session(self):
        """Gets the bin administrative session for creating, updating and deleting bins.

        :return: a ``BinAdminSession``
        :rtype: ``osid.resource.BinAdminSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_admin()`` is ``true``.*

        """
        return # osid.resource.BinAdminSession
    bin_admin_session = property(fget=get_bin_admin_session)
    @abc.abstractmethod
    def get_bin_notification_session(self, bin_receiver):
        """Gets the notification session for subscribing to changes to a bin.

        :param bin_receiver: the notification callback
        :type bin_receiver: ``osid.resource.BinReceiver``
        :return: a ``BinNotificationSession``
        :rtype: ``osid.resource.BinNotificationSession``
        :raise: ``NullArgument`` -- ``bin_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_notification()`` is ``true``.*

        """
        return # osid.resource.BinNotificationSession
    @abc.abstractmethod
    def get_bin_hierarchy_session(self):
        """Gets the bin hierarchy traversal session.

        :return: a ``BinHierarchySession``
        :rtype: ``osid.resource.BinHierarchySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_hierarchy()`` is ``true``.*

        """
        return # osid.resource.BinHierarchySession
    bin_hierarchy_session = property(fget=get_bin_hierarchy_session)
    @abc.abstractmethod
    def get_bin_hierarchy_design_session(self):
        """Gets the bin hierarchy design session.

        :return: a ``BinHierarchyDesignSession``
        :rtype: ``osid.resource.BinHierarchyDesignSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_hierarchy_design()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_hierarchy_design()`` is ``true``.*

        """
        return # osid.resource.BinHierarchyDesignSession
    bin_hierarchy_design_session = property(fget=get_bin_hierarchy_design_session)
    @abc.abstractmethod
    def get_resource_batch_manager(self):
        """Gets the ``ResourceBatchManager``.

        :return: a ``ResourceBatchManager``
        :rtype: ``osid.resource.batch.ResourceBatchManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_batch()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_batch()`` is ``true``.*

        """
        return # osid.resource.batch.ResourceBatchManager
    resource_batch_manager = property(fget=get_resource_batch_manager)
    @abc.abstractmethod
    def get_resource_demographic_manager(self):
        """Gets the ``ResourceDemographicManager``.

        :return: a ``ResourceDemographicManager``
        :rtype: ``osid.resource.demographic.ResourceDemographicManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_demographic()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_demographic()`` is ``true``.*

        """
        return # osid.resource.demographic.ResourceDemographicManager
    resource_demographic_manager = property(fget=get_resource_demographic_manager)
class ResourceProxyManager:
"""The resource manager provides access to resource lookup and creation session and provides interoperability tests for various aspects of this service.
Methods in this manager accept a ``Proxy``. The sessions included in
this manager are:
* ``ResourceLookupSession:`` a session to retrieve resources
* ``ResourceQuerySession:`` a session to query resources
* ``ResourceSearchSession:`` a session to search for resources
* ``ResourceAdminSession:`` a session to create and delete
resources
* ``ResourceNotificationSession:`` a session to receive
notifications pertaining to resource changes
* ``ResourceBinSession:`` a session to look up resource to bin
mappings
* ``ResourceBinAssignmentSession:`` a session to manage resource
to bin mappings
* ``ResourceSmartBinSession:`` a session to manage smart resource
bins
* ``MembershipSession:`` a session to query memberships
* ``GroupSession:`` a session to retrieve group memberships
* ``GroupAssignmentSession:`` a session to manage groups
* ``GroupNotificationSession:`` a session to retrieve
notifications on changes to group membership
* ``GroupHierarchySession:`` a session to view a group hierarchy
  * ``ResourceAgentSession:`` a session to retrieve ``Resource`` and
    ``Agent`` mappings
* ``ResourceAgentAssignmentSession:`` a session to manage
``Resource`` and ``Agent`` mappings
* ``ResourceRelationshipLookupSession:`` a session to retrieve
resource relationships
* ``ResourceRelationshipQuerySession:`` a session to query for
resource relationships
* ``ResourceRelationshipSearchSession:`` a session to search for
resource relationships
* ``ResourceRelationshipAdminSession:`` a session to create and
delete resource relationships
  * ``ResourceRelationshipNotificationSession:`` a session to
    receive notifications pertaining to resource relationship changes
* ``ResourceRelationshipBinSession:`` a session to look up
resource relationship to bin mappings
* ``ResourceRelationshipBinAssignmentSession:`` a session to
manage resource relationship to bin mappings
* ``ResourceRelationshipSmartBinSession:`` a session to manage
smart resource relationship bins
  * ``BinLookupSession:`` a session to retrieve bins
* ``BinQuerySession:`` a session to query bins
* ``BinSearchSession:`` a session to search for bins
* ``BinAdminSession:`` a session to create, update and delete bins
* ``BinNotificationSession:`` a session to receive notifications
pertaining to changes in bins
* ``BinHierarchySession:`` a session to traverse bin hierarchies
* ``BinHierarchyDesignSession:`` a session to manage bin
hierarchies
"""
__metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def get_resource_lookup_session(self, proxy):
        """Gets the ``OsidSession`` associated with the resource lookup service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLookupSession``
        :rtype: ``osid.resource.ResourceLookupSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_lookup()`` is ``true``.*

        """
        return # osid.resource.ResourceLookupSession
    @abc.abstractmethod
    def get_resource_lookup_session_for_bin(self, bin_id, proxy):
        """Gets the ``OsidSession`` associated with the resource lookup service for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLookupSession``
        :rtype: ``osid.resource.ResourceLookupSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_lookup()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceLookupSession
    @abc.abstractmethod
    def get_resource_query_session(self, proxy):
        """Gets a resource query session.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceQuerySession``
        :rtype: ``osid.resource.ResourceQuerySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_query()`` is ``true``.*

        """
        return # osid.resource.ResourceQuerySession
    @abc.abstractmethod
    def get_resource_query_session_for_bin(self, bin_id, proxy):
        """Gets a resource query session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceQuerySession``
        :rtype: ``osid.resource.ResourceQuerySession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_query()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_query()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceQuerySession
    @abc.abstractmethod
    def get_resource_search_session(self, proxy):
        """Gets a resource search session.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceSearchSession``
        :rtype: ``osid.resource.ResourceSearchSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_search()`` is ``true``.*

        """
        return # osid.resource.ResourceSearchSession
    @abc.abstractmethod
    def get_resource_search_session_for_bin(self, bin_id, proxy):
        """Gets a resource search session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceSearchSession``
        :rtype: ``osid.resource.ResourceSearchSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_search()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_search()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceSearchSession
    @abc.abstractmethod
    def get_resource_admin_session(self, proxy):
        """Gets a resource administration session for creating, updating and deleting resources.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceAdminSession``
        :rtype: ``osid.resource.ResourceAdminSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_admin()`` is ``true``.*

        """
        return # osid.resource.ResourceAdminSession
    @abc.abstractmethod
    def get_resource_admin_session_for_bin(self, bin_id, proxy):
        """Gets a resource administration session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceAdminSession``
        :rtype: ``osid.resource.ResourceAdminSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_admin()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_admin()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceAdminSession
    @abc.abstractmethod
    def get_resource_notification_session(self, resource_receiver, proxy):
        """Gets the resource notification session.

        :param resource_receiver: notification callback
        :type resource_receiver: ``osid.resource.ResourceReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceNotificationSession``
        :rtype: ``osid.resource.ResourceNotificationSession``
        :raise: ``NullArgument`` -- ``resource_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_notification()`` is ``true``.*

        """
        return # osid.resource.ResourceNotificationSession
    @abc.abstractmethod
    def get_resource_notification_session_for_bin(self, resource_receiver, bin_id, proxy):
        """Gets the resource notification session for the given bin.

        :param resource_receiver: notification callback
        :type resource_receiver: ``osid.resource.ResourceReceiver``
        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceNotificationSession``
        :rtype: ``osid.resource.ResourceNotificationSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``resource_receiver, bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_notification()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return # osid.resource.ResourceNotificationSession
    @abc.abstractmethod
    def get_resource_bin_session(self, proxy):
        """Gets the session for retrieving resource to bin mappings.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceBinSession``
        :rtype: ``osid.resource.ResourceBinSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_bin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_bin()`` is ``true``.*

        """
        return # osid.resource.ResourceBinSession
    @abc.abstractmethod
    def get_resource_bin_assignment_session(self, proxy):
        """Gets the session for assigning resource to bin mappings.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceBinAssignmentSession``
        :rtype: ``osid.resource.ResourceBinAssignmentSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_bin_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_bin_assignment()`` is ``true``.*

        """
        return  # osid.resource.ResourceBinAssignmentSession
    @abc.abstractmethod
    def get_resource_smart_bin_session(self, bin_id, proxy):
        """Gets the session for managing dynamic resource bins.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceSmartBinSession``
        :rtype: ``osid.resource.ResourceSmartBinSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_smart_bin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_smart_bin()`` is ``true``.*

        """
        return  # osid.resource.ResourceSmartBinSession
    @abc.abstractmethod
    def get_membership_session(self, proxy):
        """Gets the session for querying memberships.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MembershipSession``
        :rtype: ``osid.resource.MembershipSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_membership()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_membership()`` is ``true``.*

        """
        return  # osid.resource.MembershipSession
    @abc.abstractmethod
    def get_membership_session_for_bin(self, bin_id, proxy):
        """Gets a resource membership session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MembershipSession``
        :rtype: ``osid.resource.MembershipSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_membership()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_membership()`` and ``supports_visible_federation()``
        are ``true``.*

        """
        return  # osid.resource.MembershipSession
    @abc.abstractmethod
    def get_group_session(self, proxy):
        """Gets the session for retrieving group memberships.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupSession``
        :rtype: ``osid.resource.GroupSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_groups()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_groups()`` is ``true``.*

        """
        return  # osid.resource.GroupSession
    @abc.abstractmethod
    def get_group_session_for_bin(self, bin_id, proxy):
        """Gets a group session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupSession``
        :rtype: ``osid.resource.GroupSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_groups()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_groups()`` and ``supports_visible_federation()`` are
        ``true``.*

        """
        return  # osid.resource.GroupSession
    @abc.abstractmethod
    def get_group_assignment_session(self, proxy):
        """Gets the session for assigning resources to groups.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupAssignmentSession``
        :rtype: ``osid.resource.GroupAssignmentSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_assignment()`` is ``true``.*

        """
        return  # osid.resource.GroupAssignmentSession
    @abc.abstractmethod
    def get_group_assignment_session_for_bin(self, bin_id, proxy):
        """Gets a group assignment session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupAssignmentSession``
        :rtype: ``osid.resource.GroupAssignmentSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_assignment()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_assignment()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.GroupAssignmentSession
    @abc.abstractmethod
    def get_group_notification_session(self, group_rceeiver, proxy):
        """Gets the notification session for notifications pertaining to resource changes.

        (Note: the ``group_rceeiver`` parameter name is misspelled in the
        published interface; it is retained here for compatibility.)

        :param group_rceeiver: the notification callback
        :type group_rceeiver: ``osid.resource.GroupReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupNotificationSession``
        :rtype: ``osid.resource.GroupNotificationSession``
        :raise: ``NullArgument`` -- ``group_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_notification()`` is ``true``.*

        """
        return  # osid.resource.GroupNotificationSession
    @abc.abstractmethod
    def get_group_notification_session_for_bin(self, group_rceeiver, bin_id, proxy):
        """Gets the group notification session for the given bin.

        (Note: the ``group_rceeiver`` parameter name is misspelled in the
        published interface; it is retained here for compatibility.)

        :param group_rceeiver: the notification callback
        :type group_rceeiver: ``osid.resource.GroupReceiver``
        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupNotificationSession``
        :rtype: ``osid.resource.GroupNotificationSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``group_receiver, bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_notification()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.GroupNotificationSession
    @abc.abstractmethod
    def get_group_hierarchy_session(self, proxy):
        """Gets the group hierarchy traversal session for the given resource group.

        NOTE(review): the return description says ``GroupHierarchySession``
        but the declared rtype is ``BinHierarchySession`` -- this mirrors
        the published interface; confirm which type is intended.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupHierarchySession``
        :rtype: ``osid.resource.BinHierarchySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_hierarchy()`` is ``true``.*

        """
        return  # osid.resource.BinHierarchySession
    @abc.abstractmethod
    def get_group_hierarchy_session_for_bin(self, bin_id, proxy):
        """Gets a group hierarchy session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``GroupHierarchySession``
        :rtype: ``osid.resource.GroupHierarchySession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_group_hierarchy()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_group_hierarchy()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.GroupHierarchySession
    @abc.abstractmethod
    def get_resource_agent_session(self, proxy):
        """Gets the session for retrieving resource agent mappings.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceAgentSession``
        :rtype: ``osid.resource.ResourceAgentSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agents()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agents()`` is ``true``.*

        """
        return  # osid.resource.ResourceAgentSession
    @abc.abstractmethod
    def get_resource_agent_session_for_bin(self, bin_id, proxy):
        """Gets a resource agent session for the given bin.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceAgentSession``
        :rtype: ``osid.resource.ResourceAgentSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agents()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agents()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.ResourceAgentSession
    @abc.abstractmethod
    def get_resource_agent_assignment_session(self, proxy):
        """Gets the session for assigning agents to resources.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceAgentAssignmentSession``
        :rtype: ``osid.resource.ResourceAgentAssignmentSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agent_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agent_assignment()`` is ``true``.*

        """
        return  # osid.resource.ResourceAgentAssignmentSession
    @abc.abstractmethod
    def get_resource_agent_assignment_session_for_bin(self, bin_id, proxy):
        """Gets a resource agent assignment session for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceAgentAssignmentSession``
        :rtype: ``osid.resource.ResourceAgentAssignmentSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_agent_assignment()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_agent_assignment()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.ResourceAgentAssignmentSession
    @abc.abstractmethod
    def get_resource_relationship_lookup_session(self, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship lookup service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipLookupSession``
        :rtype: ``osid.resource.ResourceRelationshipLookupSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_lookup()`` is ``true``.*

        """
        return  # osid.resource.ResourceRelationshipLookupSession
    @abc.abstractmethod
    def get_resource_relationship_lookup_session_for_bin(self, bin_id, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship lookup service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipLookupSession``
        :rtype: ``osid.resource.ResourceRelationshipLookupSession``
        :raise: ``NotFound`` -- no ``Bin`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_lookup()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.ResourceRelationshipLookupSession
    @abc.abstractmethod
    def get_resource_relationship_query_session(self, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship query service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipQuerySession``
        :rtype: ``osid.resource.ResourceRelationshipQuerySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_query()`` is ``true``.*

        """
        return  # osid.resource.ResourceRelationshipQuerySession
    @abc.abstractmethod
    def get_resource_relationship_query_session_for_bin(self, bin_id, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship query service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipQuerySession``
        :rtype: ``osid.resource.ResourceRelationshipQuerySession``
        :raise: ``NotFound`` -- no ``Bin`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_query()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_query()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.ResourceRelationshipQuerySession
    @abc.abstractmethod
    def get_resource_relationship_search_session(self, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship search service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipSearchSession``
        :rtype: ``osid.resource.ResourceRelationshipSearchSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_search()`` is ``true``.*

        """
        return  # osid.resource.ResourceRelationshipSearchSession
    @abc.abstractmethod
    def get_resource_relationship_search_session_for_bin(self, bin_id, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship search service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipSearchSession``
        :rtype: ``osid.resource.ResourceRelationshipSearchSession``
        :raise: ``NotFound`` -- no bin found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_search()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_search()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.ResourceRelationshipSearchSession
    @abc.abstractmethod
    def get_resource_relationship_admin_session(self, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship administration service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipAdminSession``
        :rtype: ``osid.resource.ResourceRelationshipAdminSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_admin()`` is ``true``.*

        """
        return  # osid.resource.ResourceRelationshipAdminSession
    @abc.abstractmethod
    def get_resource_relationship_admin_session_for_bin(self, bin_id, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship administration service for the given bin.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipAdminSession``
        :rtype: ``osid.resource.ResourceRelationshipAdminSession``
        :raise: ``NotFound`` -- no bin found by the given ``Id``
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_admin()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_admin()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.ResourceRelationshipAdminSession
    @abc.abstractmethod
    def get_resource_relationship_notification_session(self, resource_relationship_receiver, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship notification service.

        :param resource_relationship_receiver: the notification callback
        :type resource_relationship_receiver: ``osid.resource.ResourceRelationshipReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipNotificationSession``
        :rtype: ``osid.resource.ResourceRelationshipNotificationSession``
        :raise: ``NullArgument`` -- ``resource_relationship_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_notification()`` is ``true``.*

        """
        return  # osid.resource.ResourceRelationshipNotificationSession
    @abc.abstractmethod
    def get_resource_relationship_notification_session_for_bin(self, resource_relationship_receiver, bin_id, proxy):
        """Gets the ``OsidSession`` associated with the resource relationship notification service for the given bin.

        :param resource_relationship_receiver: the notification callback
        :type resource_relationship_receiver: ``osid.resource.ResourceRelationshipReceiver``
        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipNotificationSession``
        :rtype: ``osid.resource.ResourceRelationshipNotificationSession``
        :raise: ``NotFound`` -- no bin found by the given ``Id``
        :raise: ``NullArgument`` -- ``resource_relationship_receiver, bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_notification()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        return  # osid.resource.ResourceRelationshipNotificationSession
    @abc.abstractmethod
    def get_resource_relationship_bin_session(self, proxy):
        """Gets the session for retrieving resource relationship to bin mappings.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipBinSession``
        :rtype: ``osid.resource.ResourceRelationshipBinSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_bin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_bin()`` is ``true``.*

        """
        return  # osid.resource.ResourceRelationshipBinSession
    @abc.abstractmethod
    def get_resource_relationship_bin_assignment_session(self, proxy):
        """Gets the session for assigning resource relationship to bin mappings.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipBinAssignmentSession``
        :rtype: ``osid.resource.ResourceRelationshipBinAssignmentSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_bin_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_bin_assignment()`` is
        ``true``.*

        """
        return  # osid.resource.ResourceRelationshipBinAssignmentSession
    @abc.abstractmethod
    def get_resource_relationship_smart_bin_session(self, bin_id, proxy):
        """Gets the session for managing dynamic resource relationship bins.

        :param bin_id: the ``Id`` of the bin
        :type bin_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceRelationshipSmartBinSession``
        :rtype: ``osid.resource.ResourceRelationshipSmartBinSession``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_relationship_smart_bin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_relationship_smart_bin()`` is ``true``.*

        """
        return  # osid.resource.ResourceRelationshipSmartBinSession
    @abc.abstractmethod
    def get_bin_lookup_session(self, proxy):
        """Gets the bin lookup session.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``BinLookupSession``
        :rtype: ``osid.resource.BinLookupSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_lookup()`` is ``true``.*

        """
        return  # osid.resource.BinLookupSession
    @abc.abstractmethod
    def get_bin_query_session(self, proxy):
        """Gets the bin query session.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``BinQuerySession``
        :rtype: ``osid.resource.BinQuerySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_query()`` is ``true``.*

        """
        return  # osid.resource.BinQuerySession
    @abc.abstractmethod
    def get_bin_search_session(self, proxy):
        """Gets the bin search session.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``BinSearchSession``
        :rtype: ``osid.resource.BinSearchSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_search()`` is ``true``.*

        """
        return  # osid.resource.BinSearchSession
    @abc.abstractmethod
    def get_bin_admin_session(self, proxy):
        """Gets the bin administrative session for creating, updating and deleting bins.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``BinAdminSession``
        :rtype: ``osid.resource.BinAdminSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_admin()`` is ``true``.*

        """
        return  # osid.resource.BinAdminSession
    @abc.abstractmethod
    def get_bin_notification_session(self, bin_receiver, proxy):
        """Gets the notification session for subscribing to changes to a bin.

        :param bin_receiver: notification callback
        :type bin_receiver: ``osid.resource.BinReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``BinNotificationSession``
        :rtype: ``osid.resource.BinNotificationSession``
        :raise: ``NullArgument`` -- ``bin_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bin_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_notification()`` is ``true``.*

        """
        return  # osid.resource.BinNotificationSession
    @abc.abstractmethod
    def get_bin_hierarchy_session(self, proxy):
        """Gets the bin hierarchy traversal session.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``BinHierarchySession``
        :rtype: ``osid.resource.BinHierarchySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unimplemented`` -- ``supports_bin_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_hierarchy()`` is ``true``.*

        """
        return  # osid.resource.BinHierarchySession
    @abc.abstractmethod
    def get_bin_hierarchy_design_session(self, proxy):
        """Gets the bin hierarchy design session.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``BinHierarchyDesignSession``
        :rtype: ``osid.resource.BinHierarchyDesignSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unimplemented`` -- ``supports_bin_hierarchy_design()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bin_hierarchy_design()`` is ``true``.*

        """
        return  # osid.resource.BinHierarchyDesignSession
    @abc.abstractmethod
    def get_resource_batch_proxy_manager(self):
        """Gets the ``ResourceBatchProxyManager``.

        :return: a ``ResourceBatchProxyManager``
        :rtype: ``osid.resource.batch.ResourceBatchProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_batch()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_batch()`` is ``true``.*

        """
        return  # osid.resource.batch.ResourceBatchProxyManager

    # Property alias so callers can read ``mgr.resource_batch_proxy_manager``.
    resource_batch_proxy_manager = property(fget=get_resource_batch_proxy_manager)
    @abc.abstractmethod
    def get_resource_demographic_proxy_manager(self):
        """Gets the ``ResourceDemographicProxyManager``.

        :return: a ``ResourceDemographicProxyManager``
        :rtype: ``osid.resource.demographic.ResourceDemographicProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_demographic()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_demographic()`` is ``true``.*

        """
        return  # osid.resource.demographic.ResourceDemographicProxyManager

    # Property alias so callers can read ``mgr.resource_demographic_proxy_manager``.
    resource_demographic_proxy_manager = property(fget=get_resource_demographic_proxy_manager)
| 40.290605
| 157
| 0.651986
| 10,613
| 105,924
| 6.351927
| 0.024027
| 0.037382
| 0.042425
| 0.03394
| 0.963108
| 0.947354
| 0.919348
| 0.893047
| 0.840357
| 0.825523
| 0
| 0
| 0.224925
| 105,924
| 2,628
| 158
| 40.305936
| 0.821118
| 0.719865
| 0
| 0.614894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.304255
| false
| 0
| 0.002128
| 0
| 0.695745
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
0be7a8ccd5fdbd49e172bb2ac46ef53c67b210d8
| 19,364
|
py
|
Python
|
tools/telemetry/telemetry/results/buildbot_page_measurement_results_unittest.py
|
aranajhonny/chromium
|
caf5bcb822f79b8997720e589334266551a50a13
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2019-01-16T03:57:39.000Z
|
2019-01-16T03:57:39.000Z
|
tools/telemetry/telemetry/results/buildbot_page_measurement_results_unittest.py
|
aranajhonny/chromium
|
caf5bcb822f79b8997720e589334266551a50a13
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2018-02-10T21:00:08.000Z
|
2018-03-20T05:09:50.000Z
|
tools/telemetry/telemetry/results/buildbot_page_measurement_results_unittest.py
|
aranajhonny/chromium
|
caf5bcb822f79b8997720e589334266551a50a13
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import perf_tests_helper
from telemetry.page import page_set
from telemetry.value import histogram
from telemetry.value import list_of_scalar_values
from telemetry.value import scalar
from telemetry.results import base_test_results_unittest
from telemetry.results import buildbot_page_measurement_results
def _MakePageSet():
  """Build a three-page PageSet rooted at this file's directory."""
  pages = page_set.PageSet(file_path=os.path.dirname(__file__))
  for url in ('http://www.foo.com/',
              'http://www.bar.com/',
              'http://www.baz.com/'):
    pages.AddPageWithDefaultRunNavigate(url)
  return pages
class SummarySavingPageMeasurementResults(
    buildbot_page_measurement_results.BuildbotPageMeasurementResults):
  """Measurement results that capture perf summary lines instead of printing.

  Each formatted perf-result line is appended to ``self.results`` so tests
  can assert on the exact buildbot output.
  """
  def __init__(self, trace_tag=''):
    super(SummarySavingPageMeasurementResults, self).__init__(
        None, trace_tag=trace_tag)
    self.results = []
  def _PrintPerfResult(self, *args):
    # Redirect the would-be stdout line into the results list.
    self.results.append(
        perf_tests_helper.PrintPerfResult(*args, print_to_stdout=False))
class BuildbotPageMeasurementResultsTest(
base_test_results_unittest.BaseTestResultsUnittest):
  def test_basic_summary(self):
    """Two successful pages each reporting 'a' yield per-page plus summary lines."""
    test_page_set = _MakePageSet()
    measurement_results = SummarySavingPageMeasurementResults()
    # First page: a = 3 seconds.
    measurement_results.WillMeasurePage(test_page_set.pages[0])
    measurement_results.AddValue(scalar.ScalarValue(
        test_page_set.pages[0], 'a', 'seconds', 3))
    measurement_results.DidMeasurePage()
    measurement_results.AddSuccess(test_page_set.pages[0])
    # Second page: a = 7 seconds.
    measurement_results.WillMeasurePage(test_page_set.pages[1])
    measurement_results.AddValue(scalar.ScalarValue(
        test_page_set.pages[1], 'a', 'seconds', 7))
    measurement_results.DidMeasurePage()
    measurement_results.AddSuccess(test_page_set.pages[1])
    measurement_results.PrintSummary()
    # Expect one RESULT per page, one aggregate with Avg/Sd, and the
    # failure/error counters.
    expected = ['RESULT a: http___www.bar.com_= 7 seconds',
                'RESULT a: http___www.foo.com_= 3 seconds',
                '*RESULT a: a= [3,7] seconds\nAvg a: 5.000000seconds\n' +
                'Sd a: 2.828427seconds',
                'RESULT telemetry_page_measurement_results: ' +
                'num_failed= 0 count',
                'RESULT telemetry_page_measurement_results: ' +
                'num_errored= 0 count']
    self.assertEquals(expected, measurement_results.results)
def test_basic_summary_with_only_one_page(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 3))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.PrintSummary()
expected = ['*RESULT a: a= 3 seconds',
'RESULT telemetry_page_measurement_results: ' +
'num_failed= 0 count',
'RESULT telemetry_page_measurement_results: ' +
'num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
def test_basic_summary_nonuniform_results(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 3))
measurement_results.AddValue(
scalar.ScalarValue(test_page_set.pages[0], 'b', 'seconds', 10))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 3))
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'b', 'seconds', 10))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.WillMeasurePage(test_page_set.pages[2])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[2], 'a', 'seconds', 7))
# Note, page[2] does not report a 'b' metric.
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[2])
measurement_results.PrintSummary()
expected = ['RESULT a: http___www.bar.com_= 3 seconds',
'RESULT a: http___www.baz.com_= 7 seconds',
'RESULT a: http___www.foo.com_= 3 seconds',
'*RESULT a: a= [3,3,7] seconds\nAvg a: 4.333333seconds\n' +
'Sd a: 2.309401seconds',
'RESULT b: http___www.bar.com_= 10 seconds',
'RESULT b: http___www.foo.com_= 10 seconds',
'*RESULT b: b= [10,10] seconds\nAvg b: 10.000000seconds',
'RESULT telemetry_page_measurement_results: ' +
'num_failed= 0 count',
'RESULT telemetry_page_measurement_results: ' +
'num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
def test_basic_summary_pass_and_fail_page(self):
"""If a page failed, only print summary for individual pages."""
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 3))
measurement_results.DidMeasurePage()
measurement_results.AddFailureMessage(test_page_set.pages[0], 'message')
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 7))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.PrintSummary()
expected = ['RESULT a: http___www.bar.com_= 7 seconds',
'RESULT a: http___www.foo.com_= 3 seconds',
'RESULT telemetry_page_measurement_results: ' +
'num_failed= 1 count',
'RESULT telemetry_page_measurement_results: ' +
'num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
def test_repeated_pageset_one_iteration_one_page_fails(self):
"""Page fails on one iteration, no averaged results should print."""
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 3))
measurement_results.DidMeasurePage()
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 7))
measurement_results.DidMeasurePage()
measurement_results.AddFailureMessage(test_page_set.pages[1], 'message')
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 4))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 8))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.PrintSummary()
expected = ['RESULT a: http___www.bar.com_= [7,8] seconds\n' +
'Avg a: 7.500000seconds\n' +
'Sd a: 0.707107seconds',
'RESULT a: http___www.foo.com_= [3,4] seconds\n' +
'Avg a: 3.500000seconds\n' +
'Sd a: 0.707107seconds',
'RESULT telemetry_page_measurement_results: ' +
'num_failed= 1 count',
'RESULT telemetry_page_measurement_results: ' +
'num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
def test_repeated_pageset(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 3))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 7))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 4))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 8))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.PrintSummary()
expected = ['RESULT a: http___www.bar.com_= [7,8] seconds\n' +
'Avg a: 7.500000seconds\n' +
'Sd a: 0.707107seconds',
'RESULT a: http___www.foo.com_= [3,4] seconds\n' +
'Avg a: 3.500000seconds\n' +
'Sd a: 0.707107seconds',
'*RESULT a: a= [3,7,4,8] seconds\n' +
'Avg a: 5.500000seconds\n' +
'Sd a: 2.380476seconds',
'RESULT telemetry_page_measurement_results: ' +
'num_failed= 0 count',
'RESULT telemetry_page_measurement_results: ' +
'num_errored= 0 count'
]
self.assertEquals(expected, measurement_results.results)
def test_repeated_pages(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 3))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'a', 'seconds', 4))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 7))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'a', 'seconds', 8))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.PrintSummary()
expected = ['RESULT a: http___www.bar.com_= [7,8] seconds\n' +
'Avg a: 7.500000seconds\n' +
'Sd a: 0.707107seconds',
'RESULT a: http___www.foo.com_= [3,4] seconds\n' +
'Avg a: 3.500000seconds\n' +
'Sd a: 0.707107seconds',
'*RESULT a: a= [3,4,7,8] seconds\n' +
'Avg a: 5.500000seconds\n' +
'Sd a: 2.380476seconds',
'RESULT telemetry_page_measurement_results: ' +
'num_failed= 0 count',
'RESULT telemetry_page_measurement_results: ' +
'num_errored= 0 count'
]
self.assertEquals(expected, measurement_results.results)
def test_overall_results_trace_tag(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults(trace_tag='_ref')
measurement_results.AddSummaryValue(
scalar.ScalarValue(None, 'a', 'seconds', 1))
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'b', 'seconds', 2))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'b', 'seconds', 3))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.AddSummaryValue(
scalar.ScalarValue(None, 'c', 'seconds', 4))
measurement_results.PrintSummary()
expected = [
'*RESULT b: b_ref= [2,3] seconds\n' +
'Avg b: 2.500000seconds\nSd b: 0.707107seconds',
'*RESULT a: a_ref= 1 seconds',
'*RESULT c: c_ref= 4 seconds',
'RESULT telemetry_page_measurement_results: num_failed= 0 count',
'RESULT telemetry_page_measurement_results: num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
def test_overall_results_page_runs_twice(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.AddSummaryValue(
scalar.ScalarValue(None, 'a', 'seconds', 1))
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'b', 'seconds', 2))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'b', 'seconds', 3))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.PrintSummary()
expected = [
'RESULT b: http___www.foo.com_= [2,3] seconds\n' +
'Avg b: 2.500000seconds\nSd b: 0.707107seconds',
'*RESULT b: b= [2,3] seconds\n' +
'Avg b: 2.500000seconds\nSd b: 0.707107seconds',
'*RESULT a: a= 1 seconds',
'RESULT telemetry_page_measurement_results: num_failed= 0 count',
'RESULT telemetry_page_measurement_results: num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
def test_unimportant_results(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.AddSummaryValue(
scalar.ScalarValue(None, 'a', 'seconds', 1, important=False))
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[0], 'b', 'seconds', 2, important=False))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(scalar.ScalarValue(
test_page_set.pages[1], 'b', 'seconds', 3, important=False))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.PrintSummary()
self.assertEquals(
measurement_results.results,
['RESULT b: http___www.bar.com_= 3 seconds',
'RESULT b: http___www.foo.com_= 2 seconds',
'RESULT b: b= [2,3] seconds\n' +
'Avg b: 2.500000seconds\nSd b: 0.707107seconds',
'RESULT a: a= 1 seconds',
'RESULT telemetry_page_measurement_results: num_failed= 0 count',
'RESULT telemetry_page_measurement_results: num_errored= 0 count'])
def test_list_value(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.AddSummaryValue(
list_of_scalar_values.ListOfScalarValues(None, 'a', 'seconds', [1, 1]))
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(list_of_scalar_values.ListOfScalarValues(
test_page_set.pages[0], 'b', 'seconds', [2, 2]))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(list_of_scalar_values.ListOfScalarValues(
test_page_set.pages[1], 'b', 'seconds', [3, 3]))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.PrintSummary()
expected = [
'RESULT b: http___www.bar.com_= [3,3] seconds\n' +
'Avg b: 3.000000seconds',
'RESULT b: http___www.foo.com_= [2,2] seconds\n' +
'Avg b: 2.000000seconds',
'*RESULT b: b= [2,2,3,3] seconds\nAvg b: 2.500000seconds\n' +
'Sd b: 0.577350seconds',
'*RESULT a: a= [1,1] seconds\nAvg a: 1.000000seconds',
'RESULT telemetry_page_measurement_results: num_failed= 0 count',
'RESULT telemetry_page_measurement_results: num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
def test_histogram(self):
test_page_set = _MakePageSet()
measurement_results = SummarySavingPageMeasurementResults()
measurement_results.WillMeasurePage(test_page_set.pages[0])
measurement_results.AddValue(histogram.HistogramValue(
test_page_set.pages[0], 'a', 'units',
raw_value_json='{"buckets": [{"low": 1, "high": 2, "count": 1}]}',
important=False))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[0])
measurement_results.WillMeasurePage(test_page_set.pages[1])
measurement_results.AddValue(histogram.HistogramValue(
test_page_set.pages[1], 'a', 'units',
raw_value_json='{"buckets": [{"low": 2, "high": 3, "count": 1}]}',
important=False))
measurement_results.DidMeasurePage()
measurement_results.AddSuccess(test_page_set.pages[1])
measurement_results.PrintSummary()
expected = [
'HISTOGRAM a: http___www.bar.com_= ' +
'{"buckets": [{"low": 2, "high": 3, "count": 1}]} units\n' +
'Avg a: 2.500000units',
'HISTOGRAM a: http___www.foo.com_= ' +
'{"buckets": [{"low": 1, "high": 2, "count": 1}]} units\n' +
'Avg a: 1.500000units',
'RESULT telemetry_page_measurement_results: num_failed= 0 count',
'RESULT telemetry_page_measurement_results: num_errored= 0 count']
self.assertEquals(expected, measurement_results.results)
| 43.41704
| 79
| 0.696395
| 2,213
| 19,364
| 5.792589
| 0.0723
| 0.263983
| 0.088384
| 0.113581
| 0.879866
| 0.86091
| 0.851158
| 0.832592
| 0.822529
| 0.811764
| 0
| 0.031342
| 0.192625
| 19,364
| 445
| 80
| 43.514607
| 0.788602
| 0.016629
| 0
| 0.724518
| 0
| 0.013774
| 0.223303
| 0.044145
| 0
| 0
| 0
| 0
| 0.033058
| 1
| 0.041322
| false
| 0.002755
| 0.038567
| 0
| 0.088154
| 0.002755
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0bfa41c98293d89368dabf00edf18e75d0949629
| 1,663
|
py
|
Python
|
app/models.py
|
ivan4oto/balkanultra
|
02d7a1b99e0ca018ae4733b2a4d64fd0832399cc
|
[
"MIT"
] | null | null | null |
app/models.py
|
ivan4oto/balkanultra
|
02d7a1b99e0ca018ae4733b2a4d64fd0832399cc
|
[
"MIT"
] | null | null | null |
app/models.py
|
ivan4oto/balkanultra
|
02d7a1b99e0ca018ae4733b2a4d64fd0832399cc
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.core.mail import send_mail
# Create your models here.
class UltraAthlete(models.Model):
    """Registration record for an ultra-distance race participant."""
    first_name = models.CharField(max_length=25)
    last_name = models.CharField(max_length=25)
    phone = models.CharField(max_length=50)
    email = models.EmailField()
    gender = models.CharField(max_length=25)
    # Whether the entry fee has been received.
    paid = models.BooleanField(default=False)
    # E-mail address used for the payment, which may differ from `email`.
    payment_mail = models.EmailField()
    # NOTE(review): purpose of these two optional links is not evident from
    # this file — presumably activity/result links; confirm with callers.
    first_link = models.URLField(blank=True, null=True)
    second_link = models.URLField(blank=True, null=True)

    def send_mail(self):
        # Sends the registration-confirmation e-mail. NOTE: the method name
        # shadows django.core.mail.send_mail, but the bare call below still
        # resolves to the module-level import (class bodies do not form an
        # enclosing scope for name lookup inside methods).
        send_mail(
            'Балкан Ултра - успешна регистрация',
            'Благодарим Ви, че се регистрирахте за Балкан Ултра. Очакваме ви на 06.08 2022',
            'balkanultra.noreply@gmail.com',
            [self.email],
            fail_silently=False,
        )

    def __str__(self):
        return str(self.first_name) + ' ' + str(self.last_name)
class SkyAthlete(models.Model):
    """Registration record for a sky-race participant.

    Mirrors UltraAthlete minus the two optional link fields.
    """
    first_name = models.CharField(max_length=25)
    last_name = models.CharField(max_length=25)
    phone = models.CharField(max_length=50)
    email = models.EmailField()
    gender = models.CharField(max_length=25)
    # Whether the entry fee has been received.
    paid = models.BooleanField(default=False)
    # E-mail address used for the payment, which may differ from `email`.
    payment_mail = models.EmailField()

    def send_mail(self):
        # Confirmation e-mail; the bare send_mail call resolves to the
        # module-level django.core.mail import despite the name shadowing.
        send_mail(
            'Балкан Ултра - успешна регистрация',
            'Благодарим Ви, че се регистрирахте за Балкан Ултра. Очакваме ви на 06.08 2022',
            'balkanultra.noreply@gmail.com',
            [self.email],
            fail_silently=False,
        )

    def __str__(self):
        return str(self.first_name) + ' ' + str(self.last_name)
| 33.938776
| 92
| 0.661455
| 206
| 1,663
| 5.169903
| 0.305825
| 0.112676
| 0.135211
| 0.180282
| 0.888263
| 0.888263
| 0.888263
| 0.822535
| 0.822535
| 0.822535
| 0
| 0.025137
| 0.234516
| 1,663
| 49
| 93
| 33.938776
| 0.811469
| 0.014432
| 0
| 0.8
| 0
| 0
| 0.172161
| 0.035409
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.05
| 0.05
| 0.65
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
0bffe0a6e85aed3b62a3ccbbf962124ea0f9c1a4
| 1,481
|
py
|
Python
|
pycozmo/tests/test_frame.py
|
gimait/pycozmo
|
601d9c09903b9300e8990723cae95974212afb09
|
[
"MIT"
] | 1
|
2020-12-20T13:33:41.000Z
|
2020-12-20T13:33:41.000Z
|
pycozmo/tests/test_frame.py
|
solosito/pycozmo
|
5d28118eb8f7a625ae4a66054dabf19b4fe27483
|
[
"MIT"
] | null | null | null |
pycozmo/tests/test_frame.py
|
solosito/pycozmo
|
5d28118eb8f7a625ae4a66054dabf19b4fe27483
|
[
"MIT"
] | 1
|
2019-12-25T13:36:16.000Z
|
2019-12-25T13:36:16.000Z
|
import unittest
from pycozmo.frame import Frame
class TestFrame(unittest.TestCase):
    """Tests parsing and round-trip encoding of pycozmo protocol frames."""

    def test_from_bytes_multi(self):
        # A captured raw frame ('COZ\x03RE\x01' header) containing four
        # packets; the exact bytes are verbatim wire data.
        f = Frame.from_bytes(
            b'COZ\x03RE\x01\x07\x9d\n\xa0\n\x8f\x00\x04\x01\x00\x8f\x04\x1d\x00\x97\x1a\x00\x15\xb0\xaa\x9c'
            b'\xac\xb2@\xa8\xba^\xac\xb2@\x02\xb4\xa2\xa0\xb0\xaa@\xac\xb2`\xb0\xaa\x1b\x04 \x00\x03\x1f\x80'
            b'\x1f\x80\t\x00\x00\x00\x00\x00\x1f\x80\x1f\x80\t\x00\x00\x00\x00\x00\x1f\x80\x1f\x80\t\x00\x00'
            b'\x00\x00\x00\x00\x04\x16\x00\x11\x1f\x80\x1f\x80\t\x00\x00\x00\x00\x00\x1f\x80\x1f\x80\t\x00'
            b'\x00\x00\x00\x00\x00')
        # Header fields decoded from the capture above.
        self.assertEqual(f.type.value, 7)
        self.assertEqual(f.first_seq, 2717)
        self.assertEqual(f.seq, 2720)
        self.assertEqual(f.ack, 143)
        self.assertEqual(len(f.pkts), 4)

    def test_encode_decode(self):
        # Round trip: decoding then re-encoding must reproduce the exact
        # original byte sequence.
        expected = \
            b'COZ\x03RE\x01\x07\x9d\n\xa0\n\x8f\x00\x04\x01\x00\x8f\x04\x1d\x00\x97\x1a\x00\x15\xb0\xaa\x9c' \
            b'\xac\xb2@\xa8\xba^\xac\xb2@\x02\xb4\xa2\xa0\xb0\xaa@\xac\xb2`\xb0\xaa\x1b\x04 \x00\x03\x1f\x80' \
            b'\x1f\x80\t\x00\x00\x00\x00\x00\x1f\x80\x1f\x80\t\x00\x00\x00\x00\x00\x1f\x80\x1f\x80\t\x00\x00' \
            b'\x00\x00\x00\x00\x04\x16\x00\x11\x1f\x80\x1f\x80\t\x00\x00\x00\x00\x00\x1f\x80\x1f\x80\t\x00' \
            b'\x00\x00\x00\x00\x00'
        f = Frame.from_bytes(expected)
        actual = f.to_bytes()
        self.assertEqual(expected, actual)
| 46.28125
| 111
| 0.625253
| 278
| 1,481
| 3.298561
| 0.230216
| 0.261723
| 0.274809
| 0.235551
| 0.63904
| 0.63904
| 0.63904
| 0.63904
| 0.63904
| 0.63904
| 0
| 0.244444
| 0.179608
| 1,481
| 31
| 112
| 47.774194
| 0.510288
| 0
| 0
| 0
| 0
| 0.32
| 0.531081
| 0.481081
| 0
| 0
| 0
| 0
| 0.24
| 1
| 0.08
| false
| 0
| 0.08
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
04166f4a50586a9224f74c0c7f0d0b943de8765e
| 103
|
py
|
Python
|
tis_catch_weight_extension/models/__init__.py
|
guadaltech/jdvp-odoo
|
e399ecf4c66e15a60a8561edec359e75be08140b
|
[
"ADSL"
] | null | null | null |
tis_catch_weight_extension/models/__init__.py
|
guadaltech/jdvp-odoo
|
e399ecf4c66e15a60a8561edec359e75be08140b
|
[
"ADSL"
] | 1
|
2020-04-02T06:39:18.000Z
|
2020-04-02T06:39:18.000Z
|
tis_catch_weight_extension/models/__init__.py
|
guadaltech/jdvp-odoo
|
e399ecf4c66e15a60a8561edec359e75be08140b
|
[
"ADSL"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import pos_order_line
from . import pos_order
from . import stock_move
| 17.166667
| 28
| 0.708738
| 16
| 103
| 4.3125
| 0.625
| 0.434783
| 0.376812
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 0.174757
| 103
| 5
| 29
| 20.6
| 0.8
| 0.203884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0429c4554d9c04583eff6fb7735a8b47fb017b42
| 10,528
|
py
|
Python
|
withoutrest/myapp/views.py
|
ramadevim/restapi
|
0afd7d77d5e431274c2c6f5120ab8be933505417
|
[
"MIT"
] | null | null | null |
withoutrest/myapp/views.py
|
ramadevim/restapi
|
0afd7d77d5e431274c2c6f5120ab8be933505417
|
[
"MIT"
] | null | null | null |
withoutrest/myapp/views.py
|
ramadevim/restapi
|
0afd7d77d5e431274c2c6f5120ab8be933505417
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.views.generic import View
from .models import Employee
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
from .utils import is_json
from .forms import EmpForm
import json
#1.using json functions to get the data
class EmployeeCBV1(View):
    """Return one Employee as JSON, serialized by hand with json.dumps."""

    def get(self, request, id, *args, **kwargs):
        emp = Employee.objects.get(id=id)
        # dict preserves insertion order, so the JSON keys come out in the
        # same eno/ename/esal/eaddr order as before.
        field_names = ('eno', 'ename', 'esal', 'eaddr')
        payload = json.dumps({name: getattr(emp, name) for name in field_names})
        return HttpResponse(payload, content_type='application/json')
from django.http import HttpResponse
from django.core.serializers import serialize
#
# #2. Django serializer: converts model instances directly to JSON, but the
# #   output also includes unwanted "model" and "pk" metadata for each record
class EmployeeCBV(View):
    """Return one Employee as JSON via Django's serializer (selected fields)."""

    def get(self, request, id, *args, **kwargs):
        record = Employee.objects.get(id=id)
        payload = serialize('json', [record, ], fields=('eno', 'ename', 'eaddr'))
        return HttpResponse(payload, content_type='application/json')
from .mixins import HttpMixin
#user defined errors
@method_decorator(csrf_exempt, name='dispatch')
class EmployeeCBV3(HttpMixin, View):
    """GET / PUT / DELETE one Employee addressed by the URL `id`.

    All responses are JSON produced through HttpMixin.render_to_http_response.
    CSRF checks are disabled on dispatch so non-browser clients can PUT/DELETE.
    """

    def get_object_by_id(self, id):
        """Return the Employee with the given id, or None if it does not exist."""
        try:
            emp = Employee.objects.get(id=id)
        except Employee.DoesNotExist:
            emp = None
        return emp

    def get(self, request, id, *args, **kwargs):
        """Serialize one employee; JSON error with 404 when id is unknown."""
        try:
            emp = Employee.objects.get(id=id)
        except Employee.DoesNotExist:
            json_data = json.dumps({'msg': 'the employee details not exist pls check the id '})
            return self.render_to_http_response(json_data, status=404)
        else:
            json_data = serialize('json', [emp, ])
            return self.render_to_http_response(json_data)

    def put(self, request, id, *args, **kwargs):
        """Update an existing employee from a JSON request body.

        Bug fix: the original did original_data.update(provided_data,
        instance=emp), which (a) injected a bogus 'instance' key into the
        form data (dict.update treats keyword args as items) and (b) built
        EmpForm WITHOUT its instance= argument, so form.save() inserted a
        brand-new row instead of updating the existing one. The instance is
        now passed to EmpForm, matching EmployeecrudCBV.put below.
        """
        emp = self.get_object_by_id(id)
        if emp is None:
            json_data = json.dumps({'msg': 'no matched record resource found,not perform to update '})
            return self.render_to_http_response(json_data, status=404)
        data = request.body
        valid_json = is_json(data)
        if not valid_json:
            json_data = json.dumps({'msg': 'pls send vaid json data'})
            return self.render_to_http_response(json_data, status=400)
        provided_data = json.loads(data)
        # Seed with the current field values so a partial JSON body only
        # overrides the fields it actually mentions.
        original_data = {
            'eno': emp.eno,
            'ename': emp.ename,
            'esal': emp.esal,
            'eaddr': emp.eaddr,
        }
        original_data.update(provided_data)
        form = EmpForm(original_data, instance=emp)
        if form.is_valid():
            form.save()
            json_data = json.dumps({'msg': 'Resource updated successfully'})
            return self.render_to_http_response(json_data)
        if form.errors:
            json_data = json.dumps(form.errors)
            return self.render_to_http_response(json_data, status=400)

    def delete(self, request, id, *args, **kwargs):
        """Delete one employee; 404 JSON message when the id is unknown."""
        emp = self.get_object_by_id(id)
        if emp is None:
            json_data = json.dumps({'msg': 'no matched record resource found,not possible to delete the record '})
            return self.render_to_http_response(json_data, status=404)
        # Model.delete() returns (number_of_rows_deleted, per-model detail).
        status, deleted_item = emp.delete()
        if status == 1:
            json_data = json.dumps({'msg': 'resource deleted successfully '})
            return self.render_to_http_response(json_data)
        json_data = json.dumps({'msg': 'unable to delete pls try again '})
        return self.render_to_http_response(json_data)
#
# #user defined errors
class EmployeeCBV4(View):
    """Return one Employee (all fields) as serializer-format JSON."""

    def get(self, request, id, *args, **kwargs):
        record = Employee.objects.get(id=id)
        payload = serialize('json', [record, ])
        return HttpResponse(payload, content_type='application/json')
#
# #qs like model
class EmployeelistCBV(View):
    """Return every Employee as a JSON array in Django serializer format."""

    def get(self, request, *args, **kwargs):
        all_employees = Employee.objects.all()
        payload = serialize('json', all_employees)
        return HttpResponse(payload, content_type='application/json')
#
# #3.convert json to python dict
class EmployeelistCBV1(View):
    """List all employees as JSON, stripped down to just the field values.

    The serializer wraps each record in {model, pk, fields}; this view
    re-parses that output and keeps only the 'fields' mapping per record.
    """

    def get(self, request, *args, **kwargs):
        queryset = Employee.objects.all()
        parsed = json.loads(serialize('json', queryset))
        fields_only = [record['fields'] for record in parsed]
        return HttpResponse(json.dumps(fields_only),
                            content_type='application/json')
# #using mixins(like multiple inheritance) we are calling mixin class serialize
from .mixins import Serialize,HttpMixin
#
@method_decorator(csrf_exempt,name='dispatch')
class EmployeelistCBV2(HttpMixin,Serialize,View):
    """GET lists all employees; POST creates one from a JSON request body.

    Serialization is delegated to the Serialize mixin's add(); responses go
    through HttpMixin.render_to_http_response. CSRF is exempted on dispatch.
    """
    def get(self,request,*args,**kwargs):
        qs=Employee.objects.all()
        json_data=self.add(qs)
        return HttpResponse(json_data,content_type='application/json')

    def post(self, request, *args, **kwargs):
        # Validate that the raw body is JSON before parsing it.
        data=request.body
        valid_json=is_json(data)
        if not valid_json:
            json_data=json.dumps({'msg':'pls send vaid json data'})
            return self.render_to_http_response(json_data,status=400)
        empdata=json.loads(data)
        form=EmpForm(empdata)
        if form.is_valid():
            form.save()
            json_data = json.dumps({'msg': 'Resource created successfully'})
            return self.render_to_http_response(json_data)
        # An invalid form always has errors, so this branch is the
        # counterpart of is_valid() above.
        if form.errors:
            json_data = json.dumps(form.errors)
            return self.render_to_http_response(json_data, status=400)
class EmployeelistCBV3(Serialize, View):
    """List all employees as JSON via the Serialize mixin."""

    def get(self, request, *args, **kwargs):
        # QuerySet.all() is lazy and never raises Employee.DoesNotExist
        # (only Manager.get() does), so the original try/except around it
        # was an unreachable dead branch and has been removed.
        qs = Employee.objects.all()
        json_data = self.add(qs)
        return HttpResponse(json_data, content_type='application/json')
#using one endpoint to get with id and without id
@method_decorator(csrf_exempt, name='dispatch')
class EmployeecrudCBV(HttpMixin, Serialize, View):
    """Single-endpoint CRUD: the record id comes from the JSON body, not URL.

    GET with an 'id' returns that record, GET without lists all; POST
    creates; PUT updates; DELETE removes. All responses are JSON via
    HttpMixin.render_to_http_response; CSRF is exempted on dispatch.

    Fix: the PUT "resource not available" branch previously returned
    HTTP 200; it now returns 404, consistent with the identical condition
    in delete() and with EmployeeCBV3.put.
    """

    def get_object_by_id(self, id):
        """Return the Employee with the given id, or None if it does not exist."""
        try:
            emp = Employee.objects.get(id=id)
        except Employee.DoesNotExist:
            emp = None
        return emp

    def get(self, request, *args, **kwargs):
        """Return one employee when the body carries an id, else all of them."""
        data = request.body
        valid_json = is_json(data)
        if not valid_json:
            json_data = json.dumps({'msg': 'pls send valid json data'})
            return self.render_to_http_response(json_data, status=400)
        pdata = json.loads(data)
        id = pdata.get('id', None)
        if id is not None:
            emp = self.get_object_by_id(id)
            if emp is None:
                json_data = json.dumps({'msg': 'the resource not available with matched id'})
                return self.render_to_http_response(json_data, status=404)
            json_data = serialize('json', [emp, ])
            return self.render_to_http_response(json_data)
        qs = Employee.objects.all()
        json_data = serialize('json', qs)
        return self.render_to_http_response(json_data)

    def post(self, request, *args, **kwargs):
        """Create a new employee from the JSON request body."""
        data = request.body
        valid_json = is_json(data)
        if not valid_json:
            json_data = json.dumps({'msg': 'pls send vaid json data'})
            return self.render_to_http_response(json_data, status=400)
        empdata = json.loads(data)
        form = EmpForm(empdata)
        if form.is_valid():
            form.save()
            json_data = json.dumps({'msg': 'Resource created successfully'})
            return self.render_to_http_response(json_data)
        if form.errors:
            json_data = json.dumps(form.errors)
            return self.render_to_http_response(json_data, status=400)

    def put(self, request, *args, **kwargs):
        """Update the employee identified by 'id' in the JSON body."""
        data = request.body
        valid_json = is_json(data)
        if not valid_json:
            json_data = json.dumps({'msg': 'pls send vaid json data'})
            return self.render_to_http_response(json_data, status=400)
        pdata = json.loads(data)
        id = pdata.get('id', None)
        if id is None:
            json_data = json.dumps({'msg': 'To perform updation id is mandatory pls provide'})
            return self.render_to_http_response(json_data, status=404)
        emp = self.get_object_by_id(id)
        if emp is None:
            json_data = json.dumps({'msg': 'the requested id is not available with matched id so its not possible to update '})
            # Fix: was returned with the default 200 status; this is a
            # missing-resource condition, so respond 404 like delete() does.
            return self.render_to_http_response(json_data, status=404)
        provide_data = json.loads(data)
        # Seed with current values so a partial JSON body only overrides
        # the fields it actually mentions.
        original_data = {
            'eno': emp.eno,
            'ename': emp.ename,
            'esal': emp.esal,
            'eaddr': emp.eaddr,
        }
        original_data.update(provide_data)
        form = EmpForm(original_data, instance=emp)
        if form.is_valid():
            form.save()
            json_data = json.dumps({'msg': 'Resource Update successfully'})
            return self.render_to_http_response(json_data)
        if form.errors:
            json_data = json.dumps(form.errors)
            return self.render_to_http_response(json_data, status=400)

    def delete(self, request, *args, **kwargs):
        """Delete the employee identified by 'id' in the JSON body."""
        data = request.body
        valid_json = is_json(data)
        if not valid_json:
            json_data = json.dumps({'msg': 'pls send valid json data'})
            return self.render_to_http_response(json_data, status=400)
        pdata = json.loads(data)
        id = pdata.get('id', None)
        if id is not None:
            emp1 = self.get_object_by_id(id)
            if emp1 is None:
                json_data = json.dumps({'msg': 'the resource not available with matched id'})
                return self.render_to_http_response(json_data, status=404)
            # Model.delete() returns (rows_deleted, per-model detail).
            status, deleted_item = emp1.delete()
            if status == 1:
                json_data = json.dumps({'msg': 'resource deleted successfully '})
                return self.render_to_http_response(json_data)
            json_data = json.dumps({'msg': 'unable to delete pls try again '})
            return self.render_to_http_response(json_data)
        json_data = json.dumps({'msg': 'To perform deletion id is mandatory pls provide'})
        return self.render_to_http_response(json_data, status=404)
| 34.976744
| 124
| 0.633644
| 1,374
| 10,528
| 4.679039
| 0.118632
| 0.108259
| 0.059729
| 0.076684
| 0.80308
| 0.791725
| 0.791725
| 0.76046
| 0.74475
| 0.705086
| 0
| 0.008457
| 0.258739
| 10,528
| 300
| 125
| 35.093333
| 0.815351
| 0.039989
| 0
| 0.756637
| 0
| 0
| 0.115821
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075221
| false
| 0
| 0.053097
| 0
| 0.336283
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0447cdd436231fafe39972f2677f87b9c8fad74b
| 6,261
|
py
|
Python
|
test/test_decorators.py
|
pythoro/fastwire
|
7b0391b3088f34918dfb0b4fff1836b07d9a378a
|
[
"MIT"
] | null | null | null |
test/test_decorators.py
|
pythoro/fastwire
|
7b0391b3088f34918dfb0b4fff1836b07d9a378a
|
[
"MIT"
] | null | null | null |
test/test_decorators.py
|
pythoro/fastwire
|
7b0391b3088f34918dfb0b4fff1836b07d9a378a
|
[
"MIT"
] | 1
|
2020-05-28T11:00:39.000Z
|
2020-05-28T11:00:39.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 21 22:47:12 2019
@author: Reuben
"""
import fastwire
import unittest
class Test_Signal_Decorators(unittest.TestCase):
def test_receive(self):
signal = fastwire.Signal()
class A(fastwire.Wired):
@fastwire.receive(signal)
def connected(self, a):
self._a = a
a = A()
self.assertEqual(len(signal._receivers.keys()), 1)
def test_supply(self):
signal = fastwire.Signal(receiver_limit=1)
class A(fastwire.Wired):
@fastwire.supply(signal)
def connected(self, a):
self._a = a
a = A()
self.assertEqual(len(signal._receivers.keys()), 1)
def test_receive_emit(self):
signal = fastwire.Signal()
class A(fastwire.Wired):
@fastwire.receive(signal)
def connected(self, a):
self._a = a
a = A()
val = 5.7
signal.emit(a=val)
self.assertEqual(a._a, val)
def test_fn_receive_emit(self):
signal = fastwire.Signal()
test = [0]
@fastwire.fn_receive(signal)
def connected(a):
test[0] = a
val = 5.7
signal.emit(a=val)
self.assertEqual(test[0], val)
class Test_Wire_Decorators(unittest.TestCase):
    """Decorator wiring against ``fastwire.Wire`` instances."""

    def test_receive(self):
        # After a receiver connects, the wire swaps its emit implementation;
        # the private ``_emit`` no longer matches the public ``emit``.
        wire = fastwire.Wire()

        class Listener(fastwire.Wired):
            @fastwire.receive(wire)
            def connected(self, a):
                self._a = a

        listener = Listener()
        self.assertFalse(wire._emit == wire.emit)

    def test_supply(self):
        # ``supply`` has the same effect on the wire's emit binding.
        wire = fastwire.Wire()

        class Supplier(fastwire.Wired):
            @fastwire.supply(wire)
            def connected(self, a):
                self._a = a

        supplier = Supplier()
        self.assertFalse(wire._emit == wire.emit)

    def test_receive_emit(self):
        # An emitted keyword argument must reach the decorated method.
        wire = fastwire.Wire()

        class Listener(fastwire.Wired):
            @fastwire.receive(wire)
            def connected(self, a):
                self._a = a

        listener = Listener()
        expected = 5.7
        wire.emit(a=expected)
        self.assertEqual(listener._a, expected)

    def test_fn_receive_emit(self):
        # ``fn_receive`` wires a plain function to the wire.
        wire = fastwire.Wire()
        captured = [0]

        @fastwire.fn_receive(wire)
        def connected(a):
            captured[0] = a

        expected = 5.7
        wire.emit(a=expected)
        self.assertEqual(captured[0], expected)
class Test_Box_Decorators(unittest.TestCase):
    """Decorator wiring through a ``fastwire.SignalBox``."""

    def test_receive_emit_box(self):
        # ``fastwire.receive`` with a ``box=`` argument resolves the signal
        # by name inside the box; emitting then reaches the method.
        box = fastwire.SignalBox()
        box.add('test')

        class Listener(fastwire.Wired):
            @fastwire.receive('test_signal', box=box)
            def connected(self, a):
                self._a = a

        listener = Listener()
        sig = box['test_signal']
        self.assertEqual(sig.name, 'test_signal')
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(listener._a, expected)

    def test_receive_emit_box_decorator(self):
        # The box also offers ``receive`` directly as a bound decorator.
        box = fastwire.SignalBox()
        box.add('test')

        class Listener(fastwire.Wired):
            @box.receive('test_signal')
            def connected(self, a):
                self._a = a

        listener = Listener()
        sig = box['test_signal']
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(listener._a, expected)

    def test_supply_emit_box_decorator(self):
        # ``box.supply`` behaves like ``box.receive`` for this scenario.
        box = fastwire.SignalBox()
        box.add('test')

        class Supplier(fastwire.Wired):
            @box.supply('test_signal')
            def connected(self, a):
                self._a = a

        supplier = Supplier()
        sig = box['test_signal']
        self.assertEqual(sig.name, 'test_signal')
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(supplier._a, expected)

    def test_fn_receive_emit_box(self):
        # ``fn_receive`` with ``box=`` wires a plain function by signal name.
        box = fastwire.SignalBox()
        box.add('test')
        captured = [0]

        @fastwire.fn_receive('test_signal', box=box)
        def connected(a):
            captured[0] = a

        sig = box['test_signal']
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(captured[0], expected)
class Test_Container_Decorators(unittest.TestCase):
    """Decorator wiring through a ``fastwire.SignalContainer``."""

    def test_receive_emit_container(self):
        # ``fastwire.receive`` with a ``container=`` argument resolves the
        # signal by name; emitting then reaches the decorated method.
        container = fastwire.SignalContainer()

        class Listener(fastwire.Wired):
            @fastwire.receive('test_signal', container=container)
            def connected(self, a):
                self._a = a

        listener = Listener()
        sig = container['test_signal']
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(listener._a, expected)

    def test_receive_emit_container_decorator(self):
        # The container also offers ``receive`` directly as a decorator.
        container = fastwire.SignalContainer()

        class Listener(fastwire.Wired):
            @container.receive('test_signal')
            def connected(self, a):
                self._a = a

        listener = Listener()
        sig = container['test_signal']
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(listener._a, expected)

    def test_supply_emit_container_decorator(self):
        # ``container.supply`` behaves like ``container.receive`` here.
        container = fastwire.SignalContainer()

        class Supplier(fastwire.Wired):
            @container.supply('test_signal')
            def connected(self, a):
                self._a = a

        supplier = Supplier()
        sig = container['test_signal']
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(supplier._a, expected)

    def test_fn_receive_emit_container(self):
        # ``fn_receive`` with ``container=`` wires a plain function by name.
        container = fastwire.SignalContainer()
        captured = [0]

        @fastwire.fn_receive('test_signal', container=container)
        def connected(a):
            captured[0] = a

        sig = container['test_signal']
        self.assertEqual(len(sig._receivers.keys()), 1)
        expected = 5.7
        sig.emit(a=expected)
        self.assertEqual(captured[0], expected)
| 25.144578
| 65
| 0.537933
| 723
| 6,261
| 4.510373
| 0.070539
| 0.026986
| 0.022079
| 0.069917
| 0.948482
| 0.934989
| 0.905244
| 0.795155
| 0.762956
| 0.737504
| 0
| 0.014563
| 0.341958
| 6,261
| 249
| 66
| 25.144578
| 0.776942
| 0.011979
| 0
| 0.846591
| 0
| 0
| 0.034633
| 0
| 0
| 0
| 0
| 0
| 0.147727
| 1
| 0.181818
| false
| 0
| 0.011364
| 0
| 0.284091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
04539e879ee08c78fd0f4208ea4855647c309b2d
| 83
|
py
|
Python
|
src/satextractor/scheduler/__init__.py
|
carderne/sat-extractor
|
d1b576db3809bce7b0d1ed2c47c8a86fdcad1db3
|
[
"BSD-2-Clause"
] | null | null | null |
src/satextractor/scheduler/__init__.py
|
carderne/sat-extractor
|
d1b576db3809bce7b0d1ed2c47c8a86fdcad1db3
|
[
"BSD-2-Clause"
] | null | null | null |
src/satextractor/scheduler/__init__.py
|
carderne/sat-extractor
|
d1b576db3809bce7b0d1ed2c47c8a86fdcad1db3
|
[
"BSD-2-Clause"
] | null | null | null |
from .scheduler import create_tasks_by_splits
from .scheduler import get_scheduler
| 27.666667
| 45
| 0.879518
| 12
| 83
| 5.75
| 0.666667
| 0.376812
| 0.550725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 83
| 2
| 46
| 41.5
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f0a7b6f675c3b629269a03e3eefac7f522fe3573
| 113
|
py
|
Python
|
Chapter06/Exercise6.01/form_project/form_example/views.py
|
PacktPublishing/Web-Development-Projects-with-Django
|
531bc4d58d614888cc81b7fd6f8ec859f5a65217
|
[
"MIT"
] | 97
|
2021-03-01T12:54:30.000Z
|
2022-03-28T02:57:26.000Z
|
Chapter06/Exercise6.01/form_project/form_example/views.py
|
PacktPublishing/Web-Development-Projects-with-Django
|
531bc4d58d614888cc81b7fd6f8ec859f5a65217
|
[
"MIT"
] | 81
|
2020-08-27T04:56:04.000Z
|
2022-03-12T00:53:40.000Z
|
Chapter06/Exercise6.01/form_project/form_example/views.py
|
PacktPublishing/Web-Development-Projects-with-Django
|
531bc4d58d614888cc81b7fd6f8ec859f5a65217
|
[
"MIT"
] | 163
|
2020-12-25T14:38:38.000Z
|
2022-03-30T10:31:40.000Z
|
from django.shortcuts import render
def form_example(request):
    """Render the static ``form-example.html`` template.

    Args:
        request: The incoming Django ``HttpRequest``.

    Returns:
        An ``HttpResponse`` produced by :func:`django.shortcuts.render`.
    """
    return render(request, "form-example.html")
| 18.833333
| 47
| 0.769912
| 15
| 113
| 5.733333
| 0.733333
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132743
| 113
| 5
| 48
| 22.6
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0.150442
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
503af1e0b9aad0941925b51cd654af91a984f84c
| 204,883
|
py
|
Python
|
tests/trusted_values_dict.py
|
ksible/nrpytutorial
|
4ca6e9da22def2a9c9bcbcad75847fd1db159f4b
|
[
"BSD-2-Clause"
] | 1
|
2019-12-23T05:31:25.000Z
|
2019-12-23T05:31:25.000Z
|
tests/trusted_values_dict.py
|
ksible/nrpytutorial
|
4ca6e9da22def2a9c9bcbcad75847fd1db159f4b
|
[
"BSD-2-Clause"
] | null | null | null |
tests/trusted_values_dict.py
|
ksible/nrpytutorial
|
4ca6e9da22def2a9c9bcbcad75847fd1db159f4b
|
[
"BSD-2-Clause"
] | 2
|
2019-11-14T03:31:18.000Z
|
2019-12-12T13:42:52.000Z
|
from mpmath import mpf, mp, mpc
from UnitTesting.standard_constants import precision
mp.dps = precision
trusted_values_dict = {}
# Generated on: 2019-08-16
trusted_values_dict['rfm_Cartesian__reference_metric__True__globals'] = {'UnitVectors[0][0]': mpf('1.0'), 'UnitVectors[0][1]': mpf('0.0'), 'UnitVectors[0][2]': mpf('0.0'), 'UnitVectors[1][0]': mpf('0.0'), 'UnitVectors[1][1]': mpf('1.0'), 'UnitVectors[1][2]': mpf('0.0'), 'UnitVectors[2][0]': mpf('0.0'), 'UnitVectors[2][1]': mpf('0.0'), 'UnitVectors[2][2]': mpf('1.0'), 'ReU[0]': mpf('1.0'), 'ReU[1]': mpf('1.0'), 'ReU[2]': mpf('1.0'), 'ReDD[0][0]': mpf('1.0'), 'ReDD[0][1]': mpf('1.0'), 'ReDD[0][2]': mpf('1.0'), 'ReDD[1][0]': mpf('1.0'), 'ReDD[1][1]': mpf('1.0'), 'ReDD[1][2]': mpf('1.0'), 'ReDD[2][0]': mpf('1.0'), 'ReDD[2][1]': mpf('1.0'), 'ReDD[2][2]': mpf('1.0'), 'ghatDD[0][0]': mpf('1.0'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('1.0'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('1.0'), 'ghatUU[0][0]': mpf('1.0'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('1.0'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('1.0'), 'detgammahat': mpf('1.0'), 'detgammahatdD[0]': mpf('0.0'), 'detgammahatdD[1]': mpf('0.0'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('0.0'), 'detgammahatdDD[0][1]': mpf('0.0'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('0.0'), 'detgammahatdDD[1][1]': mpf('0.0'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('0.0'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('0.0'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('0.0'), 'ReUdD[2][1]': mpf('0.0'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('0.0'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 
'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('0.0'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('0.0'), 'ReUdDD[2][0][1]': mpf('0.0'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('0.0'), 'ReUdDD[2][1][1]': mpf('0.0'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.0'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('0.0'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.0'), 'ReDDdD[0][2][1]': mpf('0.0'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('0.0'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.0'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.0'), 'ReDDdD[1][2][1]': mpf('0.0'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.0'), 'ReDDdD[2][0][1]': mpf('0.0'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('0.0'), 'ReDDdD[2][1][1]': mpf('0.0'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.0'), 'ReDDdD[2][2][1]': mpf('0.0'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('0.0'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.0'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': 
mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.0'), 'ReDDdDD[0][2][0][1]': mpf('0.0'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('0.0'), 'ReDDdDD[0][2][1][1]': mpf('0.0'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('0.0'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('0.0'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('0.0'), 'ReDDdDD[1][2][0][1]': mpf('0.0'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('0.0'), 'ReDDdDD[1][2][1][1]': mpf('0.0'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('0.0'), 'ReDDdDD[2][0][0][1]': mpf('0.0'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('0.0'), 'ReDDdDD[2][0][1][1]': mpf('0.0'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('0.0'), 'ReDDdDD[2][1][0][1]': mpf('0.0'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('0.0'), 'ReDDdDD[2][1][1][1]': mpf('0.0'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': 
mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.0'), 'ReDDdDD[2][2][0][1]': mpf('0.0'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.0'), 'ReDDdDD[2][2][1][1]': mpf('0.0'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 'ghatDDdD[0][0][0]': mpf('0.0'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.0'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.0'), 'ghatDDdD[2][2][1]': mpf('0.0'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('0.0'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 
'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('0.0'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.0'), 
'ghatDDdDD[2][2][0][1]': mpf('0.0'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.0'), 'ghatDDdDD[2][2][1][1]': mpf('0.0'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('0.0'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('0.0'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('0.0'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('0.0'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('0.0'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('0.0'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('0.0'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('0.0'), 'GammahatUDD[2][2][0]': mpf('0.0'), 'GammahatUDD[2][2][1]': mpf('0.0'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('0.0'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('0.0'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': 
mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('0.0'), 'GammahatUDDdD[0][2][2][1]': mpf('0.0'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('0.0'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('0.0'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('0.0'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('0.0'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 
'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('0.0'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('0.0'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('0.0'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.449832882742496220629391245893203'), 'Cart_to_xx[1]': mpf('0.0470781072226313934692143448046409'), 'Cart_to_xx[2]': mpf('0.226312034922729443664479731523897'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.411876352734953155021457860129885'), 'xxCart[1]': mpf('0.462859757962336137993020201975014'), 'xxCart[2]': mpf('0.540732016614090760242561373161152'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.822357829217148799170646219440382'), 'xxSph[1]': mpf('0.853249206618316744644662795398142'), 'xxSph[2]': mpf('0.843616625759465059323443162593044'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('1.0'), 'scalefactor_orthog[1]': mpf('1.0'), 'scalefactor_orthog[2]': mpf('1.0'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16
trusted_values_dict['rfm_Cylindrical__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('-3.14159265358979323846264338327933'), 'xxmin[2]': mpf('0.141690440514945614758346437156433'), 'xxmax[0]': mpf('0.718230102950035198539069369871868'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('0.719765795081978021485724639205728'), 'UnitVectors[0][0]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[0][1]': mpf('0.446508781386790105535572873400882'), 'UnitVectors[0][2]': mpf('0.0'), 'UnitVectors[1][0]': mpf('-0.446508781386790105535572873400882'), 'UnitVectors[1][1]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[1][2]': mpf('0.0'), 'UnitVectors[2][0]': mpf('0.0'), 'UnitVectors[2][1]': mpf('0.0'), 'UnitVectors[2][2]': mpf('1.0'), 'ReU[0]': mpf('1.0'), 'ReU[1]': mpf('2.42791311848755422950231765355056'), 'ReU[2]': mpf('1.0'), 'ReDD[0][0]': mpf('1.0'), 'ReDD[0][1]': mpf('0.411876352734953155021457860129885'), 'ReDD[0][2]': mpf('1.0'), 'ReDD[1][0]': mpf('0.411876352734953155021457860129885'), 'ReDD[1][1]': mpf('0.169642129942247553302413230380042'), 'ReDD[1][2]': mpf('0.411876352734953155021457860129885'), 'ReDD[2][0]': mpf('1.0'), 'ReDD[2][1]': mpf('0.411876352734953155021457860129885'), 'ReDD[2][2]': mpf('1.0'), 'ghatDD[0][0]': mpf('1.0'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.169642129942247553302413230380042'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('1.0'), 'ghatUU[0][0]': mpf('1.0'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('5.89476211092396054332782841162283'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('1.0'), 'detgammahat': mpf('0.169642129942247553302413230380042'), 'detgammahatdD[0]': mpf('0.823752705469906310042915720259771'), 
'detgammahatdD[1]': mpf('0.0'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('2.0'), 'detgammahatdDD[0][1]': mpf('0.0'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('0.0'), 'detgammahatdDD[1][1]': mpf('0.0'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('0.0'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-5.89476211092396054332782841162283'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('0.0'), 'ReUdD[2][1]': mpf('0.0'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('0.0'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('28.6239405189513422055020582143196'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('0.0'), 'ReUdDD[2][0][1]': mpf('0.0'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('0.0'), 'ReUdDD[2][1][1]': mpf('0.0'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.0'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('1.0'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.0'), 'ReDDdD[0][2][1]': mpf('0.0'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('1.0'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.823752705469906310042915720259771'), 'ReDDdD[1][1][1]': mpf('0.0'), 
'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('1.0'), 'ReDDdD[1][2][1]': mpf('0.0'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.0'), 'ReDDdD[2][0][1]': mpf('0.0'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('1.0'), 'ReDDdD[2][1][1]': mpf('0.0'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.0'), 'ReDDdD[2][2][1]': mpf('0.0'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('0.0'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.0'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.0'), 'ReDDdDD[0][2][0][1]': mpf('0.0'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('0.0'), 'ReDDdDD[0][2][1][1]': mpf('0.0'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('0.0'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('2.0'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 
'ReDDdDD[1][2][0][0]': mpf('0.0'), 'ReDDdDD[1][2][0][1]': mpf('0.0'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('0.0'), 'ReDDdDD[1][2][1][1]': mpf('0.0'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('0.0'), 'ReDDdDD[2][0][0][1]': mpf('0.0'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('0.0'), 'ReDDdDD[2][0][1][1]': mpf('0.0'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('0.0'), 'ReDDdDD[2][1][0][1]': mpf('0.0'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('0.0'), 'ReDDdDD[2][1][1][1]': mpf('0.0'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.0'), 'ReDDdDD[2][2][0][1]': mpf('0.0'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.0'), 'ReDDdDD[2][2][1][1]': mpf('0.0'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 'ghatDDdD[0][0][0]': mpf('0.0'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.823752705469906310042915720259771'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 
'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.0'), 'ghatDDdD[2][2][1]': mpf('0.0'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('0.0'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('2.0'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': 
mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.0'), 'ghatDDdDD[2][2][0][1]': mpf('0.0'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.0'), 'ghatDDdDD[2][2][1][1]': mpf('0.0'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('0.0'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('-0.411876352734953155021457860129885'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('0.0'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('0.0'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': 
mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('0.0'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('0.0'), 'GammahatUDD[2][2][0]': mpf('0.0'), 'GammahatUDD[2][2][1]': mpf('0.0'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('0.0'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('-1.0'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('0.0'), 'GammahatUDDdD[0][2][2][1]': mpf('0.0'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': 
mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('0.0'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('0.0'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('0.0'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('0.0'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('0.0'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.452289697623204692344274946859728'), 'Cart_to_xx[1]': mpf('0.104277269822046439469449849362747'), 'Cart_to_xx[2]': mpf('0.226312034922729443664479731523897'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.368538414433415914206730331101263'), 'xxCart[1]': mpf('0.183906408341719647282694547690429'), 'xxCart[2]': 
mpf('0.540732016614090760242561373161152'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.679730272780158644057520291610636'), 'xxSph[1]': mpf('0.65094806122227230752427407191122'), 'xxSph[2]': mpf('0.462859757962336137993020201975014'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('1.0'), 'scalefactor_orthog[1]': mpf('0.411876352734953155021457860129885'), 'scalefactor_orthog[2]': mpf('1.0'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16
trusted_values_dict['rfm_NobleSphericalThetaOptionOne__reference_metric__False__globals'] = {'UnitVectors[0][0]': mpf('-0.0545695741961323835876273379455954'), 'UnitVectors[0][1]': mpf('-0.0327649421501787156483604597716725'), 'UnitVectors[0][2]': mpf('-0.997972254192444092161241005876128'), 'UnitVectors[1][0]': mpf('-0.855593643946423490568160200268831'), 'UnitVectors[1][1]': mpf('-0.513719900895837352176829171485406'), 'UnitVectors[1][2]': mpf('0.0636504506036819437431985637800144'), 'UnitVectors[2][0]': mpf('-0.51476371085240022831765766205346'), 'UnitVectors[2][1]': mpf('0.857332095508774529283937104826865'), 'UnitVectors[2][2]': mpf('0.0'), 'ReU[0]': mpf('1.64018338816322634127071327247782'), 'ReU[1]': mpf('1.06081494065935250690878300819572'), 'ReU[2]': mpf('-16.6662597137684407254322094522362'), 'ReDD[0][0]': mpf('0.371719361022243224141152433162834'), 'ReDD[0][1]': mpf('0.574735420513948630555562605406318'), 'ReDD[0][2]': mpf('-0.036582168493609457405670416812726'), 'ReDD[1][0]': mpf('0.574735420513948630555562605406318'), 'ReDD[1][1]': mpf('0.888629536769217082509967126497512'), 'ReDD[1][2]': mpf('-0.0565616704351018194739056435045649'), 'ReDD[2][0]': mpf('-0.036582168493609457405670416812726'), 'ReDD[2][1]': mpf('-0.0565616704351018194739056435045649'), 'ReDD[2][2]': mpf('0.00360017581009118575495733148181383'), 'ghatDD[0][0]': mpf('0.371719361022243224141152433162834'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.888629536769217082509967126497512'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.00360017581009118575495733148181383'), 'ghatUU[0][0]': mpf('2.69020154680660081089912827934256'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('1.12532833832610558054532619299147'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': 
mpf('277.764212846780907777789154961303'), 'detgammahat': mpf('0.00118921296666664360954678070193038'), 'detgammahatdD[0]': mpf('0.00545499625289626955616805536653528'), 'detgammahatdD[1]': mpf('0.143528487907169511049418358571704'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('0.0261091672168909923789511359079395'), 'detgammahatdDD[0][1]': mpf('0.658374391856888095716102500489699'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('0.658374391856888095716102500489699'), 'detgammahatdDD[1][1]': mpf('7.67676277002538422457597067221396'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('-1.64018338816322634127071327247782'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-0.686099095044679318208066027003533'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('10.7791710590810965408973137311023'), 'ReUdD[2][1]': mpf('1005.74208440156587327154878106824'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('1.64018338816322634127071327247782'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('0.201392116058631740226496433448118'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('-3.16403284106494525435555333666607'), 'ReUdDD[2][0][1]': mpf('-650.479841024267926492190785073261'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('-650.479841024267926492190785073261'), 'ReUdDD[2][1][1]': mpf('-128284.423506282789300917146750942'), 
'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.743438722044486448282304866325669'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('0.946454781536191854696715038569201'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('-0.0602422733207879650205931837112094'), 'ReDDdD[0][2][1]': mpf('-2.20758748660906978156644046873777'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('0.946454781536191854696715038569201'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('1.14947084102789726111112521081264'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('-0.0731643369872189148113408336254519'), 'ReDDdD[1][2][1]': mpf('-3.41327048165691999538961340273616'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('-0.0602422733207879650205931837112094'), 'ReDDdD[2][0][1]': mpf('-2.20758748660906978156644046873777'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('-0.0731643369872189148113408336254519'), 'ReDDdD[2][1][1]': mpf('-3.41327048165691999538961340273616'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.00465694301735611734138021999211459'), 'ReDDdD[2][2][1]': mpf('0.434512408379418924416959720765367'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('1.48687744408897289656460973265134'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('1.68989350358067830297901990489477'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': 
mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('-0.107562482975144980250438717508164'), 'ReDDdDD[0][2][0][1]': mpf('-3.63538013803328731346548953043409'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('-3.63538013803328731346548953043409'), 'ReDDdDD[0][2][1][1]': mpf('15.1440766905750873064523144968993'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('1.68989350358067830297901990489477'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('1.8929095630723837093934300771384'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('-0.120484546641575930041186367422419'), 'ReDDdDD[1][2][0][1]': mpf('-4.41517497321813956313288093747554'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('-4.41517497321813956313288093747554'), 'ReDDdDD[1][2][1][1]': mpf('23.4150765274029767991524144543261'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('-0.107562482975144980250438717508164'), 'ReDDdDD[2][0][0][1]': mpf('-3.63538013803328731346548953043409'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('-3.63538013803328731346548953043409'), 'ReDDdDD[2][0][1][1]': mpf('15.1440766905750873064523144968993'), 
'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('-0.120484546641575930041186367422419'), 'ReDDdDD[2][1][0][1]': mpf('-4.41517497321813956313288093747554'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('-4.41517497321813956313288093747554'), 'ReDDdDD[2][1][1][1]': mpf('23.4150765274029767991524144543261'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.00766889568451664196389465487130302'), 'ReDDdDD[2][2][0][1]': mpf('0.562055753078867882309724869434265'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.562055753078867882309724869434265'), 'ReDDdDD[2][2][1][1]': mpf('23.2403248191299814297708160718616'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 'ghatDDdD[0][0][0]': mpf('0.743438722044486448282304866325669'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('1.14947084102789726111112521081264'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.00465694301735611734138021999211459'), 'ghatDDdD[2][2][1]': 
mpf('0.434512408379418924416959720765367'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('1.48687744408897289656460973265134'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('1.8929095630723837093934300771384'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 
'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.00766889568451664196389465487130302'), 'ghatDDdDD[2][2][0][1]': mpf('0.562055753078867882309724869434265'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.562055753078867882309724869434265'), 'ghatDDdDD[2][2][1][1]': mpf('23.2403248191299814297708160718616'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('1.0'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('-1.54615411727116677672415981139243'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('-0.0062640576543408128592244775324058'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 
'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('-0.244484563251842846404301583730437'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('60.345998542834153083401086079265'), 'GammahatUDD[2][2][0]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[2][2][1]': mpf('60.345998542834153083401086079265'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('0.0'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('0.546154117271166776724159811392434'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('0.0022126777922890577425340422829363'), 'GammahatUDDdD[0][2][2][1]': mpf('-0.756021628162159631561651472250774'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 
'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('-13.0764980554352461127695958590461'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('-4055.61381541891087214405195504767'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': 
mpf('-4055.61381541891087214405195504767'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.0'), 'Cart_to_xx[1]': mpf('0.0'), 'Cart_to_xx[2]': mpf('0.0'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('-0.0514411817788072549425259459356483'), 'xxCart[1]': mpf('-0.0308865768140610593174513625192172'), 'xxCart[2]': mpf('-0.940759991155621953809678754986644'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.942671489315984926011759677305197'), 'xxSph[1]': mpf('3.20528616143060598384026761881878'), 'xxSph[2]': mpf('0.540732016614090760242561373161152'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.609687921007332426011026933803767'), 'scalefactor_orthog[1]': mpf('0.942671489315984926011759677305197'), 'scalefactor_orthog[2]': mpf('-0.0600014650662063897154348268218265'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16
trusted_values_dict['rfm_NobleSphericalThetaOptionTwo__reference_metric__False__globals'] = {'UnitVectors[0][0]': mpf('0.523512699392270781486631530424035'), 'UnitVectors[0][1]': mpf('0.314330165905662513536749556425498'), 'UnitVectors[0][2]': mpf('-0.791916031140130795172530452738851'), 'UnitVectors[1][0]': mpf('-0.678935030444360279175005142958107'), 'UnitVectors[1][1]': mpf('-0.407649634873198663760522650689343'), 'UnitVectors[1][2]': mpf('-0.610630002229880113767903219418943'), 'UnitVectors[2][0]': mpf('-0.51476371085240022831765766205346'), 'UnitVectors[2][1]': mpf('0.857332095508774529283937104826865'), 'UnitVectors[2][2]': mpf('0.0'), 'ReU[0]': mpf('1.64018338816322634127071327247782'), 'ReU[1]': mpf('1.06081494065935250690878300819572'), 'ReU[2]': mpf('1.73724667439447897248269936994164'), 'ReDD[0][0]': mpf('0.371719361022243224141152433162834'), 'ReDD[0][1]': mpf('0.574735420513948630555562605406318'), 'ReDD[0][2]': mpf('0.350950691110023537158230663845447'), 'ReDD[1][0]': mpf('0.574735420513948630555562605406318'), 'ReDD[1][1]': mpf('0.888629536769217082509967126497512'), 'ReDD[1][2]': mpf('0.542623856018924359641522151477897'), 'ReDD[2][0]': mpf('0.350950691110023537158230663845447'), 'ReDD[2][1]': mpf('0.542623856018924359641522151477897'), 'ReDD[2][2]': mpf('0.331342406410821927520560058543292'), 'ghatDD[0][0]': mpf('0.371719361022243224141152433162834'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.888629536769217082509967126497512'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.331342406410821927520560058543292'), 'ghatUU[0][0]': mpf('2.69020154680660081089912827934256'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('1.12532833832610558054532619299147'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': 
mpf('3.0180260076946768419726276656777'), 'detgammahat': mpf('0.109449289950175527843607816560045'), 'detgammahatdD[0]': mpf('0.502050922160628207218944737862551'), 'detgammahatdD[1]': mpf('-0.286869206645810331002438381151021'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('2.40295884183725128702666954714277'), 'detgammahatdDD[0][1]': mpf('-1.31588747447864019327894326668622'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('-1.31588747447864019327894326668622'), 'detgammahatdDD[1][1]': mpf('0.20976779435198582320567920911773'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('-1.64018338816322634127071327247782'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-0.686099095044679318208066027003533'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('-1.12359217945270206387067195969498'), 'ReUdD[2][1]': mpf('2.27668254156096204573500779749543'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('1.64018338816322634127071327247782'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('0.201392116058631740226496433448118'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('0.329810384886419143596174713014955'), 'ReUdDD[2][0][1]': mpf('-1.47248098758687593530720914282347'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('-1.47248098758687593530720914282347'), 'ReUdDD[2][1][1]': mpf('7.28607714604123252136513034515711'), 
'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.743438722044486448282304866325669'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('0.946454781536191854696715038569201'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.577933685359925528161554584318356'), 'ReDDdD[0][2][1]': mpf('-0.459925077523857583614383673029935'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('0.946454781536191854696715038569201'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('1.14947084102789726111112521081264'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.701901382220047074316461327690895'), 'ReDDdD[1][2][1]': mpf('-0.711115052249772969433804837899081'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.577933685359925528161554584318356'), 'ReDDdD[2][0][1]': mpf('-0.459925077523857583614383673029935'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('0.701901382220047074316461327690895'), 'ReDDdD[2][1][1]': mpf('-0.711115052249772969433804837899081'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.428602042590183279028547872591413'), 'ReDDdD[2][2][1]': mpf('-0.868456371881960363876312878942282'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('1.48687744408897289656460973265134'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('1.68989350358067830297901990489477'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 
'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('1.03189967385972951016820242526427'), 'ReDDdDD[0][2][0][1]': mpf('-0.757389005851770349491652992660078'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('-0.757389005851770349491652992660078'), 'ReDDdDD[0][2][1][1]': mpf('-0.266425619261103259441278550692552'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('1.68989350358067830297901990489477'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('1.8929095630723837093934300771384'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('1.15586737071985105632310916863671'), 'ReDDdDD[1][2][0][1]': mpf('-0.919850155047715167228767346059871'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('-0.919850155047715167228767346059871'), 'ReDDdDD[1][2][1][1]': mpf('-0.411935068167075078959023550157058'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('1.03189967385972951016820242526427'), 'ReDDdDD[2][0][0][1]': mpf('-0.757389005851770349491652992660078'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('-0.757389005851770349491652992660078'), 'ReDDdDD[2][0][1][1]': mpf('-0.266425619261103259441278550692552'), 
'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('1.15586737071985105632310916863671'), 'ReDDdDD[2][1][0][1]': mpf('-0.919850155047715167228767346059871'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('-0.919850155047715167228767346059871'), 'ReDDdDD[2][1][1][1]': mpf('-0.411935068167075078959023550157058'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.705807295160108314643968791658677'), 'ReDDdDD[2][2][0][1]': mpf('-1.12337620445588376190424943492008'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('-1.12337620445588376190424943492008'), 'ReDDdDD[2][2][1][1]': mpf('0.635042637551308623007757101048302'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 'ghatDDdD[0][0][0]': mpf('0.743438722044486448282304866325669'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('1.14947084102789726111112521081264'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.428602042590183279028547872591413'), 'ghatDDdD[2][2][1]': 
mpf('-0.868456371881960363876312878942282'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('1.48687744408897289656460973265134'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('1.8929095630723837093934300771384'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 
'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.705807295160108314643968791658677'), 'ghatDDdDD[2][2][0][1]': mpf('-1.12337620445588376190424943492008'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('-1.12337620445588376190424943492008'), 'ghatDDdDD[2][2][1][1]': mpf('0.635042637551308623007757101048302'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('1.0'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('-1.54615411727116677672415981139243'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('-0.576512938970289828386106706485123'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': 
mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('0.488649282939322428896830396342299'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('-1.31051195844395822106593087205666'), 'GammahatUDD[2][2][0]': mpf('0.6467660557441173462071306785005'), 'GammahatUDD[2][2][1]': mpf('-1.31051195844395822106593087205666'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('0.0'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('0.546154117271166776724159811392434'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('0.203643939347026435673551898959971'), 'GammahatUDDdD[0][2][2][1]': mpf('1.51105420143647337126500424958847'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': 
mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('-0.357315738040920735222046260909218'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('-2.47659558838680094108499717027085'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('0.228459724881314639726509809967604'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': 
mpf('-2.47659558838680094108499717027085'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.0'), 'Cart_to_xx[1]': mpf('0.0'), 'Cart_to_xx[2]': mpf('0.0'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.493500496011943414258879846649156'), 'xxCart[1]': mpf('0.296310085631231509384135274214934'), 'xxCart[2]': mpf('-0.746516664488070992847383895703115'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.942671489315984926011759677305197'), 'xxSph[1]': mpf('2.48473676472905039425500471121526'), 'xxSph[2]': mpf('0.540732016614090760242561373161152'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.609687921007332426011026933803767'), 'scalefactor_orthog[1]': mpf('0.942671489315984926011759677305197'), 'scalefactor_orthog[2]': mpf('0.575623493623064283212248042774497'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16
# NOTE(review): Auto-generated regression-reference ("trusted") values for the
# SinhCylindrical coordinate system of the reference_metric module (the dict key
# below names the module/coordinate system that produced them). Each key is a
# stringified indexed quantity — coordinate bounds (xxmin/xxmax), unit vectors,
# rescaling matrices (ReU/ReDD and their derivatives), the reference metric
# ghatDD / ghatUU and derivatives, its determinant, Christoffel symbols
# GammahatUDD and derivatives, coordinate maps (Cart_to_xx, xxCart, xxSph), and
# orthogonal scale factors — evaluated at a fixed sample point. Values are mpf
# objects (presumably mpmath arbitrary-precision floats — confirm against the
# file's imports). Do NOT edit these literals by hand; regenerate the file with
# the unit-test machinery instead, since any digit change silently shifts the
# regression baseline.
trusted_values_dict['rfm_SinhCylindrical__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('-3.14159265358979323846264338327933'), 'xxmin[2]': mpf('-1.0'), 'xxmax[0]': mpf('1.0'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('1.0'), 'UnitVectors[0][0]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[0][1]': mpf('0.446508781386790105535572873400882'), 'UnitVectors[0][2]': mpf('0.0'), 'UnitVectors[1][0]': mpf('-0.446508781386790105535572873400882'), 'UnitVectors[1][1]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[1][2]': mpf('0.0'), 'UnitVectors[2][0]': mpf('0.0'), 'UnitVectors[2][1]': mpf('0.0'), 'UnitVectors[2][2]': mpf('1.0'), 'ReU[0]': mpf('2.42442737434577941989118954244311'), 'ReU[1]': mpf('6.21889030017028134462889208359826'), 'ReU[2]': mpf('1.76058227801499790356951296455967'), 'ReDD[0][0]': mpf('0.170130289877639224608690160335753'), 'ReDD[0][1]': mpf('0.066325101758658980054229341233097'), 'ReDD[0][2]': mpf('0.234279611430473286041287565852189'), 'ReDD[1][0]': mpf('0.066325101758658980054229341233097'), 'ReDD[1][1]': mpf('0.0258567661670377596263514735149345'), 'ReDD[1][2]': mpf('0.0913336424647303048806381528694874'), 'ReDD[2][0]': mpf('0.234279611430473286041287565852189'), 'ReDD[2][1]': mpf('0.0913336424647303048806381528694874'), 'ReDD[2][2]': mpf('0.322617074075928657518909105706452'), 'ghatDD[0][0]': mpf('0.170130289877639224608690160335753'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.0258567661670377596263514735149345'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.322617074075928657518909105706452'), 'ghatUU[0][0]': mpf('5.87784809347717005801964346346203'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('38.6745965655520120047961438931921'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': 
mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('3.09964995766047937046349987010239'), 'detgammahat': mpf('0.0014191986783619635277493368842291'), 'detgammahatdD[0]': mpf('0.0083990086476463435473504450496519'), 'detgammahatdD[1]': mpf('0.0'), 'detgammahatdD[2]': mpf('0.00207276076309648915719588192801535'), 'detgammahatdDD[0][0]': mpf('0.0363268607858075883460465197088319'), 'detgammahatdDD[0][1]': mpf('0.0'), 'detgammahatdDD[0][2]': mpf('0.0122668769631627886923701597520783'), 'detgammahatdDD[1][0]': mpf('0.0'), 'detgammahatdDD[1][1]': mpf('0.0'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0122668769631627886923701597520783'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.00590864804250701275405050004299002'), 'ReUdD[0][0]': mpf('-0.95515333205160408977803075394175'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-15.9520540704949654648749671823417'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('0.0'), 'ReUdD[2][1]': mpf('0.0'), 'ReUdD[2][2]': mpf('-1.28567829216290496708797023556426'), 'ReUdDD[0][0][0]': mpf('-1.69745567962125130075701678253983'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('75.5524760169453838951105382522295'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('0.0'), 'ReUdDD[2][0][1]': mpf('0.0'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('0.0'), 'ReUdDD[2][1][1]': mpf('0.0'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 
'ReUdDD[2][2][2]': mpf('-0.848348618660138949687360581820449'), 'ReDDdD[0][0][0]': mpf('0.134052696301849349314138914934393'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('0.196260436160342252830192407516381'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.0922993007987940796589008432642318'), 'ReDDdD[0][2][1]': mpf('0.0'), 'ReDDdD[0][2][2]': mpf('0.171084427279435545809845654726797'), 'ReDDdD[1][0][0]': mpf('0.196260436160342252830192407516381'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.132650203517317960108458682466194'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.234279611430473286041287565852189'), 'ReDDdD[1][2][1]': mpf('0.0'), 'ReDDdD[1][2][2]': mpf('0.0666970711493618228945119379560682'), 'ReDDdD[2][0][0]': mpf('0.0922993007987940796589008432642318'), 'ReDDdD[2][0][1]': mpf('0.0'), 'ReDDdD[2][0][2]': mpf('0.171084427279435545809845654726797'), 'ReDDdD[2][1][0]': mpf('0.234279611430473286041287565852189'), 'ReDDdD[2][1][1]': mpf('0.0'), 'ReDDdD[2][1][2]': mpf('0.0666970711493618228945119379560682'), 'ReDDdD[2][2][0]': mpf('0.0'), 'ReDDdD[2][2][1]': mpf('0.0'), 'ReDDdD[2][2][2]': mpf('0.4711870316997476328280994981995'), 'ReDDdDD[0][0][0][0]': mpf('0.396670942781268879758242903901208'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.268105392603698698628277829868787'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 
'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.236756618294252124133665510676953'), 'ReDDdDD[0][2][0][1]': mpf('0.0'), 'ReDDdDD[0][2][0][2]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[0][2][1][0]': mpf('0.0'), 'ReDDdDD[0][2][1][1]': mpf('0.0'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.362760582832824736745416205582038'), 'ReDDdDD[1][0][0][0]': mpf('0.268105392603698698628277829868787'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('0.392520872320684505660384815032761'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('0.0922993007987940796589008432642318'), 'ReDDdDD[1][2][0][1]': mpf('0.0'), 'ReDDdDD[1][2][0][2]': mpf('0.171084427279435545809845654726797'), 'ReDDdDD[1][2][1][0]': mpf('0.0'), 'ReDDdDD[1][2][1][1]': mpf('0.0'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.171084427279435545809845654726797'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.141421804357836703824050107975003'), 'ReDDdDD[2][0][0][0]': mpf('0.236756618294252124133665510676953'), 'ReDDdDD[2][0][0][1]': mpf('0.0'), 'ReDDdDD[2][0][0][2]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[2][0][1][0]': mpf('0.0'), 'ReDDdDD[2][0][1][1]': mpf('0.0'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 
'ReDDdDD[2][0][2][2]': mpf('0.362760582832824736745416205582038'), 'ReDDdDD[2][1][0][0]': mpf('0.0922993007987940796589008432642318'), 'ReDDdDD[2][1][0][1]': mpf('0.0'), 'ReDDdDD[2][1][0][2]': mpf('0.171084427279435545809845654726797'), 'ReDDdDD[2][1][1][0]': mpf('0.0'), 'ReDDdDD[2][1][1][1]': mpf('0.0'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.171084427279435545809845654726797'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.141421804357836703824050107975003'), 'ReDDdDD[2][2][0][0]': mpf('0.0'), 'ReDDdDD[2][2][0][1]': mpf('0.0'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.0'), 'ReDDdDD[2][2][1][1]': mpf('0.0'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('1.34317398422202856582819055879823'), 'ghatDDdD[0][0][0]': mpf('0.134052696301849349314138914934369'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.132650203517317960108458682466169'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.0'), 'ghatDDdD[2][2][1]': mpf('0.0'), 'ghatDDdD[2][2][2]': mpf('0.4711870316997476328280994981995'), 'ghatDDdDD[0][0][0][0]': mpf('0.39667094278126887975824290390106'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 
'ghatDDdDD[0][0][1][0]': mpf('0.0'), 'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('0.392520872320684505660384815032663'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 
'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.0'), 'ghatDDdDD[2][2][0][1]': mpf('0.0'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.0'), 'ghatDDdDD[2][2][1][1]': mpf('0.0'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('1.34317398422202856582819055879823'), 'GammahatUDD[0][0][0]': mpf('0.393970692691649641555722688922703'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('-0.38984887292181298480341503217931'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('0.0'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('2.56509655268532030140180718683855'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('2.56509655268532030140180718683855'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('0.0'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('0.0'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('0.0'), 'GammahatUDD[2][2][0]': mpf('0.0'), 
'GammahatUDD[2][2][1]': mpf('0.0'), 'GammahatUDD[2][2][2]': mpf('0.730257431429144850537042435830856'), 'GammahatUDDdD[0][0][0][0]': mpf('0.855359958982459968316640998194212'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('-0.846410969489934443265138659866741'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('0.0'), 'GammahatUDDdD[0][2][2][1]': mpf('0.0'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('-5.56914745891571598014687640508426'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('-5.56914745891571598014687640508426'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 
'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('0.0'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('0.0'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('0.0'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('0.0'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('0.0'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('1.01513275934724972833345051113401'), 'Cart_to_xx[0]': mpf('1.00947962924350724051480325367721'), 'Cart_to_xx[1]': mpf('0.104277269822046439469449849362747'), 'Cart_to_xx[2]': mpf('0.465601964122500227997148500620136'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.143880854602788743991103375842178'), 'xxCart[1]': mpf('0.0717987872168390092852362773750112'), 'xxCart[2]': mpf('0.26787615917052714212915253293425'), 'xxCart[3]': mpf('0.0'), 
'xxSph[0]': mpf('0.312433037335988769363323526305739'), 'xxSph[1]': mpf('0.540624510550945451461516346697901'), 'xxSph[2]': mpf('0.462859757962336137993020201975014'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.412468531984731199577252870370972'), 'scalefactor_orthog[1]': mpf('0.160800392309962227984726836790536'), 'scalefactor_orthog[2]': mpf('0.567993903203131800586992020314654'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16
trusted_values_dict['rfm_SinhCylindricalv2__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('-3.14159265358979323846264338327933'), 'xxmin[2]': mpf('-1.0'), 'xxmax[0]': mpf('1.0'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('1.0'), 'UnitVectors[0][0]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[0][1]': mpf('0.446508781386790105535572873400882'), 'UnitVectors[0][2]': mpf('0.0'), 'UnitVectors[1][0]': mpf('-0.446508781386790105535572873400882'), 'UnitVectors[1][1]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[1][2]': mpf('0.0'), 'UnitVectors[2][0]': mpf('0.0'), 'UnitVectors[2][1]': mpf('0.0'), 'UnitVectors[2][2]': mpf('1.0'), 'ReU[0]': mpf('1.4742905086353521403210909058319'), 'ReU[1]': mpf('3.69977174667138271338558370600268'), 'ReU[2]': mpf('1.48091535857008933659432522100512'), 'ReDD[0][0]': mpf('0.460080536282589978433271482073728'), 'ReDD[0][1]': mpf('0.183333571445193195887434117543122'), 'ReDD[0][2]': mpf('0.458022373746070295742763459873437'), 'ReDD[1][0]': mpf('0.183333571445193195887434117543122'), 'ReDD[1][1]': mpf('0.0730550322567984879547218361284932'), 'ReDD[1][2]': mpf('0.182513431798592088528957576682782'), 'ReDD[2][0]': mpf('0.458022373746070295742763459873437'), 'ReDD[2][1]': mpf('0.182513431798592088528957576682782'), 'ReDD[2][2]': mpf('0.455973418365021609296092868892373'), 'ghatDD[0][0]': mpf('0.460080536282589978433271482073728'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.0730550322567984879547218361284932'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.455973418365021609296092868892373'), 'ghatUU[0][0]': mpf('2.17353250385228532382940913155681'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('13.6883109774678141039243519462021'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': 
mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('2.19311029924877627231413712256536'), 'detgammahat': mpf('0.0153258130383879335082248197770614'), 'detgammahatdD[0]': mpf('0.084264382271346267570966395236431'), 'detgammahatdD[1]': mpf('0.0'), 'detgammahatdD[2]': mpf('0.0188279664353127806703232509215308'), 'detgammahatdDD[0][0]': mpf('0.305772115437855835920566118998286'), 'detgammahatdDD[0][1]': mpf('0.0'), 'detgammahatdDD[0][2]': mpf('0.103519921398189903442611291554724'), 'detgammahatdDD[1][0]': mpf('0.0'), 'detgammahatdDD[1][1]': mpf('0.0'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.103519921398189903442611291554724'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0514872720156230423803721368345611'), 'ReUdD[0][0]': mpf('-0.35320014746227294079161583273495'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-9.28467686476400673245521863517211'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('0.0'), 'ReUdD[2][1]': mpf('0.0'), 'ReUdD[2][2]': mpf('-0.909662169140936653573785969559611'), 'ReUdDD[0][0][0]': mpf('-0.736758067992823953940940119736813'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('44.3759358562498906246258655478182'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('0.0'), 'ReUdDD[2][0][1]': mpf('0.0'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('0.0'), 'ReUdDD[2][1][1]': mpf('0.0'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 
'ReUdDD[2][2][2]': mpf('-0.811279162558858852233166869812379'), 'ReDDdD[0][0][0]': mpf('0.220445715831064779184963583717713'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('0.50400230345801942213084092925318'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.109729777815550630674319018165059'), 'ReDDdD[0][2][1]': mpf('0.0'), 'ReDDdD[0][2][2]': mpf('0.281343308113994372655811715985134'), 'ReDDdD[1][0][0]': mpf('0.50400230345801942213084092925318'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.366667142890386391774868235086244'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.458022373746070295742763459873437'), 'ReDDdD[1][2][1]': mpf('0.0'), 'ReDDdD[1][2][2]': mpf('0.112110096844137740363722067649559'), 'ReDDdD[2][0][0]': mpf('0.109729777815550630674319018165059'), 'ReDDdD[2][0][1]': mpf('0.0'), 'ReDDdD[2][0][2]': mpf('0.281343308113994372655811715985134'), 'ReDDdD[2][1][0]': mpf('0.458022373746070295742763459873437'), 'ReDDdD[2][1][1]': mpf('0.0'), 'ReDDdD[2][1][2]': mpf('0.112110096844137740363722067649559'), 'ReDDdD[2][2][0]': mpf('0.0'), 'ReDDdD[2][2][1]': mpf('0.0'), 'ReDDdD[2][2][2]': mpf('0.56016944711955650378598001550361'), 'ReDDdDD[0][0][0][0]': mpf('0.618277379351734932367867200026215'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.443332147316138492644124203185809'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': 
mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.281467474801595059129427642474179'), 'ReDDdDD[0][2][0][1]': mpf('0.0'), 'ReDDdDD[0][2][0][2]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[0][2][1][0]': mpf('0.0'), 'ReDDdDD[0][2][1][1]': mpf('0.0'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.596549107656949758401873098842612'), 'ReDDdDD[1][0][0][0]': mpf('0.443332147316138492644124203185809'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('1.00800460691603884426168185850636'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('0.109729777815550630674319018165059'), 'ReDDdDD[1][2][0][1]': mpf('0.0'), 'ReDDdDD[1][2][0][2]': mpf('0.281343308113994372655811715985134'), 'ReDDdDD[1][2][1][0]': mpf('0.0'), 'ReDDdDD[1][2][1][1]': mpf('0.0'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.281343308113994372655811715985134'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.237713769273682373263135676662962'), 'ReDDdDD[2][0][0][0]': mpf('0.281467474801595059129427642474179'), 'ReDDdDD[2][0][0][1]': mpf('0.0'), 'ReDDdDD[2][0][0][2]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[2][0][1][0]': mpf('0.0'), 'ReDDdDD[2][0][1][1]': mpf('0.0'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0674022503240333821556902929084716'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': 
mpf('0.596549107656949758401873098842612'), 'ReDDdDD[2][1][0][0]': mpf('0.109729777815550630674319018165059'), 'ReDDdDD[2][1][0][1]': mpf('0.0'), 'ReDDdDD[2][1][0][2]': mpf('0.281343308113994372655811715985134'), 'ReDDdDD[2][1][1][0]': mpf('0.0'), 'ReDDdDD[2][1][1][1]': mpf('0.0'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.281343308113994372655811715985134'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.237713769273682373263135676662962'), 'ReDDdDD[2][2][0][0]': mpf('0.0'), 'ReDDdDD[2][2][0][1]': mpf('0.0'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.0'), 'ReDDdDD[2][2][1][1]': mpf('0.0'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('1.53184874201772181807982445638381'), 'ghatDDdD[0][0][0]': mpf('0.220445715831064779184963583717713'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.366667142890386391774868235086293'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.0'), 'ghatDDdD[2][2][1]': mpf('0.0'), 'ghatDDdD[2][2][2]': mpf('0.56016944711955650378598001550361'), 'ghatDDdDD[0][0][0][0]': mpf('0.618277379351734932367867200026215'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 
'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('1.00800460691603884426168185850636'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 
'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.0'), 'ghatDDdDD[2][2][0][1]': mpf('0.0'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.0'), 'ghatDDdDD[2][2][1][1]': mpf('0.0'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('1.53184874201772181807982445638362'), 'GammahatUDD[0][0][0]': mpf('0.239572964346901801480460573283098'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('-0.398481476583452606683752581783008'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('0.0'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('2.50952693855161780759655989969539'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('2.50952693855161780759655989969539'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('0.0'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('0.0'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('0.0'), 'GammahatUDD[2][2][0]': mpf('0.0'), 'GammahatUDD[2][2][1]': mpf('0.0'), 
'GammahatUDD[2][2][2]': mpf('0.614256691901196059842069843690625'), 'GammahatUDDdD[0][0][0][0]': mpf('0.55713257971687907727849314663157'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('-0.904534611417571723579853508600824'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('0.0'), 'GammahatUDDdD[0][2][2][1]': mpf('0.0'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('-5.69651064753903897858402754262962'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('-5.69651064753903897858402754262962'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 
'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('0.0'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('0.0'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('0.0'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('0.0'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('0.0'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.925134059414371887500448387977357'), 'Cart_to_xx[0]': mpf('0.0'), 'Cart_to_xx[1]': mpf('0.0'), 'Cart_to_xx[2]': mpf('0.0'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.24184714959092006180930816339851'), 'xxCart[1]': mpf('0.120685494122307930891869095551719'), 'xxCart[2]': mpf('0.325877311282355567501584114029747'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.423380507658791201465294544300084'), 'xxSph[1]': mpf('0.692420291444873681426031737946003'), 'xxSph[2]': 
mpf('0.462859757962336137993020201975014'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.678292367849285164307315721099046'), 'scalefactor_orthog[1]': mpf('0.270286944295869547933441888214479'), 'scalefactor_orthog[2]': mpf('0.675258038356465334788821376448638'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16 (auto-generated trusted-values entry; regenerate rather than editing by hand)
# Trusted regression values for the SinhSpherical reference metric (globals case).
# Keys are stringified variable/tensor-component names (e.g. 'ghatDD[0][1]'); values
# are arbitrary-precision mpmath floats sampled at a fixed test point. Auto-generated
# (see "Generated on" stamp above) — regenerate via the test harness; do not edit by hand.
trusted_values_dict['rfm_SinhSpherical__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('0.0'), 'xxmin[2]': mpf('-3.14159265358979323846264338327933'), 'xxmax[0]': mpf('1.0'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('3.14159265358979323846264338327933'), 'UnitVectors[0][0]': mpf('0.382806309209406061575292302010405'), 'UnitVectors[0][1]': mpf('0.229846517234847206916722007015529'), 'UnitVectors[0][2]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[1][0]': mpf('0.767122970337180743799689847271659'), 'UnitVectors[1][1]': mpf('0.460599887674263978933755506836872'), 'UnitVectors[1][2]': mpf('-0.446508781386790105535572873400882'), 'UnitVectors[2][0]': mpf('-0.51476371085240022831765766205346'), 'UnitVectors[2][1]': mpf('0.857332095508774529283937104826865'), 'UnitVectors[2][2]': mpf('0.0'), 'ReU[0]': mpf('247.833707114625064366037653030213'), 'ReU[1]': mpf('3393.69399218618229232762010509358'), 'ReU[2]': mpf('7600.50895672392304157688454187031'), 'ReDD[0][0]': mpf('0.0000162809316746568057796281259824413'), 'ReDD[0][1]': mpf('0.00000118895918768764282364418631665124'), 'ReDD[0][2]': mpf('0.00000053088071801303725563195597345089'), 'ReDD[1][0]': mpf('0.00000118895918768764282364418631665124'), 'ReDD[1][1]': mpf('0.0000000868269690110752208228657012114203'), 'ReDD[1][2]': mpf('0.0000000387690041246437848560512812815462'), 'ReDD[2][0]': mpf('0.00000053088071801303725563195597345089'), 'ReDD[2][1]': mpf('0.0000000387690041246437848560512812815462'), 'ReDD[2][2]': mpf('0.0000000173107007872741356321817476485075'), 'ghatDD[0][0]': mpf('0.0000162809316746568057796281259824413'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.0000000868269690110752208228657012114203'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.0000000173107007872741356321817476485075'), 'ghatUU[0][0]': 
mpf('61421.546382177758247036338285014'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('11517158.9126005875178127279386043'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('57767736.4012405770584438072162531'), 'detgammahat': mpf('2.44708212239471016412181307489409e-20'), 'detgammahatdD[0]': mpf('2.01050329749143394094791746779037e-18'), 'detgammahatdD[1]': mpf('9.80763828196756856777581163064267e-20'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('1.65180895641353753903933665843263e-16'), 'detgammahatdDD[0][1]': mpf('8.05787796251101549743149910865712e-18'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('8.05787796251101549743149910865712e-18'), 'detgammahatdDD[1][1]': mpf('1.47598080901857184259247870358261e-19'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('-3393.52259528307712384582810142583'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-46471.317589071165965293283595822'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('-104077.051843724417367791534876966'), 'ReUdD[2][1]': mpf('-15231.0055155515676157444468219751'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('46464.2767906763045979767539354625'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('636383.961597198351295639905825777'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': 
mpf('0.0'), 'ReUdDD[2][0][0]': mpf('1425243.99995154423209527002119581'), 'ReUdDD[2][0][1]': mpf('208564.736874856230578464485084313'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('208564.736874856230578464485084313'), 'ReUdDD[2][1][1]': mpf('68644.71805788786337344783855962'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.000445861139337712345143082948286335'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('0.0000325610410885926602842306968348126'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.0000145387907771527102680638416768012'), 'ReDDdD[0][2][1]': mpf('0.00000106385601151134240103601729358559'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('0.0000325610410885926602842306968348126'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.00000237791837537528564728837263330247'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.00000106176143602607451126391194690178'), 'ReDDdD[1][2][1]': mpf('0.0000000776909703043640066196920005730217'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.0000145387907771527102680638416768012'), 'ReDDdD[2][0][1]': mpf('0.00000106385601151134240103601729358559'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('0.00000106176143602607451126391194690178'), 'ReDDdD[2][1][1]': mpf('0.0000000776909703043640066196920005730217'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.000000474085804923490832133746858810849'), 'ReDDdD[2][2][1]': mpf('0.0000000693794009507177403584412361063517'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('0.0122104299526183605133016516917922'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': 
mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.000891722278675424690286165896572573'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.000398161827986815527394267515583531'), 'ReDDdDD[0][2][0][1]': mpf('0.0000291349439592939458681765182213194'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('0.0000291349439592939458681765182213194'), 'ReDDdDD[0][2][1][1]': mpf('-0.00000053088071801303725563195597345089'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('0.000891722278675424690286165896572573'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('0.0000651220821771853205684613936696373'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('0.0000290775815543054205361276833536055'), 'ReDDdDD[1][2][0][1]': mpf('0.00000212771202302268480207203458717118'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('0.00000212771202302268480207203458717118'), 'ReDDdDD[1][2][1][1]': mpf('-0.0000000387690041246437848560512812815462'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 
'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('0.000398161827986815527394267515583531'), 'ReDDdDD[2][0][0][1]': mpf('0.0000291349439592939458681765182213194'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('0.0000291349439592939458681765182213194'), 'ReDDdDD[2][0][1][1]': mpf('-0.00000053088071801303725563195597345089'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('0.0000290775815543054205361276833536055'), 'ReDDdDD[2][1][0][1]': mpf('0.00000212771202302268480207203458717118'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('0.00000212771202302268480207203458717118'), 'ReDDdDD[2][1][1][1]': mpf('-0.0000000387690041246437848560512812815462'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.0000129833955054879194633877590703987'), 'ReDDdDD[2][2][0][1]': mpf('0.00000190008420508376176850932390324571'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.00000190008420508376176850932390324571'), 'ReDDdDD[2][2][1][1]': mpf('0.000000104411134873053899117004411828811'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 'ghatDDdD[0][0][0]': mpf('0.000445861139337712345143082948286335'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.00000237791837537528564728837263330285'), 
'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.000000474085804923490832133746858810849'), 'ghatDDdD[2][2][1]': mpf('0.0000000693794009507177403584412361063399'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('0.0122104299526183605133016516917922'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('0.0000651220821771853205684613936696253'), 
'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.0000129833955054879194633877590703957'), 'ghatDDdDD[2][2][0][1]': mpf('0.00000190008420508376176850932390324609'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.00000190008420508376176850932390324628'), 'ghatDDdDD[2][2][1][1]': mpf('0.000000104411134873053899117004411828799'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('13.692740324920959531765079025631'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': 
mpf('-0.073027711893072944321711940222569'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('-0.0145595416281201343844413599402184'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('13.6934319051950902659204434785403'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('13.6934319051950902659204434785403'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('-0.399526793005224249185024570708187'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('13.6934319051950902659204434785403'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('2.0039454728985211544890548692019'), 'GammahatUDD[2][2][0]': mpf('13.6934319051950902659204434785403'), 'GammahatUDD[2][2][1]': mpf('2.0039454728985211544890548692019'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('0.0094696291075097950623243311965229'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('-0.0000505045250101516617685357912163476'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 
'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('-0.0000100690917903931580197546542581525'), 'GammahatUDDdD[0][2][2][1]': mpf('-0.0583530550662978146485254544521022'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('-0.00947010739078536180372632438284589'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('-0.00947010739078536180372632438284589'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('-0.601259816288967363598586845163865'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('-0.00947010739078536180372632438284589'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 
'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('-5.01579745835047758287644822246028'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('-0.00947010739078536180372632438284589'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('-5.01579745835047758287644822246028'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.955796757460684468367345260302451'), 'Cart_to_xx[1]': mpf('1.10685304642936859912503384510848'), 'Cart_to_xx[2]': mpf('0.104277269822046439469449849362747'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.000112799300729765039225867850617427'), 'xxCart[1]': mpf('0.000067727531640760125382027627792032'), 'xxCart[2]': mpf('0.000263659379169035223590720523445735'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.000294664163092621802492973845133577'), 'xxSph[1]': mpf('0.462859757962336137993020201975014'), 'xxSph[2]': mpf('0.540732016614090760242561373161152'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.00403496365221011697029086110355342'), 'scalefactor_orthog[1]': mpf('0.000294664163092621802492973845133577'), 'scalefactor_orthog[2]': mpf('0.000131570136380844933865306956882799'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16 (auto-generated trusted-values entry; regenerate rather than editing by hand)
trusted_values_dict['rfm_SinhSphericalv2__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('0.0'), 'xxmin[2]': mpf('-3.14159265358979323846264338327933'), 'xxmax[0]': mpf('1.0'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('3.14159265358979323846264338327933'), 'UnitVectors[0][0]': mpf('0.382806309209406061575292302010405'), 'UnitVectors[0][1]': mpf('0.229846517234847206916722007015529'), 'UnitVectors[0][2]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[1][0]': mpf('0.767122970337180743799689847271659'), 'UnitVectors[1][1]': mpf('0.460599887674263978933755506836872'), 'UnitVectors[1][2]': mpf('-0.446508781386790105535572873400882'), 'UnitVectors[2][0]': mpf('-0.51476371085240022831765766205346'), 'UnitVectors[2][1]': mpf('0.857332095508774529283937104826865'), 'UnitVectors[2][2]': mpf('0.0'), 'ReU[0]': mpf('5.06509566609258253688496652408591'), 'ReU[1]': mpf('12.5079179868440953248321015715175'), 'ReU[2]': mpf('28.0127032395563546206438772111089'), 'ReDD[0][0]': mpf('0.0389784616651962506629890228362308'), 'ReDD[0][1]': mpf('0.0157843725437757984726670299848942'), 'ReDD[0][2]': mpf('0.00704786094947644003535205732324743'), 'ReDD[1][0]': mpf('0.0157843725437757984726670299848942'), 'ReDD[1][1]': mpf('0.00639189967887227692421035582474637'), 'ReDD[1][2]': mpf('0.00285403933635987537548739771749194'), 'ReDD[2][0]': mpf('0.00704786094947644003535205732324743'), 'ReDD[2][1]': mpf('0.00285403933635987537548739771749194'), 'ReDD[2][2]': mpf('0.00127435362610801110733410779814474'), 'ghatDD[0][0]': mpf('0.0389784616651962506629890228362308'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.00639189967887227692421035582474637'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.00127435362610801110733410779814474'), 'ghatUU[0][0]': mpf('25.6551941066698623686552296895304'), 'ghatUU[0][1]': 
mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('156.448012365618046387444408270475'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('784.711542787451084888796241297983'), 'detgammahat': mpf('0.000000317500639426923387985184160619648'), 'detgammahatdD[0]': mpf('0.00000331388910259155137460961354941193'), 'detgammahatdD[1]': mpf('0.00000127250793804393767866540267337681'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('0.000030105129997696056390201723883476'), 'detgammahatdDD[0][1]': mpf('0.00001328170612965216460982713914679'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('0.00001328170612965216460982713914679'), 'detgammahatdDD[1][1]': mpf('0.00000191503524281673397382908195806805'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('-1.41744202182150547575120560918498'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-30.8874743300373441862648264490669'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('-69.1755137135386816424730764846296'), 'ReUdD[2][1]': mpf('-56.1359298405586945860310385823226'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('-18.6163174982017699195717065073564'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('143.905433474406488457453594191871'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': 
mpf('322.290264992006512965100338285432'), 'ReUdDD[2][0][1]': mpf('138.623957541675408618545667967503'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('138.623957541675408618545667967503'), 'ReUdDD[2][1][1]': mpf('252.999388181429551529770273827891'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.0218158602136846331992780808119476'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('0.043395640412955853910659000673585'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.019376534518288259271573399220949'), 'ReDDdD[0][2][1]': mpf('0.0141235290433215849365251725099841'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('0.043395640412955853910659000673585'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.0315687450875515969453340599697884'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.0140957218989528800707041146464949'), 'ReDDdD[1][2][1]': mpf('0.00571933920757267194065552166505176'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.019376534518288259271573399220949'), 'ReDDdD[2][0][1]': mpf('0.0141235290433215849365251725099841'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('0.0140957218989528800707041146464949'), 'ReDDdD[2][1][1]': mpf('0.00571933920757267194065552166505176'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.00629386360786854141770535819517249'), 'ReDDdD[2][2][1]': mpf('0.00510747035982192706565154965598437'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('0.304839057091270032377957654390805'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 
'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.0932101267160479967572213651724233'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.0416191400928908789190431899564271'), 'ReDDdDD[0][2][0][1]': mpf('0.0388295186283856845234009210642454'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('0.0388295186283856845234009210642454'), 'ReDDdDD[0][2][1][1]': mpf('-0.00704786094947644003535205732324743'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('0.0932101267160479967572213651724233'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('0.08679128082591170782131800134717'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('0.0387530690365765185431467984418979'), 'ReDDdDD[1][2][0][1]': mpf('0.0282470580866431698730503450199681'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('0.0282470580866431698730503450199681'), 'ReDDdDD[1][2][1][1]': mpf('-0.00285403933635987537548739771749194'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': 
mpf('0.0416191400928908789190431899564271'), 'ReDDdDD[2][0][0][1]': mpf('0.0388295186283856845234009210642454'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('0.0388295186283856845234009210642454'), 'ReDDdDD[2][0][1][1]': mpf('-0.00704786094947644003535205732324743'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('0.0387530690365765185431467984418979'), 'ReDDdDD[2][1][0][1]': mpf('0.0282470580866431698730503450199681'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('0.0282470580866431698730503450199681'), 'ReDDdDD[2][1][1][1]': mpf('-0.00285403933635987537548739771749194'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.0173035856305199293704091458612069'), 'ReDDdDD[2][2][0][1]': mpf('0.0252251189680578334807876062876196'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.0252251189680578334807876062876196'), 'ReDDdDD[2][2][1][1]': mpf('0.00768638485331250941908428045691379'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 'ghatDDdD[0][0][0]': mpf('0.0218158602136846331992780808119507'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.0315687450875515969453340599697884'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 
'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.00629386360786854141770535819517403'), 'ghatDDdD[2][2][1]': mpf('0.00510747035982192706565154965598591'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('0.304839057091270032377957654390854'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('0.0867912808259117078213180013470961'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 
'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.0173035856305199293704091458611791'), 'ghatDDdDD[2][2][0][1]': mpf('0.0252251189680578334807876062876227'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.0252251189680578334807876062875611'), 'ghatDDdDD[2][2][1][1]': mpf('0.0076863848533125094190842804569161'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('0.279845064193027762996940558008213'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('-0.40495114146255844914132121931631'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('-0.0807351462703864606933455802489757'), 
'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('2.4694337109121580976683169588921'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('2.4694337109121580976683169588921'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('-0.399526793005224249185024570708237'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('2.4694337109121580976683169588921'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][0]': mpf('2.4694337109121580976683169588921'), 'GammahatUDD[2][2][1]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('3.75372607057897466694645457438201'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('-0.886676421822370464236730937168069'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('-0.176776759664846533123440311982792'), 
'GammahatUDDdD[0][2][2][1]': mpf('-0.323577661744681744862492422317115'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('-5.40704401723875236498854510160585'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('-5.40704401723875236498854510160585'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('-0.601259816288967363598586845163963'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('-5.40704401723875236498854510160585'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': 
mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('-5.40704401723875236498854510160585'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.0'), 'Cart_to_xx[1]': mpf('0.0'), 'Cart_to_xx[2]': mpf('0.0'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.0306051182628511059965057012269052'), 'xxCart[1]': mpf('0.0183760812532190553365645466400591'), 'xxCart[2]': mpf('0.0715370257472608782548437529824701'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.0799493569634695493728353687805186'), 'xxSph[1]': mpf('0.462859757962336137993020201975014'), 'xxSph[2]': mpf('0.540732016614090760242561373161152'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.197429637251341397137392611731467'), 'scalefactor_orthog[1]': mpf('0.0799493569634695493728353687805186'), 'scalefactor_orthog[2]': mpf('0.0356980899504162702384856100295795'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16
trusted_values_dict['rfm_Spherical__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('0.0'), 'xxmin[2]': mpf('-3.14159265358979323846264338327933'), 'xxmax[0]': mpf('0.354937471352802291768568920815596'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('3.14159265358979323846264338327933'), 'UnitVectors[0][0]': mpf('0.382806309209406061575292302010405'), 'UnitVectors[0][1]': mpf('0.229846517234847206916722007015529'), 'UnitVectors[0][2]': mpf('0.89477925106949349834357305423809'), 'UnitVectors[1][0]': mpf('0.767122970337180743799689847271659'), 'UnitVectors[1][1]': mpf('0.460599887674263978933755506836872'), 'UnitVectors[1][2]': mpf('-0.446508781386790105535572873400882'), 'UnitVectors[2][0]': mpf('-0.51476371085240022831765766205346'), 'UnitVectors[2][1]': mpf('0.857332095508774529283937104826865'), 'UnitVectors[2][2]': mpf('0.0'), 'ReU[0]': mpf('1.0'), 'ReU[1]': mpf('2.42791311848755422950231765355056'), 'ReU[2]': mpf('5.43754841942148569414023177163917'), 'ReDD[0][0]': mpf('1.0'), 'ReDD[0][1]': mpf('0.411876352734953155021457860129885'), 'ReDD[0][2]': mpf('0.183906408341719647282694547690429'), 'ReDD[1][0]': mpf('0.411876352734953155021457860129885'), 'ReDD[1][1]': mpf('0.169642129942247553302413230380042'), 'ReDD[1][2]': mpf('0.0757467007123724527689343539161123'), 'ReDD[2][0]': mpf('0.183906408341719647282694547690429'), 'ReDD[2][1]': mpf('0.0757467007123724527689343539161123'), 'ReDD[2][2]': mpf('0.0338215670291513298663469469928762'), 'ghatDD[0][0]': mpf('1.0'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.169642129942247553302413230380042'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.0338215670291513298663469469928762'), 'ghatUU[0][0]': mpf('1.0'), 'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': 
mpf('5.89476211092396054332782841162283'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('29.5669328135530973007843198587076'), 'detgammahat': mpf('0.00573756266880972544377979111558305'), 'detgammahatdD[0]': mpf('0.0557212146869903791885764536246544'), 'detgammahatdD[1]': mpf('0.0229955254712656127315158404877634'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('0.405858804349815958396163363914466'), 'detgammahatdDD[0][1]': mpf('0.223324551832801915665195788045795'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('0.223324551832801915665195788045795'), 'detgammahatdDD[1][1]': mpf('0.0346066538274459059495911839041824'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('0.0'), 'ReUdD[0][1]': mpf('0.0'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-5.89476211092396054332782841162283'), 'ReUdD[1][1]': mpf('0.0'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('-13.2018951399246908178649920966756'), 'ReUdD[2][1]': mpf('-10.8965505387661953996938232611417'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('0.0'), 'ReUdDD[0][0][1]': mpf('0.0'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('0.0'), 'ReUdDD[0][1][1]': mpf('0.0'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('28.6239405189513422055020582143196'), 'ReUdDD[1][0][1]': mpf('0.0'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': mpf('0.0'), 'ReUdDD[1][1][1]': mpf('0.0'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('64.106108798240484363409875606733'), 'ReUdDD[2][0][1]': mpf('26.4558779993330726480291033870892'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': 
mpf('26.4558779993330726480291033870892'), 'ReUdDD[2][1][1]': mpf('49.1097346641624035099886358431359'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.0'), 'ReDDdD[0][0][1]': mpf('0.0'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('1.0'), 'ReDDdD[0][1][1]': mpf('0.0'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.446508781386790105535572873400882'), 'ReDDdD[0][2][1]': mpf('0.368538414433415914206730331101263'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('1.0'), 'ReDDdD[1][0][1]': mpf('0.0'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.823752705469906310042915720259771'), 'ReDDdD[1][1][1]': mpf('0.0'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.367812816683439294565389095380858'), 'ReDDdD[1][2][1]': mpf('0.151792257979557964076657671944573'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.446508781386790105535572873400882'), 'ReDDdD[2][0][1]': mpf('0.368538414433415914206730331101263'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('0.367812816683439294565389095380858'), 'ReDDdD[2][1][1]': mpf('0.151792257979557964076657671944573'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.164231652555765300486215613380094'), 'ReDDdD[2][2][1]': mpf('0.135553152268803385884212672398363'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('0.0'), 'ReDDdDD[0][0][0][1]': mpf('0.0'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('0.0'), 'ReDDdDD[0][0][1][1]': mpf('0.0'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.0'), 'ReDDdDD[0][1][0][1]': mpf('0.0'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.0'), 'ReDDdDD[0][1][1][1]': mpf('0.0'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 
'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.0'), 'ReDDdDD[0][2][0][1]': mpf('0.89477925106949349834357305423809'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('0.89477925106949349834357305423809'), 'ReDDdDD[0][2][1][1]': mpf('-0.183906408341719647282694547690429'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('0.0'), 'ReDDdDD[1][0][0][1]': mpf('0.0'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.0'), 'ReDDdDD[1][0][1][1]': mpf('0.0'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('2.0'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.0'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('0.893017562773580211071145746801763'), 'ReDDdDD[1][2][0][1]': mpf('0.737076828866831828413460662202525'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('0.737076828866831828413460662202525'), 'ReDDdDD[1][2][1][1]': mpf('-0.0757467007123724527689343539161123'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('0.0'), 'ReDDdDD[2][0][0][1]': mpf('0.89477925106949349834357305423809'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('0.89477925106949349834357305423809'), 'ReDDdDD[2][0][1][1]': mpf('-0.183906408341719647282694547690429'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': 
mpf('0.893017562773580211071145746801763'), 'ReDDdDD[2][1][0][1]': mpf('0.737076828866831828413460662202525'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('0.737076828866831828413460662202525'), 'ReDDdDD[2][1][1][1]': mpf('-0.0757467007123724527689343539161123'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.398740183711032636401413154836037'), 'ReDDdDD[2][2][0][1]': mpf('0.65822255329153743092424550670865'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.65822255329153743092424550670865'), 'ReDDdDD[2][2][1][1]': mpf('0.203997991767889787139438672788579'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 'ghatDDdD[0][0][0]': mpf('0.0'), 'ghatDDdD[0][0][1]': mpf('0.0'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.823752705469906310042915720259771'), 'ghatDDdD[1][1][1]': mpf('0.0'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.164231652555765300486215613380094'), 'ghatDDdD[2][2][1]': mpf('0.135553152268803385884212672398388'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('0.0'), 'ghatDDdDD[0][0][0][1]': mpf('0.0'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('0.0'), 
'ghatDDdDD[0][0][1][1]': mpf('0.0'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('2.0'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.0'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 
'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.398740183711032636401413154836037'), 'ghatDDdDD[2][2][0][1]': mpf('0.65822255329153743092424550670865'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.65822255329153743092424550670865'), 'ghatDDdDD[2][2][1][1]': mpf('0.203997991767889787139438672788579'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('0.0'), 'GammahatUDD[0][0][1]': mpf('0.0'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.0'), 'GammahatUDD[0][1][1]': mpf('-0.411876352734953155021457860129885'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('-0.0821158262778826502431078066900468'), 'GammahatUDD[1][0][0]': mpf('0.0'), 'GammahatUDD[1][0][1]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[1][1][1]': mpf('0.0'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('-0.399526793005224249185024570708237'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 
'GammahatUDD[2][1][2]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][0]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[2][2][1]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('0.0'), 'GammahatUDDdD[0][0][0][1]': mpf('0.0'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('0.0'), 'GammahatUDDdD[0][0][1][1]': mpf('0.0'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('0.0'), 'GammahatUDDdD[0][1][0][1]': mpf('0.0'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('-1.0'), 'GammahatUDDdD[0][1][1][1]': mpf('0.0'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('-0.199370091855516318200706577418018'), 'GammahatUDDdD[0][2][2][1]': mpf('-0.329111276645768715462122753354325'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('0.0'), 'GammahatUDDdD[1][0][0][1]': mpf('0.0'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[1][0][1][1]': mpf('0.0'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[1][1][0][1]': mpf('0.0'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': 
mpf('0.0'), 'GammahatUDDdD[1][1][1][1]': mpf('0.0'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('0.0'), 'GammahatUDDdD[1][2][2][1]': mpf('-0.601259816288967363598586845163963'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.505750044712757928228004429495509'), 'Cart_to_xx[1]': mpf('1.10685304642936859912503384510848'), 'Cart_to_xx[2]': 
mpf('0.104277269822046439469449849362747'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.157668866441098877443891511556478'), 'xxCart[1]': mpf('0.094668345207520417874693473132221'), 'xxCart[2]': mpf('0.368538414433415914206730331101263'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.411876352734953155021457860129885'), 'xxSph[1]': mpf('0.462859757962336137993020201975014'), 'xxSph[2]': mpf('0.540732016614090760242561373161152'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('1.0'), 'scalefactor_orthog[1]': mpf('0.411876352734953155021457860129885'), 'scalefactor_orthog[2]': mpf('0.183906408341719647282694547690429'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-08-16
trusted_values_dict['rfm_SymTP__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('0.0'), 'xxmin[2]': mpf('-3.14159265358979323846264338327933'), 'xxmax[0]': mpf('0.703939963785164879439548712980468'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('3.14159265358979323846264338327933'), 'UnitVectors[0][0]': mpf('0.46801561789376499018226136073532'), 'UnitVectors[0][1]': mpf('0.28100832508889521211875207319997'), 'UnitVectors[0][2]': mpf('0.837851838118322175953119540393393'), 'UnitVectors[1][0]': mpf('0.718317272099859683543177433880891'), 'UnitVectors[1][1]': mpf('0.43129572133429204037732757049013'), 'UnitVectors[1][2]': mpf('-0.545897698622872791818466443125938'), 'UnitVectors[2][0]': mpf('-0.51476371085240022831765766205346'), 'UnitVectors[2][1]': mpf('0.857332095508774529283937104826865'), 'UnitVectors[2][2]': mpf('0.0'), 'ReU[0]': mpf('1.22259118158302606744727614251624'), 'ReU[1]': mpf('2.27344506109741631003710499801229'), 'ReU[2]': mpf('5.43754841942148569414023177163917'), 'ReDD[0][0]': mpf('0.669017506192200994262607300899508'), 'ReDD[0][1]': mpf('0.359777730014916887125929516927243'), 'ReDD[0][2]': mpf('0.150423470340752313113694664542724'), 'ReDD[1][0]': mpf('0.359777730014916887125929516927243'), 'ReDD[1][1]': mpf('0.193477769739406203021334896004792'), 'ReDD[1][2]': mpf('0.0808932714006055866049439032401947'), 'ReDD[2][0]': mpf('0.150423470340752313113694664542724'), 'ReDD[2][1]': mpf('0.0808932714006055866049439032401947'), 'ReDD[2][2]': mpf('0.0338215670291513298663469469928762'), 'ghatDD[0][0]': mpf('0.669017506192200994262607300899508'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.193477769739406203021334896004792'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.0338215670291513298663469469928762'), 'ghatUU[0][0]': mpf('1.49472919728457981804822138394706'), 'ghatUU[0][1]': 
mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('5.16855244582823497883889210639943'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('29.5669328135530973007843198587076'), 'detgammahat': mpf('0.00437786414407357156991423964575856'), 'detgammahatdD[0]': mpf('0.0460666417858444964940265654337666'), 'detgammahatdD[1]': mpf('0.0218691879861552420592228986154051'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('0.370164647736969709559665353460694'), 'detgammahatdDD[0][1]': mpf('0.211714924080265088152071411759566'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('0.211714924080265088152071411759566'), 'detgammahatdDD[1][1]': mpf('0.0696999055972490038137406309538025'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('-0.86143411822332434087109428681728'), 'ReUdD[0][1]': mpf('-0.30183034521239483153983010952263'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-4.83972014546837921403902522544467'), 'ReUdD[1][1]': mpf('-0.561262601881320536577531870540302'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('-13.2018951399246908178649920966756'), 'ReUdD[2][1]': mpf('-10.8965505387661953996938232611417'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('4.63684829419573949369650592583662'), 'ReUdDD[0][0][1]': mpf('1.49774437803037133808177349997465'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('1.49774437803037133808177349997465'), 'ReUdDD[0][1][1]': mpf('-0.230688080582734693220745618808786'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('19.1580344822830421057725291557467'), 'ReUdDD[1][0][1]': mpf('3.58445510873064045380039019531003'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': 
mpf('3.58445510873064045380039019531003'), 'ReUdDD[1][1][1]': mpf('-0.428971421809036794165601971698718'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('64.106108798240484363409875606733'), 'ReUdDD[2][0][1]': mpf('26.4558779993330726480291033870892'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('26.4558779993330726480291033870892'), 'ReUdDD[2][1][1]': mpf('49.1097346641624035099886358431359'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('0.942775498799896463178323525955064'), 'ReDDdD[0][0][1]': mpf('0.330330838123118733961771018912601'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('1.01939478240482418103049853764282'), 'ReDDdD[0][1][1]': mpf('0.177642106512707063538468686523353'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.471203047754640065821774090975542'), 'ReDDdD[0][2][1]': mpf('0.33857661387548699188482489467723'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('1.01939478240482418103049853764282'), 'ReDDdD[1][0][1]': mpf('0.177642106512707063538468686523353'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.823752705469906310042915720259771'), 'ReDDdD[1][1][1]': mpf('0.0955306449303118023167907922425562'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.368607797459871691892746369536828'), 'ReDDdD[1][2][1]': mpf('0.18207643962797058555478104917644'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.471203047754640065821774090975542'), 'ReDDdD[2][0][1]': mpf('0.33857661387548699188482489467723'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('0.368607797459871691892746369536828'), 'ReDDdD[2][1][1]': mpf('0.18207643962797058555478104917644'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.164231652555765300486215613380094'), 'ReDDdD[2][2][1]': 
mpf('0.135553152268803385884212672398363'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('-3.08185473609943954439429467458492'), 'ReDDdDD[0][0][0][1]': mpf('-0.940919312975376716727830815584994'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('-0.940919312975376716727830815584994'), 'ReDDdDD[0][0][1][1]': mpf('0.497124754889442128453622559663637'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('0.301103853982326328307401653540245'), 'ReDDdDD[0][1][0][1]': mpf('-0.252999220062428953046055855518453'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('-0.252999220062428953046055855518453'), 'ReDDdDD[0][1][1][1]': mpf('0.267338917431799816105444208443336'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('0.0935144582744054397119417551178965'), 'ReDDdDD[0][2][0][1]': mpf('0.902483337077520421137803715458438'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('0.902483337077520421137803715458438'), 'ReDDdDD[0][2][1][1]': mpf('0.0451335545608129844213320731463686'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('0.301103853982326328307401653540245'), 'ReDDdDD[1][0][0][1]': mpf('-0.252999220062428953046055855518453'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('-0.252999220062428953046055855518453'), 'ReDDdDD[1][0][1][1]': mpf('0.267338917431799816105444208443336'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('2.0'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': mpf('0.0'), 
'ReDDdDD[1][1][1][1]': mpf('0.143766923836857097734916423694981'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('0.887710513855508657608411248276111'), 'ReDDdDD[1][2][0][1]': mpf('0.744643345230956058040148772768342'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('0.744643345230956058040148772768342'), 'ReDDdDD[1][2][1][1]': mpf('0.0242714841646142085255686359306849'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('0.0935144582744054397119417551178965'), 'ReDDdDD[2][0][0][1]': mpf('0.902483337077520421137803715458438'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('0.902483337077520421137803715458438'), 'ReDDdDD[2][0][1][1]': mpf('0.0451335545608129844213320731463686'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('0.887710513855508657608411248276111'), 'ReDDdDD[2][1][0][1]': mpf('0.744643345230956058040148772768342'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('0.744643345230956058040148772768342'), 'ReDDdDD[2][1][1][1]': mpf('0.0242714841646142085255686359306849'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.398740183711032636401413154836037'), 'ReDDdDD[2][2][0][1]': mpf('0.65822255329153743092424550670865'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.65822255329153743092424550670865'), 'ReDDdDD[2][2][1][1]': mpf('0.203997991767889787139438672788579'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 
'ghatDDdD[0][0][0]': mpf('0.942775498799896463178323525954866'), 'ghatDDdD[0][0][1]': mpf('0.330330838123118733961771018912651'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.823752705469906310042915720259771'), 'ghatDDdD[1][1][1]': mpf('0.0955306449303118023167907922425685'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.164231652555765300486215613380094'), 'ghatDDdD[2][2][1]': mpf('0.135553152268803385884212672398388'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('-3.08185473609943954439429467458216'), 'ghatDDdDD[0][0][0][1]': mpf('-0.940919312975376716727830815584994'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('-0.940919312975376716727830815585093'), 'ghatDDdDD[0][0][1][1]': mpf('0.497124754889442128453622559663686'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 
'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('2.0'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.143766923836857097734916423695006'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': mpf('0.398740183711032636401413154836037'), 
'ghatDDdDD[2][2][0][1]': mpf('0.65822255329153743092424550670865'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.65822255329153743092424550670865'), 'ghatDDdDD[2][2][1][1]': mpf('0.203997991767889787139438672788579'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('0.70459703227036929199923564248519'), 'GammahatUDD[0][0][1]': mpf('0.246877574253055871079564112437535'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.246877574253055871079564112437535'), 'GammahatUDD[0][1][1]': mpf('-0.615643610104016980757518243879733'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('-0.122740923096699539559151587496857'), 'GammahatUDD[1][0][0]': mpf('-0.853666130656868049083075437423599'), 'GammahatUDD[1][0][1]': mpf('2.12880453030705496863829403774088'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('2.12880453030705496863829403774088'), 'GammahatUDD[1][1][1]': mpf('0.246877574253055871079564112437535'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('-0.350306788349325449768511693327361'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][0]': mpf('2.42791311848755422950231765355056'), 'GammahatUDD[2][2][1]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('-3.2961830836872215720889891110999'), 'GammahatUDDdD[0][0][0][1]': mpf('-1.05110819700224338103408450215333'), 'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 
'GammahatUDDdD[0][0][1][0]': mpf('-1.05110819700224338103408450215353'), 'GammahatUDDdD[0][0][1][1]': mpf('0.249636369574948434085186353529697'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('-1.05110819700224338103408450215353'), 'GammahatUDDdD[0][1][0][1]': mpf('0.249636369574948434085186353529697'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('-0.627167876053566412234171360052535'), 'GammahatUDDdD[0][1][1][1]': mpf('0.303977202133747659787320682779843'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('-0.125038517057628608696774482995599'), 'GammahatUDDdD[0][2][2][1]': mpf('-0.431328271646645030846640009317683'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('6.06617206082702293160894775114729'), 'GammahatUDDdD[1][0][0][1]': mpf('-0.86320563676567429087508609266758'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('-3.89506501068344685413450651719186'), 'GammahatUDDdD[1][0][1][1]': mpf('-1.05110819700224338103408450215333'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('-3.89506501068344685413450651719186'), 'GammahatUDDdD[1][1][0][1]': mpf('-1.05110819700224338103408450215333'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': mpf('-1.05110819700224338103408450215353'), 
'GammahatUDDdD[1][1][1][1]': mpf('0.249636369574948434085186353529697'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('-0.209559537786423404810855325094516'), 'GammahatUDDdD[1][2][2][1]': mpf('-0.354221379343867210010300833893623'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('-5.89476211092396054332782841162283'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.488559899010444541612584647472559'), 'Cart_to_xx[1]': 
mpf('1.18304329028982242800313235440622'), 'Cart_to_xx[2]': mpf('0.104277269822046439469449849362747'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.157668866441098877443891511556478'), 'xxCart[1]': mpf('0.094668345207520417874693473132221'), 'xxCart[2]': mpf('0.481185686225892864539011427087351'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.51513224676565798533078140613188'), 'xxSph[1]': mpf('0.365063002562851945619681171911353'), 'xxSph[2]': mpf('0.540732016614090760242561373161152'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.81793490339525247225351794534983'), 'scalefactor_orthog[1]': mpf('0.43986108004619617973383870310006'), 'scalefactor_orthog[2]': mpf('0.183906408341719647282694547690429'), 'scalefactor_orthog[3]': mpf('0.0')}
# Generated on: 2019-09-01
trusted_values_dict['rfm_SinhSymTP__reference_metric__True__globals'] = {'xxmin[0]': mpf('0.0'), 'xxmin[1]': mpf('0.0'), 'xxmin[2]': mpf('-3.14159265358979323846264338327933'), 'xxmax[0]': mpf('0.703939963785164879439548712980468'), 'xxmax[1]': mpf('3.14159265358979323846264338327933'), 'xxmax[2]': mpf('3.14159265358979323846264338327933'), 'UnitVectors[0][0]': mpf('0.548708314535992782637999074472269'), 'UnitVectors[0][1]': mpf('0.329458245697069934190436321098313'), 'UnitVectors[0][2]': mpf('0.768359583724494915439669332573844'), 'UnitVectors[1][0]': mpf('0.65873933201877091420097131703718'), 'UnitVectors[1][1]': mpf('0.39552363058702650514468033089682'), 'UnitVectors[1][2]': mpf('-0.640018398250175998992789818958649'), 'UnitVectors[2][0]': mpf('-0.51476371085240022831765766205346'), 'UnitVectors[2][1]': mpf('0.857332095508774529283937104826865'), 'UnitVectors[2][2]': mpf('0.0'), 'ReU[0]': mpf('1.47924548965904004613535260480264'), 'ReU[1]': mpf('3.3192675659769180732357091661618'), 'ReU[2]': mpf('8.65692489958368916669562314958266'), 'ReDD[0][0]': mpf('0.457003464572873171217597122524094'), 'ReDD[0][1]': mpf('0.203665507612976364376650054332535'), 'ReDD[0][2]': mpf('0.0780901210960588670528050278484045'), 'ReDD[1][0]': mpf('0.203665507612976364376650054332535'), 'ReDD[1][1]': mpf('0.0907643862831964259063923813739372'), 'ReDD[1][2]': mpf('0.0348011894558658216595561356355559'), 'ReDD[2][0]': mpf('0.0780901210960588670528050278484045'), 'ReDD[2][1]': mpf('0.0348011894558658216595561356355559'), 'ReDD[2][2]': mpf('0.0133435903346959164434571859896549'), 'ghatDD[0][0]': mpf('0.457003464572873171217597122524094'), 'ghatDD[0][1]': mpf('0.0'), 'ghatDD[0][2]': mpf('0.0'), 'ghatDD[1][0]': mpf('0.0'), 'ghatDD[1][1]': mpf('0.0907643862831964259063923813739372'), 'ghatDD[1][2]': mpf('0.0'), 'ghatDD[2][0]': mpf('0.0'), 'ghatDD[2][1]': mpf('0.0'), 'ghatDD[2][2]': mpf('0.0133435903346959164434571859896549'), 'ghatUU[0][0]': mpf('2.18816721867661315206647823056707'), 
'ghatUU[0][1]': mpf('0.0'), 'ghatUU[0][2]': mpf('0.0'), 'ghatUU[1][0]': mpf('0.0'), 'ghatUU[1][1]': mpf('11.0175371745463341742615369918431'), 'ghatUU[1][2]': mpf('0.0'), 'ghatUU[2][0]': mpf('0.0'), 'ghatUU[2][1]': mpf('0.0'), 'ghatUU[2][2]': mpf('74.9423487170320667622284962391978'), 'detgammahat': mpf('0.000553487309930337166234751458831454'), 'detgammahatdD[0]': mpf('0.0119362217024101175051450721808919'), 'detgammahatdD[1]': mpf('0.00338342132720490082984454407504984'), 'detgammahatdD[2]': mpf('0.0'), 'detgammahatdDD[0][0]': mpf('0.257311958499953778442166823140297'), 'detgammahatdDD[0][1]': mpf('0.0665292379613693038404723022155649'), 'detgammahatdDD[0][2]': mpf('0.0'), 'detgammahatdDD[1][0]': mpf('0.0665292379613693038404723022155649'), 'detgammahatdDD[1][1]': mpf('0.0156573228855882525980296613505993'), 'detgammahatdDD[1][2]': mpf('0.0'), 'detgammahatdDD[2][0]': mpf('0.0'), 'detgammahatdDD[2][1]': mpf('0.0'), 'detgammahatdDD[2][2]': mpf('0.0'), 'ReUdD[0][0]': mpf('-6.32415494200831229543101902404877'), 'ReUdD[0][1]': mpf('-0.77846213379588986364026929202053'), 'ReUdD[0][2]': mpf('0.0'), 'ReUdD[1][0]': mpf('-9.16759317776976424770695704592514'), 'ReUdD[1][1]': mpf('-1.74678518887724652108913552297186'), 'ReUdD[1][2]': mpf('0.0'), 'ReUdD[2][0]': mpf('-32.4249641606388199467921339332317'), 'ReUdD[2][1]': mpf('-17.3480054617432187457541289969179'), 'ReUdD[2][2]': mpf('0.0'), 'ReUdDD[0][0][0]': mpf('26.6586600713615640377576416336181'), 'ReUdDD[0][0][1]': mpf('7.62824568235473883068884456845697'), 'ReUdDD[0][0][2]': mpf('0.0'), 'ReUdDD[0][1][0]': mpf('7.62824568235473883068884456845697'), 'ReUdDD[0][1][1]': mpf('0.057480669817244549414364848766103'), 'ReUdDD[0][1][2]': mpf('0.0'), 'ReUdDD[0][2][0]': mpf('0.0'), 'ReUdDD[0][2][1]': mpf('0.0'), 'ReUdDD[0][2][2]': mpf('0.0'), 'ReUdDD[1][0][0]': mpf('15.4258113776845981390532969468595'), 'ReUdDD[1][0][1]': mpf('14.4735086843176833909358393855098'), 'ReUdDD[1][0][2]': mpf('0.0'), 'ReUdDD[1][1][0]': 
mpf('14.4735086843176833909358393855098'), 'ReUdDD[1][1][1]': mpf('0.128980432476414295545293163281777'), 'ReUdDD[1][1][2]': mpf('0.0'), 'ReUdDD[1][2][0]': mpf('0.0'), 'ReUdDD[1][2][1]': mpf('0.0'), 'ReUdDD[1][2][2]': mpf('0.0'), 'ReUdDD[2][0][0]': mpf('150.241222027929966935789897479339'), 'ReUdDD[2][0][1]': mpf('64.977860138608960091680798202983'), 'ReUdDD[2][0][2]': mpf('0.0'), 'ReUdDD[2][1][0]': mpf('64.977860138608960091680798202983'), 'ReUdDD[2][1][1]': mpf('78.1858389173419738223505278091928'), 'ReUdDD[2][1][2]': mpf('0.0'), 'ReUdDD[2][2][0]': mpf('0.0'), 'ReUdDD[2][2][1]': mpf('0.0'), 'ReUdDD[2][2][2]': mpf('0.0'), 'ReDDdD[0][0][0]': mpf('3.90761471195646721040231827642928'), 'ReDDdD[0][0][1]': mpf('0.48100182785146012298889210827456'), 'ReDDdD[0][0][2]': mpf('0.0'), 'ReDDdD[0][1][0]': mpf('1.43323282699357910359780632581264'), 'ReDDdD[0][1][1]': mpf('0.214360478697237470214706704336409'), 'ReDDdD[0][1][2]': mpf('0.0'), 'ReDDdD[0][2][0]': mpf('0.626345978477168648810060348590238'), 'ReDDdD[0][2][1]': mpf('0.197583756276826201783950957514413'), 'ReDDdD[0][2][2]': mpf('0.0'), 'ReDDdD[1][0][0]': mpf('1.43323282699357910359780632581264'), 'ReDDdD[1][0][1]': mpf('0.214360478697237470214706704336409'), 'ReDDdD[1][0][2]': mpf('0.0'), 'ReDDdD[1][1][0]': mpf('0.50137022818128390608956124981649'), 'ReDDdD[1][1][1]': mpf('0.0955306449303118023167907922425562'), 'ReDDdD[1][1][2]': mpf('0.0'), 'ReDDdD[1][2][0]': mpf('0.226468210371401414822176038888592'), 'ReDDdD[1][2][1]': mpf('0.088054028333917886603870516060628'), 'ReDDdD[1][2][2]': mpf('0.0'), 'ReDDdD[2][0][0]': mpf('0.626345978477168648810060348590238'), 'ReDDdD[2][0][1]': mpf('0.197583756276826201783950957514413'), 'ReDDdD[2][0][2]': mpf('0.0'), 'ReDDdD[2][1][0]': mpf('0.226468210371401414822176038888592'), 'ReDDdD[2][1][1]': mpf('0.088054028333917886603870516060628'), 'ReDDdD[2][1][2]': mpf('0.0'), 'ReDDdD[2][2][0]': mpf('0.0999582284461237485230114268939553'), 'ReDDdD[2][2][1]': 
mpf('0.053479654886852688892632681595028'), 'ReDDdD[2][2][2]': mpf('0.0'), 'ReDDdDD[0][0][0][0]': mpf('33.6461277030887655376842392041423'), 'ReDDdDD[0][0][0][1]': mpf('1.45582384014133679648446347791919'), 'ReDDdDD[0][0][0][2]': mpf('0.0'), 'ReDDdDD[0][0][1][0]': mpf('1.45582384014133679648446347791919'), 'ReDDdDD[0][0][1][1]': mpf('0.723874032260071030172139837618865'), 'ReDDdDD[0][0][1][2]': mpf('0.0'), 'ReDDdDD[0][0][2][0]': mpf('0.0'), 'ReDDdDD[0][0][2][1]': mpf('0.0'), 'ReDDdDD[0][0][2][2]': mpf('0.0'), 'ReDDdDD[0][1][0][0]': mpf('10.7451856287246364150683462252173'), 'ReDDdDD[0][1][0][1]': mpf('0.324396995189713971490675606422249'), 'ReDDdDD[0][1][0][2]': mpf('0.0'), 'ReDDdDD[0][1][1][0]': mpf('0.324396995189713971490675606422249'), 'ReDDdDD[0][1][1][1]': mpf('0.322597493578936954412344137333148'), 'ReDDdDD[0][1][1][2]': mpf('0.0'), 'ReDDdDD[0][1][2][0]': mpf('0.0'), 'ReDDdDD[0][1][2][1]': mpf('0.0'), 'ReDDdDD[0][1][2][2]': mpf('0.0'), 'ReDDdDD[0][2][0][0]': mpf('4.78407724980688767505599125734505'), 'ReDDdDD[0][2][0][1]': mpf('1.35777614134257772593543435159905'), 'ReDDdDD[0][2][0][2]': mpf('0.0'), 'ReDDdDD[0][2][1][0]': mpf('1.35777614134257772593543435159905'), 'ReDDdDD[0][2][1][1]': mpf('0.126834804943425177790310365245279'), 'ReDDdDD[0][2][1][2]': mpf('0.0'), 'ReDDdDD[0][2][2][0]': mpf('0.0'), 'ReDDdDD[0][2][2][1]': mpf('0.0'), 'ReDDdDD[0][2][2][2]': mpf('0.0'), 'ReDDdDD[1][0][0][0]': mpf('10.7451856287246364150683462252173'), 'ReDDdDD[1][0][0][1]': mpf('0.324396995189713971490675606422249'), 'ReDDdDD[1][0][0][2]': mpf('0.0'), 'ReDDdDD[1][0][1][0]': mpf('0.324396995189713971490675606422249'), 'ReDDdDD[1][0][1][1]': mpf('0.322597493578936954412344137333148'), 'ReDDdDD[1][0][1][2]': mpf('0.0'), 'ReDDdDD[1][0][2][0]': mpf('0.0'), 'ReDDdDD[1][0][2][1]': mpf('0.0'), 'ReDDdDD[1][0][2][2]': mpf('0.0'), 'ReDDdDD[1][1][0][0]': mpf('3.31062381280758068734077928326181'), 'ReDDdDD[1][1][0][1]': mpf('0.0'), 'ReDDdDD[1][1][0][2]': mpf('0.0'), 'ReDDdDD[1][1][1][0]': 
mpf('0.0'), 'ReDDdDD[1][1][1][1]': mpf('0.143766923836857097734916423694981'), 'ReDDdDD[1][1][1][2]': mpf('0.0'), 'ReDDdDD[1][1][2][0]': mpf('0.0'), 'ReDDdDD[1][1][2][1]': mpf('0.0'), 'ReDDdDD[1][1][2][2]': mpf('0.0'), 'ReDDdDD[1][2][0][0]': mpf('1.46173359539746816193465338066765'), 'ReDDdDD[1][2][0][1]': mpf('0.471844291738266348029665500005062'), 'ReDDdDD[1][2][0][2]': mpf('0.0'), 'ReDDdDD[1][2][1][0]': mpf('0.471844291738266348029665500005062'), 'ReDDdDD[1][2][1][1]': mpf('0.056524461922709156989760400687313'), 'ReDDdDD[1][2][1][2]': mpf('0.0'), 'ReDDdDD[1][2][2][0]': mpf('0.0'), 'ReDDdDD[1][2][2][1]': mpf('0.0'), 'ReDDdDD[1][2][2][2]': mpf('0.0'), 'ReDDdDD[2][0][0][0]': mpf('4.78407724980688767505599125734505'), 'ReDDdDD[2][0][0][1]': mpf('1.35777614134257772593543435159905'), 'ReDDdDD[2][0][0][2]': mpf('0.0'), 'ReDDdDD[2][0][1][0]': mpf('1.35777614134257772593543435159905'), 'ReDDdDD[2][0][1][1]': mpf('0.126834804943425177790310365245279'), 'ReDDdDD[2][0][1][2]': mpf('0.0'), 'ReDDdDD[2][0][2][0]': mpf('0.0'), 'ReDDdDD[2][0][2][1]': mpf('0.0'), 'ReDDdDD[2][0][2][2]': mpf('0.0'), 'ReDDdDD[2][1][0][0]': mpf('1.46173359539746816193465338066765'), 'ReDDdDD[2][1][0][1]': mpf('0.471844291738266348029665500005062'), 'ReDDdDD[2][1][0][2]': mpf('0.0'), 'ReDDdDD[2][1][1][0]': mpf('0.471844291738266348029665500005062'), 'ReDDdDD[2][1][1][1]': mpf('0.056524461922709156989760400687313'), 'ReDDdDD[2][1][1][2]': mpf('0.0'), 'ReDDdDD[2][1][2][0]': mpf('0.0'), 'ReDDdDD[2][1][2][1]': mpf('0.0'), 'ReDDdDD[2][1][2][2]': mpf('0.0'), 'ReDDdDD[2][2][0][0]': mpf('0.660039373658507022405700360423847'), 'ReDDdDD[2][2][0][1]': mpf('0.400621678747131729256894464832948'), 'ReDDdDD[2][2][0][2]': mpf('0.0'), 'ReDDdDD[2][2][1][0]': mpf('0.400621678747131729256894464832948'), 'ReDDdDD[2][2][1][1]': mpf('0.0804831316332918866011126875397545'), 'ReDDdDD[2][2][1][2]': mpf('0.0'), 'ReDDdDD[2][2][2][0]': mpf('0.0'), 'ReDDdDD[2][2][2][1]': mpf('0.0'), 'ReDDdDD[2][2][2][2]': mpf('0.0'), 
'ghatDDdD[0][0][0]': mpf('3.90761471195646721040231827643047'), 'ghatDDdD[0][0][1]': mpf('0.481001827851460122988892108274807'), 'ghatDDdD[0][0][2]': mpf('0.0'), 'ghatDDdD[0][1][0]': mpf('0.0'), 'ghatDDdD[0][1][1]': mpf('0.0'), 'ghatDDdD[0][1][2]': mpf('0.0'), 'ghatDDdD[0][2][0]': mpf('0.0'), 'ghatDDdD[0][2][1]': mpf('0.0'), 'ghatDDdD[0][2][2]': mpf('0.0'), 'ghatDDdD[1][0][0]': mpf('0.0'), 'ghatDDdD[1][0][1]': mpf('0.0'), 'ghatDDdD[1][0][2]': mpf('0.0'), 'ghatDDdD[1][1][0]': mpf('0.501370228181283906089561249816688'), 'ghatDDdD[1][1][1]': mpf('0.0955306449303118023167907922425685'), 'ghatDDdD[1][1][2]': mpf('0.0'), 'ghatDDdD[1][2][0]': mpf('0.0'), 'ghatDDdD[1][2][1]': mpf('0.0'), 'ghatDDdD[1][2][2]': mpf('0.0'), 'ghatDDdD[2][0][0]': mpf('0.0'), 'ghatDDdD[2][0][1]': mpf('0.0'), 'ghatDDdD[2][0][2]': mpf('0.0'), 'ghatDDdD[2][1][0]': mpf('0.0'), 'ghatDDdD[2][1][1]': mpf('0.0'), 'ghatDDdD[2][1][2]': mpf('0.0'), 'ghatDDdD[2][2][0]': mpf('0.0999582284461237485230114268939676'), 'ghatDDdD[2][2][1]': mpf('0.0534796548868526888926326815950342'), 'ghatDDdD[2][2][2]': mpf('0.0'), 'ghatDDdDD[0][0][0][0]': mpf('33.6461277030887655376842392040855'), 'ghatDDdDD[0][0][0][1]': mpf('1.45582384014133679648446347791899'), 'ghatDDdDD[0][0][0][2]': mpf('0.0'), 'ghatDDdDD[0][0][1][0]': mpf('1.4558238401413367964844634779182'), 'ghatDDdDD[0][0][1][1]': mpf('0.72387403226007103017213983761926'), 'ghatDDdDD[0][0][1][2]': mpf('0.0'), 'ghatDDdDD[0][0][2][0]': mpf('0.0'), 'ghatDDdDD[0][0][2][1]': mpf('0.0'), 'ghatDDdDD[0][0][2][2]': mpf('0.0'), 'ghatDDdDD[0][1][0][0]': mpf('0.0'), 'ghatDDdDD[0][1][0][1]': mpf('0.0'), 'ghatDDdDD[0][1][0][2]': mpf('0.0'), 'ghatDDdDD[0][1][1][0]': mpf('0.0'), 'ghatDDdDD[0][1][1][1]': mpf('0.0'), 'ghatDDdDD[0][1][1][2]': mpf('0.0'), 'ghatDDdDD[0][1][2][0]': mpf('0.0'), 'ghatDDdDD[0][1][2][1]': mpf('0.0'), 'ghatDDdDD[0][1][2][2]': mpf('0.0'), 'ghatDDdDD[0][2][0][0]': mpf('0.0'), 'ghatDDdDD[0][2][0][1]': mpf('0.0'), 'ghatDDdDD[0][2][0][2]': mpf('0.0'), 
'ghatDDdDD[0][2][1][0]': mpf('0.0'), 'ghatDDdDD[0][2][1][1]': mpf('0.0'), 'ghatDDdDD[0][2][1][2]': mpf('0.0'), 'ghatDDdDD[0][2][2][0]': mpf('0.0'), 'ghatDDdDD[0][2][2][1]': mpf('0.0'), 'ghatDDdDD[0][2][2][2]': mpf('0.0'), 'ghatDDdDD[1][0][0][0]': mpf('0.0'), 'ghatDDdDD[1][0][0][1]': mpf('0.0'), 'ghatDDdDD[1][0][0][2]': mpf('0.0'), 'ghatDDdDD[1][0][1][0]': mpf('0.0'), 'ghatDDdDD[1][0][1][1]': mpf('0.0'), 'ghatDDdDD[1][0][1][2]': mpf('0.0'), 'ghatDDdDD[1][0][2][0]': mpf('0.0'), 'ghatDDdDD[1][0][2][1]': mpf('0.0'), 'ghatDDdDD[1][0][2][2]': mpf('0.0'), 'ghatDDdDD[1][1][0][0]': mpf('3.3106238128075806873407792832626'), 'ghatDDdDD[1][1][0][1]': mpf('0.0'), 'ghatDDdDD[1][1][0][2]': mpf('0.0'), 'ghatDDdDD[1][1][1][0]': mpf('0.0'), 'ghatDDdDD[1][1][1][1]': mpf('0.143766923836857097734916423695006'), 'ghatDDdDD[1][1][1][2]': mpf('0.0'), 'ghatDDdDD[1][1][2][0]': mpf('0.0'), 'ghatDDdDD[1][1][2][1]': mpf('0.0'), 'ghatDDdDD[1][1][2][2]': mpf('0.0'), 'ghatDDdDD[1][2][0][0]': mpf('0.0'), 'ghatDDdDD[1][2][0][1]': mpf('0.0'), 'ghatDDdDD[1][2][0][2]': mpf('0.0'), 'ghatDDdDD[1][2][1][0]': mpf('0.0'), 'ghatDDdDD[1][2][1][1]': mpf('0.0'), 'ghatDDdDD[1][2][1][2]': mpf('0.0'), 'ghatDDdDD[1][2][2][0]': mpf('0.0'), 'ghatDDdDD[1][2][2][1]': mpf('0.0'), 'ghatDDdDD[1][2][2][2]': mpf('0.0'), 'ghatDDdDD[2][0][0][0]': mpf('0.0'), 'ghatDDdDD[2][0][0][1]': mpf('0.0'), 'ghatDDdDD[2][0][0][2]': mpf('0.0'), 'ghatDDdDD[2][0][1][0]': mpf('0.0'), 'ghatDDdDD[2][0][1][1]': mpf('0.0'), 'ghatDDdDD[2][0][1][2]': mpf('0.0'), 'ghatDDdDD[2][0][2][0]': mpf('0.0'), 'ghatDDdDD[2][0][2][1]': mpf('0.0'), 'ghatDDdDD[2][0][2][2]': mpf('0.0'), 'ghatDDdDD[2][1][0][0]': mpf('0.0'), 'ghatDDdDD[2][1][0][1]': mpf('0.0'), 'ghatDDdDD[2][1][0][2]': mpf('0.0'), 'ghatDDdDD[2][1][1][0]': mpf('0.0'), 'ghatDDdDD[2][1][1][1]': mpf('0.0'), 'ghatDDdDD[2][1][1][2]': mpf('0.0'), 'ghatDDdDD[2][1][2][0]': mpf('0.0'), 'ghatDDdDD[2][1][2][1]': mpf('0.0'), 'ghatDDdDD[2][1][2][2]': mpf('0.0'), 'ghatDDdDD[2][2][0][0]': 
mpf('0.660039373658507022405700360423945'), 'ghatDDdDD[2][2][0][1]': mpf('0.400621678747131729256894464833096'), 'ghatDDdDD[2][2][0][2]': mpf('0.0'), 'ghatDDdDD[2][2][1][0]': mpf('0.400621678747131729256894464833047'), 'ghatDDdDD[2][2][1][1]': mpf('0.0804831316332918866011126875397545'), 'ghatDDdDD[2][2][1][2]': mpf('0.0'), 'ghatDDdDD[2][2][2][0]': mpf('0.0'), 'ghatDDdDD[2][2][2][1]': mpf('0.0'), 'ghatDDdDD[2][2][2][2]': mpf('0.0'), 'GammahatUDD[0][0][0]': mpf('4.27525720796079885010623506074686'), 'GammahatUDD[0][0][1]': mpf('0.526256215914048288725427930749466'), 'GammahatUDD[0][0][2]': mpf('0.0'), 'GammahatUDD[0][1][0]': mpf('0.526256215914048288725427930749466'), 'GammahatUDD[0][1][1]': mpf('-0.548540948863349447449098217793289'), 'GammahatUDD[0][1][2]': mpf('0.0'), 'GammahatUDD[0][2][0]': mpf('0.0'), 'GammahatUDD[0][2][1]': mpf('0.0'), 'GammahatUDD[0][2][2]': mpf('-0.10936265936139805885662121039833'), 'GammahatUDD[1][0][0]': mpf('-2.64972775968909909582312411205288'), 'GammahatUDD[1][0][1]': mpf('2.7619325635990367680012030847882'), 'GammahatUDD[1][0][2]': mpf('0.0'), 'GammahatUDD[1][1][0]': mpf('2.7619325635990367680012030847882'), 'GammahatUDD[1][1][1]': mpf('0.526256215914048288725427930749268'), 'GammahatUDD[1][1][2]': mpf('0.0'), 'GammahatUDD[1][2][0]': mpf('0.0'), 'GammahatUDD[1][2][1]': mpf('0.0'), 'GammahatUDD[1][2][2]': mpf('-0.294607042898904013414419049755124'), 'GammahatUDD[2][0][0]': mpf('0.0'), 'GammahatUDD[2][0][1]': mpf('0.0'), 'GammahatUDD[2][0][2]': mpf('3.74555220667308017274554980630281'), 'GammahatUDD[2][1][0]': mpf('0.0'), 'GammahatUDD[2][1][1]': mpf('0.0'), 'GammahatUDD[2][1][2]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][0]': mpf('3.74555220667308017274554980630281'), 'GammahatUDD[2][2][1]': mpf('2.0039454728985211544890548692023'), 'GammahatUDD[2][2][2]': mpf('0.0'), 'GammahatUDDdD[0][0][0][0]': mpf('0.256028449211412773411224844475167'), 'GammahatUDDdD[0][0][0][1]': mpf('-2.90696835905883120101375990313686'), 
'GammahatUDDdD[0][0][0][2]': mpf('0.0'), 'GammahatUDDdD[0][0][1][0]': mpf('-2.90696835905883120101375990314002'), 'GammahatUDDdD[0][0][1][1]': mpf('0.238087504345025461941909692329143'), 'GammahatUDDdD[0][0][1][2]': mpf('0.0'), 'GammahatUDDdD[0][0][2][0]': mpf('0.0'), 'GammahatUDDdD[0][0][2][1]': mpf('0.0'), 'GammahatUDDdD[0][0][2][2]': mpf('0.0'), 'GammahatUDDdD[0][1][0][0]': mpf('-2.90696835905883120101375990314002'), 'GammahatUDDdD[0][1][0][1]': mpf('0.238087504345025461941909692329143'), 'GammahatUDDdD[0][1][0][2]': mpf('0.0'), 'GammahatUDDdD[0][1][1][0]': mpf('1.06820804070151728493108813024714'), 'GammahatUDDdD[0][1][1][1]': mpf('0.577346168045455496082541734221812'), 'GammahatUDDdD[0][1][1][2]': mpf('0.0'), 'GammahatUDDdD[0][1][2][0]': mpf('0.0'), 'GammahatUDDdD[0][1][2][1]': mpf('0.0'), 'GammahatUDDdD[0][1][2][2]': mpf('0.0'), 'GammahatUDDdD[0][2][0][0]': mpf('0.0'), 'GammahatUDDdD[0][2][0][1]': mpf('0.0'), 'GammahatUDDdD[0][2][0][2]': mpf('0.0'), 'GammahatUDDdD[0][2][1][0]': mpf('0.0'), 'GammahatUDDdD[0][2][1][1]': mpf('0.0'), 'GammahatUDDdD[0][2][1][2]': mpf('0.0'), 'GammahatUDDdD[0][2][2][0]': mpf('0.212968735195462614988136553693575'), 'GammahatUDDdD[0][2][2][1]': mpf('-0.323208053707180606761463394449253'), 'GammahatUDDdD[0][2][2][2]': mpf('0.0'), 'GammahatUDDdD[1][0][0][0]': mpf('6.61694212914130286182217748004833'), 'GammahatUDDdD[1][0][0][1]': mpf('-1.19878312202425448372333554572109'), 'GammahatUDDdD[1][0][0][2]': mpf('0.0'), 'GammahatUDDdD[1][0][1][0]': mpf('2.98091749253542766679627189314195'), 'GammahatUDDdD[1][0][1][1]': mpf('-2.90696835905883120101375990313529'), 'GammahatUDDdD[1][0][1][2]': mpf('0.0'), 'GammahatUDDdD[1][0][2][0]': mpf('0.0'), 'GammahatUDDdD[1][0][2][1]': mpf('0.0'), 'GammahatUDDdD[1][0][2][2]': mpf('0.0'), 'GammahatUDDdD[1][1][0][0]': mpf('2.98091749253542766679627189314195'), 'GammahatUDDdD[1][1][0][1]': mpf('-2.90696835905883120101375990313529'), 'GammahatUDDdD[1][1][0][2]': mpf('0.0'), 'GammahatUDDdD[1][1][1][0]': 
mpf('-2.90696835905883120101375990313529'), 'GammahatUDDdD[1][1][1][1]': mpf('0.238087504345025461941909692329241'), 'GammahatUDDdD[1][1][1][2]': mpf('0.0'), 'GammahatUDDdD[1][1][2][0]': mpf('0.0'), 'GammahatUDDdD[1][1][2][1]': mpf('0.0'), 'GammahatUDDdD[1][1][2][2]': mpf('0.0'), 'GammahatUDDdD[1][2][0][0]': mpf('0.0'), 'GammahatUDDdD[1][2][0][1]': mpf('0.0'), 'GammahatUDDdD[1][2][0][2]': mpf('0.0'), 'GammahatUDDdD[1][2][1][0]': mpf('0.0'), 'GammahatUDDdD[1][2][1][1]': mpf('0.0'), 'GammahatUDDdD[1][2][1][2]': mpf('0.0'), 'GammahatUDDdD[1][2][2][0]': mpf('-0.579562548766638718567713488625028'), 'GammahatUDDdD[1][2][2][1]': mpf('-0.133285372191639856629637640120232'), 'GammahatUDDdD[1][2][2][2]': mpf('0.0'), 'GammahatUDDdD[2][0][0][0]': mpf('0.0'), 'GammahatUDDdD[2][0][0][1]': mpf('0.0'), 'GammahatUDDdD[2][0][0][2]': mpf('0.0'), 'GammahatUDDdD[2][0][1][0]': mpf('0.0'), 'GammahatUDDdD[2][0][1][1]': mpf('0.0'), 'GammahatUDDdD[2][0][1][2]': mpf('0.0'), 'GammahatUDDdD[2][0][2][0]': mpf('-3.32587221198352924008495365856667'), 'GammahatUDDdD[2][0][2][1]': mpf('0.0'), 'GammahatUDDdD[2][0][2][2]': mpf('0.0'), 'GammahatUDDdD[2][1][0][0]': mpf('0.0'), 'GammahatUDDdD[2][1][0][1]': mpf('0.0'), 'GammahatUDDdD[2][1][0][2]': mpf('0.0'), 'GammahatUDDdD[2][1][1][0]': mpf('0.0'), 'GammahatUDDdD[2][1][1][1]': mpf('0.0'), 'GammahatUDDdD[2][1][1][2]': mpf('0.0'), 'GammahatUDDdD[2][1][2][0]': mpf('0.0'), 'GammahatUDDdD[2][1][2][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][1][2][2]': mpf('0.0'), 'GammahatUDDdD[2][2][0][0]': mpf('-3.32587221198352924008495365856667'), 'GammahatUDDdD[2][2][0][1]': mpf('0.0'), 'GammahatUDDdD[2][2][0][2]': mpf('0.0'), 'GammahatUDDdD[2][2][1][0]': mpf('0.0'), 'GammahatUDDdD[2][2][1][1]': mpf('-5.01579745835047758287644822245949'), 'GammahatUDDdD[2][2][1][2]': mpf('0.0'), 'GammahatUDDdD[2][2][2][0]': mpf('0.0'), 'GammahatUDDdD[2][2][2][1]': mpf('0.0'), 'GammahatUDDdD[2][2][2][2]': mpf('0.0'), 'Cart_to_xx[0]': mpf('0.0'), 'Cart_to_xx[1]': 
mpf('0.0'), 'Cart_to_xx[2]': mpf('0.0'), 'Cart_to_xx[3]': mpf('0.0'), 'xxCart[0]': mpf('0.0990342535546315715303181987358134'), 'xxCart[1]': mpf('0.059462651787259373641291448115671'), 'xxCart[2]': mpf('0.38639909144164541931510746078563'), 'xxCart[3]': mpf('0.0'), 'xxSph[0]': mpf('0.403296228846272470043624483970756'), 'xxSph[1]': mpf('0.290494285064808449775719376107343'), 'xxSph[2]': mpf('0.540732016614090760242561373161152'), 'xxSph[3]': mpf('0.0'), 'scalefactor_orthog[0]': mpf('0.676020313727977534668877495358117'), 'scalefactor_orthog[1]': mpf('0.301271283535614170401215840142554'), 'scalefactor_orthog[2]': mpf('0.115514459418273331776174469702154'), 'scalefactor_orthog[3]': mpf('0.0')}
| 5,253.410256
| 20,616
| 0.588702
| 35,069
| 204,883
| 3.432376
| 0.022099
| 0.078574
| 0.145717
| 0.067492
| 0.870906
| 0.803622
| 0.725687
| 0.702634
| 0.70221
| 0.698264
| 0
| 0.323305
| 0.046885
| 204,883
| 38
| 20,617
| 5,391.657895
| 0.293102
| 0.001337
| 0
| 0
| 1
| 0
| 0.695611
| 0.410979
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.133333
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
506344f75c57cf0de91b27e6c9b6cf5df9a71dab
| 5,661
|
py
|
Python
|
core/templatetags/sense.py
|
6ba/bbgo
|
dfa9b55b8d40c53940105333c2e03a3c6abddb88
|
[
"MIT"
] | 22
|
2017-07-13T04:07:03.000Z
|
2021-06-10T05:39:29.000Z
|
core/templatetags/sense.py
|
genonfire/bbgo
|
5f374f0b620f4dc3e106de5969f26f4585044605
|
[
"MIT"
] | 7
|
2017-08-25T06:33:45.000Z
|
2019-10-14T05:49:32.000Z
|
core/templatetags/sense.py
|
6ba/bbgo
|
dfa9b55b8d40c53940105333c2e03a3c6abddb88
|
[
"MIT"
] | 9
|
2017-12-31T02:45:58.000Z
|
2021-01-22T03:09:02.000Z
|
from django import template
from django.conf import settings
from django.urls import resolve
register = template.Library()
@register.inclusion_tag('show_sense.html', takes_context=True)
def show_up_sense(context, sense=''):
    """Render the AdSense unit configured for the SENSE_UP placement.

    With ``sense == 'user'``, a user profile carrying both a client and a
    slot id overrides the site-wide SENSE_UP settings.  Ads are always
    disabled while ``settings.DEBUG`` is on.
    """
    request = context['request']
    # Start from the site-wide defaults; a complete user override wins.
    sense_client = settings.SENSE_UP_CLIENT
    sense_slot = settings.SENSE_UP_SLOT
    if sense == 'user':
        profile = request.user.profile
        if profile.sense_client and profile.sense_slot:
            sense_client = profile.sense_client
            sense_slot = profile.sense_slot
    # DEBUG unconditionally suppresses ad rendering.
    sense_enabled = False if settings.DEBUG else settings.ENABLE_ADSENSE
    return {
        'sense_enabled': sense_enabled,
        'sense_native': False,
        'sense_client': sense_client,
        'sense_slot': sense_slot,
    }
@register.inclusion_tag('show_sense.html', takes_context=True)
def show_up_sense_native(context, sense=''):
    """Render the native-format AdSense unit for the SENSE_UP placement.

    Identical to ``show_up_sense`` except the template is told to emit a
    native ad (``sense_native`` is True).
    """
    request = context['request']
    sense_client, sense_slot = settings.SENSE_UP_CLIENT, settings.SENSE_UP_SLOT
    if sense == 'user':
        profile = request.user.profile
        # Only a profile with BOTH ids overrides the defaults.
        if profile.sense_client and profile.sense_slot:
            sense_client, sense_slot = profile.sense_client, profile.sense_slot
    sense_enabled = False if settings.DEBUG else settings.ENABLE_ADSENSE
    return {
        'sense_enabled': sense_enabled,
        'sense_native': True,
        'sense_client': sense_client,
        'sense_slot': sense_slot,
    }
@register.inclusion_tag('show_sense.html', takes_context=True)
def show_down_sense(context, sense=''):
    """Render the AdSense unit configured for the SENSE_DOWN placement.

    With ``sense == 'user'``, a user profile carrying both a client and a
    slot id overrides the site-wide SENSE_DOWN settings.  DEBUG disables
    ads entirely.
    """
    request = context['request']
    sense_client = settings.SENSE_DOWN_CLIENT
    sense_slot = settings.SENSE_DOWN_SLOT
    if sense == 'user':
        profile = request.user.profile
        if profile.sense_client and profile.sense_slot:
            sense_client = profile.sense_client
            sense_slot = profile.sense_slot
    sense_enabled = False if settings.DEBUG else settings.ENABLE_ADSENSE
    return {
        'sense_enabled': sense_enabled,
        'sense_native': False,
        'sense_client': sense_client,
        'sense_slot': sense_slot,
    }
@register.inclusion_tag('show_sense.html', takes_context=True)
def show_down_sense_native(context, sense=''):
    """Render the native-format AdSense unit for the SENSE_DOWN placement.

    Same selection logic as ``show_down_sense``; only ``sense_native``
    differs so the template emits a native ad block.
    """
    request = context['request']
    sense_client, sense_slot = (settings.SENSE_DOWN_CLIENT,
                                settings.SENSE_DOWN_SLOT)
    if sense == 'user':
        profile = request.user.profile
        if profile.sense_client and profile.sense_slot:
            sense_client = profile.sense_client
            sense_slot = profile.sense_slot
    sense_enabled = False if settings.DEBUG else settings.ENABLE_ADSENSE
    return {
        'sense_enabled': sense_enabled,
        'sense_native': True,
        'sense_client': sense_client,
        'sense_slot': sense_slot,
    }
@register.inclusion_tag('show_sense.html', takes_context=True)
def show_side_sense(context, sense=''):
    """Render the AdSense unit configured for the SENSE_SIDE placement.

    In addition to the DEBUG kill-switch shared by the other tags, the
    sidebar ad is suppressed inside the 'vaults' URL namespace.
    """
    request = context['request']
    sense_client = settings.SENSE_SIDE_CLIENT
    sense_slot = settings.SENSE_SIDE_SLOT
    if sense == 'user':
        profile = request.user.profile
        if profile.sense_client and profile.sense_slot:
            sense_client = profile.sense_client
            sense_slot = profile.sense_slot
    if settings.DEBUG:
        sense_enabled = False
    else:
        sense_enabled = settings.ENABLE_ADSENSE
        # resolve() is only consulted outside DEBUG, matching the
        # original control flow: no side ads within the vaults app.
        if resolve(request.path).namespace == 'vaults':
            sense_enabled = False
    return {
        'sense_enabled': sense_enabled,
        'sense_native': False,
        'sense_client': sense_client,
        'sense_slot': sense_slot,
    }
@register.inclusion_tag('show_sense.html', takes_context=True)
def show_side_sense_native(context, sense=''):
    """Render the native-format AdSense unit for the SENSE_SIDE placement.

    Same logic as ``show_side_sense`` (user override, DEBUG kill-switch,
    suppression inside the 'vaults' namespace) with a native ad format.
    """
    request = context['request']
    sense_client, sense_slot = (settings.SENSE_SIDE_CLIENT,
                                settings.SENSE_SIDE_SLOT)
    if sense == 'user':
        profile = request.user.profile
        if profile.sense_client and profile.sense_slot:
            sense_client = profile.sense_client
            sense_slot = profile.sense_slot
    if settings.DEBUG:
        sense_enabled = False
    else:
        sense_enabled = settings.ENABLE_ADSENSE
        if resolve(request.path).namespace == 'vaults':
            sense_enabled = False
    return {
        'sense_enabled': sense_enabled,
        'sense_native': True,
        'sense_client': sense_client,
        'sense_slot': sense_slot,
    }
| 31.104396
| 65
| 0.653948
| 671
| 5,661
| 5.205663
| 0.062593
| 0.132265
| 0.109934
| 0.07558
| 0.970799
| 0.970799
| 0.970799
| 0.951045
| 0.951045
| 0.920985
| 0
| 0
| 0.258082
| 5,661
| 181
| 66
| 31.276243
| 0.831667
| 0.032503
| 0
| 0.893333
| 0
| 0
| 0.082629
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.02
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a002340e413d41946c1567064c1f223e07bfc84
| 150
|
py
|
Python
|
tests/test_folders.py
|
pierre-chaville/automlk
|
61386beba62f72360e1f5f8d6bcce17df653e2e8
|
[
"MIT"
] | 16
|
2017-09-05T12:26:11.000Z
|
2019-10-26T22:55:41.000Z
|
tests/test_folders.py
|
pierre-chaville/automlk
|
61386beba62f72360e1f5f8d6bcce17df653e2e8
|
[
"MIT"
] | 1
|
2018-02-07T11:16:43.000Z
|
2018-02-07T11:16:43.000Z
|
tests/test_folders.py
|
pierre-chaville/automlk
|
61386beba62f72360e1f5f8d6bcce17df653e2e8
|
[
"MIT"
] | 8
|
2017-09-21T01:20:52.000Z
|
2021-01-21T10:03:34.000Z
|
from automlk.dataset import get_data_folder, get_dataset_list

# Smoke check: report where automlk keeps its data and which datasets
# are currently registered.
print(f'data folder: {get_data_folder()}')
print(f'list of datasets: {get_dataset_list()}')
| 37.5
| 61
| 0.8
| 23
| 150
| 4.869565
| 0.478261
| 0.267857
| 0.232143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 150
| 4
| 62
| 37.5
| 0.811594
| 0
| 0
| 0
| 0
| 0
| 0.192053
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
4a06be2576920b10ad2b3cba4b3ba8c3a1be628a
| 135
|
py
|
Python
|
gloomhaven/__init__.py
|
Softyy/gloomhaven-campaign-manager
|
0d704c248184edfe62b95d286203d58febb011c5
|
[
"MIT"
] | null | null | null |
gloomhaven/__init__.py
|
Softyy/gloomhaven-campaign-manager
|
0d704c248184edfe62b95d286203d58febb011c5
|
[
"MIT"
] | null | null | null |
gloomhaven/__init__.py
|
Softyy/gloomhaven-campaign-manager
|
0d704c248184edfe62b95d286203d58febb011c5
|
[
"MIT"
] | null | null | null |
from .server import *
from .components.cyto_reactor import *
from .components.modal import *
from .components.campaign_modal import *
| 22.5
| 40
| 0.792593
| 17
| 135
| 6.176471
| 0.470588
| 0.285714
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125926
| 135
| 5
| 41
| 27
| 0.889831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c57b7f0614b0654145222c14b82a3d349184dfd1
| 28,725
|
py
|
Python
|
vmware_nsxlib/tests/unit/v3/policy/test_ipsec_vpn_resources.py
|
salv-orlando/vmware-nsxlib
|
283eff2881b99c57b3908d03fb1c91da7dbdf46e
|
[
"Apache-2.0"
] | null | null | null |
vmware_nsxlib/tests/unit/v3/policy/test_ipsec_vpn_resources.py
|
salv-orlando/vmware-nsxlib
|
283eff2881b99c57b3908d03fb1c91da7dbdf46e
|
[
"Apache-2.0"
] | null | null | null |
vmware_nsxlib/tests/unit/v3/policy/test_ipsec_vpn_resources.py
|
salv-orlando/vmware-nsxlib
|
283eff2881b99c57b3908d03fb1c91da7dbdf46e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 VMware, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from vmware_nsxlib.tests.unit.v3.policy import test_resources
from vmware_nsxlib.v3.policy import ipsec_vpn_defs
from vmware_nsxlib.v3 import vpn_ipsec
TEST_TENANT = 'test'
class TestPolicyIkeProfileApi(test_resources.NsxPolicyLibTestCase):
    """CRUD tests for the policy IPSec VPN IKE profile resource API.

    Each test patches the underlying policy API transport and asserts
    that the resource API builds the expected definition object.
    """

    def setUp(self, *args, **kwargs):
        super(TestPolicyIkeProfileApi, self).setUp()
        # Resource under test for all methods below.
        self.resourceApi = self.policy_lib.ipsec_vpn.ike_profile

    def test_create(self):
        # create_or_overwrite must forward every attribute into the def
        # passed to the transport's create_or_update.
        name = 'd1'
        obj_id = 'D1'
        description = 'desc'
        ike_version = vpn_ipsec.IkeVersionTypes.IKE_VERSION_V1
        encryption_algorithms = [
            vpn_ipsec.EncryptionAlgorithmTypes.ENCRYPTION_ALGORITHM_128]
        digest_algorithms = [
            vpn_ipsec.DigestAlgorithmTypes.DIGEST_ALGORITHM_SHA256]
        dh_groups = [vpn_ipsec.DHGroupTypes.DH_GROUP_15]
        # One above the documented minimum SA lifetime.
        sa_life_time = vpn_ipsec.IkeSALifetimeLimits.SA_LIFETIME_MIN + 1
        tags = []
        with mock.patch.object(self.policy_api,
                               "create_or_update") as api_call:
            result = self.resourceApi.create_or_overwrite(
                name,
                profile_id=obj_id,
                description=description,
                ike_version=ike_version,
                encryption_algorithms=encryption_algorithms,
                digest_algorithms=digest_algorithms,
                dh_groups=dh_groups,
                sa_life_time=sa_life_time,
                tags=tags,
                tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                name=name,
                description=description,
                ike_version=ike_version,
                encryption_algorithms=encryption_algorithms,
                digest_algorithms=digest_algorithms,
                dh_groups=dh_groups,
                sa_life_time=sa_life_time,
                tags=tags,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            # create_or_overwrite returns the id it was given.
            self.assertEqual(obj_id, result)

    def test_delete(self):
        # delete builds a def keyed only by profile id and tenant.
        obj_id = '111'
        with mock.patch.object(self.policy_api, "delete") as api_call:
            self.resourceApi.delete(obj_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_get(self):
        # get returns the transport payload unchanged.
        obj_id = '111'
        with mock.patch.object(self.policy_api, "get",
                               return_value={'id': obj_id}) as api_call:
            result = self.resourceApi.get(obj_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual(obj_id, result['id'])

    def test_get_by_name(self):
        # get_by_name lists all entries and filters on display_name.
        name = 'd1'
        with mock.patch.object(
            self.policy_api, "list",
            return_value={'results': [{'display_name': name}]}) as api_call:
            obj = self.resourceApi.get_by_name(name, tenant=TEST_TENANT)
            self.assertIsNotNone(obj)
            expected_def = self.resourceApi.entry_def(
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_list(self):
        # list uses a tenant-only def and unwraps the 'results' key.
        with mock.patch.object(self.policy_api, "list",
                               return_value={'results': []}) as api_call:
            result = self.resourceApi.list(tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual([], result)

    def test_update(self):
        # update reads the current object (mock_get) then issues an
        # update carrying the new name/description.
        obj_id = '111'
        name = 'new name'
        description = 'new desc'
        with self.mock_get(obj_id, 'old name'), \
                self.mock_create_update() as update_call:
            self.resourceApi.update(obj_id,
                                    name=name,
                                    description=description,
                                    tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                name=name,
                description=description,
                tenant=TEST_TENANT)
            self.assert_called_with_def(update_call, expected_def)
class TestPolicyTunnelProfileApi(test_resources.NsxPolicyLibTestCase):
    """CRUD tests for the policy IPSec VPN tunnel profile resource API.

    Mirrors the IKE profile tests; the tunnel profile differs only in
    its create attributes (perfect forward secrecy instead of IKE
    version).
    """

    def setUp(self, *args, **kwargs):
        super(TestPolicyTunnelProfileApi, self).setUp()
        # Resource under test for all methods below.
        self.resourceApi = self.policy_lib.ipsec_vpn.tunnel_profile

    def test_create(self):
        # create_or_overwrite must forward every attribute into the def
        # passed to the transport's create_or_update.
        name = 'd1'
        obj_id = 'D1'
        description = 'desc'
        enable_perfect_forward_secrecy = True
        encryption_algorithms = [
            vpn_ipsec.EncryptionAlgorithmTypes.ENCRYPTION_ALGORITHM_128]
        digest_algorithms = [
            vpn_ipsec.DigestAlgorithmTypes.DIGEST_ALGORITHM_SHA256]
        dh_groups = [vpn_ipsec.DHGroupTypes.DH_GROUP_15]
        sa_life_time = vpn_ipsec.IkeSALifetimeLimits.SA_LIFETIME_MIN + 1
        tags = []
        with mock.patch.object(self.policy_api,
                               "create_or_update") as api_call:
            result = self.resourceApi.create_or_overwrite(
                name,
                profile_id=obj_id,
                description=description,
                enable_perfect_forward_secrecy=enable_perfect_forward_secrecy,
                encryption_algorithms=encryption_algorithms,
                digest_algorithms=digest_algorithms,
                dh_groups=dh_groups,
                sa_life_time=sa_life_time,
                tags=tags,
                tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                name=name,
                description=description,
                enable_perfect_forward_secrecy=enable_perfect_forward_secrecy,
                encryption_algorithms=encryption_algorithms,
                digest_algorithms=digest_algorithms,
                dh_groups=dh_groups,
                sa_life_time=sa_life_time,
                tags=tags,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual(obj_id, result)

    def test_delete(self):
        # delete builds a def keyed only by profile id and tenant.
        obj_id = '111'
        with mock.patch.object(self.policy_api, "delete") as api_call:
            self.resourceApi.delete(obj_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_get(self):
        # get returns the transport payload unchanged.
        obj_id = '111'
        with mock.patch.object(self.policy_api, "get",
                               return_value={'id': obj_id}) as api_call:
            result = self.resourceApi.get(obj_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual(obj_id, result['id'])

    def test_get_by_name(self):
        # get_by_name lists all entries and filters on display_name.
        name = 'd1'
        with mock.patch.object(
            self.policy_api, "list",
            return_value={'results': [{'display_name': name}]}) as api_call:
            obj = self.resourceApi.get_by_name(name, tenant=TEST_TENANT)
            self.assertIsNotNone(obj)
            expected_def = self.resourceApi.entry_def(
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_list(self):
        # list uses a tenant-only def and unwraps the 'results' key.
        with mock.patch.object(self.policy_api, "list",
                               return_value={'results': []}) as api_call:
            result = self.resourceApi.list(tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual([], result)

    def test_update(self):
        # update reads the current object (mock_get) then issues an
        # update carrying the new name/description.
        obj_id = '111'
        name = 'new name'
        description = 'new desc'
        with self.mock_get(obj_id, 'old name'), \
                self.mock_create_update() as update_call:
            self.resourceApi.update(obj_id,
                                    name=name,
                                    description=description,
                                    tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=obj_id,
                name=name,
                description=description,
                tenant=TEST_TENANT)
            self.assert_called_with_def(update_call, expected_def)
class TestPolicyDpdProfileApi(test_resources.NsxPolicyLibTestCase):
    """CRUD tests for the policy IPSec VPN dead-peer-detection profile."""

    def setUp(self, *args, **kwargs):
        super(TestPolicyDpdProfileApi, self).setUp()
        self.resourceApi = self.policy_lib.ipsec_vpn.dpd_profile

    def test_create(self):
        """create_or_overwrite forwards every attribute into the def."""
        profile_id = 'D1'
        display_name = 'd1'
        # Attributes shared verbatim between the call and expected def.
        attrs = {
            'description': 'desc',
            'dpd_probe_interval': 7,
            'enabled': True,
            'tags': [],
            'tenant': TEST_TENANT,
        }
        with mock.patch.object(self.policy_api,
                               "create_or_update") as create_call:
            result = self.resourceApi.create_or_overwrite(
                display_name, profile_id=profile_id, **attrs)
            expected_def = self.resourceApi.entry_def(
                profile_id=profile_id, name=display_name, **attrs)
            self.assert_called_with_def(create_call, expected_def)
            # The id passed in is echoed back.
            self.assertEqual(profile_id, result)

    def test_delete(self):
        """delete builds a def keyed only by profile id and tenant."""
        profile_id = '111'
        with mock.patch.object(self.policy_api, "delete") as delete_call:
            self.resourceApi.delete(profile_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=profile_id, tenant=TEST_TENANT)
            self.assert_called_with_def(delete_call, expected_def)

    def test_get(self):
        """get returns the backend payload for the requested id."""
        profile_id = '111'
        with mock.patch.object(self.policy_api, "get",
                               return_value={'id': profile_id}) as get_call:
            result = self.resourceApi.get(profile_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=profile_id, tenant=TEST_TENANT)
            self.assert_called_with_def(get_call, expected_def)
            self.assertEqual(profile_id, result['id'])

    def test_get_by_name(self):
        """get_by_name lists all profiles and filters by display name."""
        display_name = 'd1'
        listing = {'results': [{'display_name': display_name}]}
        with mock.patch.object(self.policy_api, "list",
                               return_value=listing) as list_call:
            obj = self.resourceApi.get_by_name(display_name,
                                               tenant=TEST_TENANT)
            self.assertIsNotNone(obj)
            expected_def = self.resourceApi.entry_def(tenant=TEST_TENANT)
            self.assert_called_with_def(list_call, expected_def)

    def test_list(self):
        """list queries with a tenant-only def and unwraps 'results'."""
        with mock.patch.object(self.policy_api, "list",
                               return_value={'results': []}) as list_call:
            result = self.resourceApi.list(tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(tenant=TEST_TENANT)
            self.assert_called_with_def(list_call, expected_def)
            self.assertEqual([], result)

    def test_update(self):
        """update merges the new name/description into an update call."""
        profile_id = '111'
        with self.mock_get(profile_id, 'old name'), \
                self.mock_create_update() as update_call:
            self.resourceApi.update(profile_id,
                                    name='new name',
                                    description='new desc',
                                    tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                profile_id=profile_id,
                name='new name',
                description='new desc',
                tenant=TEST_TENANT)
            self.assert_called_with_def(update_call, expected_def)
class TestPolicyVpnServiceApi(test_resources.NsxPolicyLibTestCase):
    """CRUD tests for the Tier-1-scoped IPSec VPN service resource API.

    Unlike the profile APIs, every call here is additionally keyed by
    the owning tier1 router id.
    """

    def setUp(self, *args, **kwargs):
        super(TestPolicyVpnServiceApi, self).setUp()
        # Resource under test for all methods below.
        self.resourceApi = self.policy_lib.ipsec_vpn.service

    def test_create(self):
        # create_or_overwrite must forward every attribute into the def
        # passed to the transport's create_or_update.
        name = 'd1'
        tier1_id = 'tier1'
        obj_id = 'D1'
        description = 'desc'
        ike_log_level = vpn_ipsec.IkeLogLevelTypes.LOG_LEVEL_ERROR
        enabled = True
        tags = []
        with mock.patch.object(self.policy_api,
                               "create_or_update") as api_call:
            result = self.resourceApi.create_or_overwrite(
                name,
                tier1_id=tier1_id,
                vpn_service_id=obj_id,
                description=description,
                ike_log_level=ike_log_level,
                enabled=enabled,
                tags=tags,
                tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=obj_id,
                name=name,
                description=description,
                ike_log_level=ike_log_level,
                enabled=enabled,
                tags=tags,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual(obj_id, result)

    def test_delete(self):
        # delete is keyed by tier1 id, service id and tenant.
        obj_id = '111'
        tier1_id = 'tier1'
        with mock.patch.object(self.policy_api, "delete") as api_call:
            self.resourceApi.delete(tier1_id, obj_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_get(self):
        # get returns the transport payload unchanged.
        obj_id = '111'
        tier1_id = 'tier1'
        with mock.patch.object(self.policy_api, "get",
                               return_value={'id': obj_id}) as api_call:
            result = self.resourceApi.get(tier1_id, obj_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual(obj_id, result['id'])

    def test_get_by_name(self):
        # get_by_name lists the tier1's services and filters by name.
        name = 'd1'
        tier1_id = 'tier1'
        with mock.patch.object(
            self.policy_api, "list",
            return_value={'results': [{'display_name': name}]}) as api_call:
            obj = self.resourceApi.get_by_name(tier1_id, name,
                                               tenant=TEST_TENANT)
            self.assertIsNotNone(obj)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_list(self):
        # list is scoped to one tier1 router.
        tier1_id = 'tier1'
        with mock.patch.object(self.policy_api, "list",
                               return_value={'results': []}) as api_call:
            result = self.resourceApi.list(tier1_id, tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual([], result)

    def test_update(self):
        # update reads the current object (mock_get) then issues an
        # update carrying the new name/description.
        obj_id = '111'
        tier1_id = 'tier1'
        name = 'new name'
        description = 'new desc'
        with self.mock_get(obj_id, 'old name'), \
                self.mock_create_update() as update_call:
            self.resourceApi.update(tier1_id, obj_id,
                                    name=name,
                                    description=description,
                                    tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=obj_id,
                name=name,
                description=description,
                tenant=TEST_TENANT)
            self.assert_called_with_def(update_call, expected_def)
class TestPolicyVpnLocalEndpointApi(test_resources.NsxPolicyLibTestCase):
    """CRUD tests for the IPSec VPN local endpoint resource API.

    Endpoints live under a tier1 router AND a vpn service, so every
    call is keyed by both parent ids.
    """

    def setUp(self, *args, **kwargs):
        super(TestPolicyVpnLocalEndpointApi, self).setUp()
        # Resource under test for all methods below.
        self.resourceApi = self.policy_lib.ipsec_vpn.local_endpoint

    def test_create(self):
        # create_or_overwrite must forward every attribute into the def
        # passed to the transport's create_or_update.
        name = 'EP1'
        tier1_id = 'tier1'
        vpn_service_id = 'vpn1'
        obj_id = 'ep1'
        description = 'desc'
        local_address = '1.1.1.1'
        local_id = '1'
        tags = []
        with mock.patch.object(self.policy_api,
                               "create_or_update") as api_call:
            result = self.resourceApi.create_or_overwrite(
                name,
                tier1_id=tier1_id,
                vpn_service_id=vpn_service_id,
                endpoint_id=obj_id,
                description=description,
                local_address=local_address,
                local_id=local_id,
                tags=tags,
                tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=vpn_service_id,
                endpoint_id=obj_id,
                name=name,
                description=description,
                local_address=local_address,
                local_id=local_id,
                tags=tags,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual(obj_id, result)

    def test_delete(self):
        # delete is keyed by both parent ids plus the endpoint id.
        obj_id = '111'
        tier1_id = 'tier1'
        vpn_service_id = 'vpn1'
        with mock.patch.object(self.policy_api, "delete") as api_call:
            self.resourceApi.delete(tier1_id, vpn_service_id, obj_id,
                                    tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=vpn_service_id,
                endpoint_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_get(self):
        # get returns the transport payload unchanged.
        obj_id = '111'
        tier1_id = 'tier1'
        vpn_service_id = 'vpn1'
        with mock.patch.object(self.policy_api, "get",
                               return_value={'id': obj_id}) as api_call:
            result = self.resourceApi.get(tier1_id, vpn_service_id, obj_id,
                                          tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=vpn_service_id,
                endpoint_id=obj_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual(obj_id, result['id'])

    def test_get_by_name(self):
        # get_by_name lists the service's endpoints and filters by name.
        name = 'd1'
        tier1_id = 'tier1'
        vpn_service_id = 'vpn1'
        with mock.patch.object(
            self.policy_api, "list",
            return_value={'results': [{'display_name': name}]}) as api_call:
            obj = self.resourceApi.get_by_name(tier1_id, vpn_service_id, name,
                                               tenant=TEST_TENANT)
            self.assertIsNotNone(obj)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=vpn_service_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)

    def test_list(self):
        # list is scoped to one tier1 router and vpn service.
        tier1_id = 'tier1'
        vpn_service_id = 'vpn1'
        with mock.patch.object(self.policy_api, "list",
                               return_value={'results': []}) as api_call:
            result = self.resourceApi.list(tier1_id, vpn_service_id,
                                           tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=vpn_service_id,
                tenant=TEST_TENANT)
            self.assert_called_with_def(api_call, expected_def)
            self.assertEqual([], result)

    def test_update(self):
        # update reads the current object (mock_get) then issues an
        # update carrying the new name/description.
        obj_id = '111'
        tier1_id = 'tier1'
        vpn_service_id = 'vpn1'
        name = 'new name'
        description = 'new desc'
        with self.mock_get(obj_id, 'old name'), \
                self.mock_create_update() as update_call:
            self.resourceApi.update(tier1_id, vpn_service_id, obj_id,
                                    name=name,
                                    description=description,
                                    tenant=TEST_TENANT)
            expected_def = self.resourceApi.entry_def(
                tier1_id=tier1_id,
                vpn_service_id=vpn_service_id,
                endpoint_id=obj_id,
                name=name,
                description=description,
                tenant=TEST_TENANT)
            self.assert_called_with_def(update_call, expected_def)
class TestPolicyVpnSessionApi(test_resources.NsxPolicyLibTestCase):
def setUp(self, *args, **kwargs):
super(TestPolicyVpnSessionApi, self).setUp()
self.resourceApi = self.policy_lib.ipsec_vpn.session
def test_create(self):
name = 'Sess1'
tier1_id = 'tier1'
vpn_service_id = 'vpn1'
obj_id = 'sess1'
description = 'desc'
enabled = True
peer_address = '2.2.2.2'
peer_id = '2'
psk = 'dummy'
rules = [self.resourceApi.build_rule(
'rule', 'dummy_id', source_cidrs=['1.1.1.0/24'])]
dpd_profile_id = 'dpd1'
ike_profile_id = 'ike1'
tunnel_profile_id = 'tunnel1'
local_endpoint_id = 'ep1'
tags = []
with mock.patch.object(self.policy_api,
"create_or_update") as api_call:
result = self.resourceApi.create_or_overwrite(
name,
tier1_id=tier1_id,
vpn_service_id=vpn_service_id,
session_id=obj_id,
description=description,
enabled=enabled,
peer_address=peer_address,
peer_id=peer_id,
psk=psk,
rules=rules,
dpd_profile_id=dpd_profile_id,
ike_profile_id=ike_profile_id,
tunnel_profile_id=tunnel_profile_id,
local_endpoint_id=local_endpoint_id,
tags=tags,
tenant=TEST_TENANT)
expected_def = self.resourceApi.entry_def(
tier1_id=tier1_id,
service_id=self.resourceApi._locale_service_id(tier1_id),
vpn_service_id=vpn_service_id,
session_id=obj_id,
name=name,
description=description,
enabled=enabled,
peer_address=peer_address,
peer_id=peer_id,
psk=psk,
rules=rules,
dpd_profile_id=dpd_profile_id,
ike_profile_id=ike_profile_id,
tunnel_profile_id=tunnel_profile_id,
local_endpoint_id=local_endpoint_id,
tags=tags,
tenant=TEST_TENANT)
self.assert_called_with_def(api_call, expected_def)
self.assertEqual(obj_id, result)
def test_delete(self):
obj_id = '111'
tier1_id = 'tier1'
vpn_service_id = 'vpn1'
with mock.patch.object(self.policy_api, "delete") as api_call:
self.resourceApi.delete(tier1_id, vpn_service_id, obj_id,
tenant=TEST_TENANT)
expected_def = self.resourceApi.entry_def(
tier1_id=tier1_id,
vpn_service_id=vpn_service_id,
session_id=obj_id,
tenant=TEST_TENANT)
self.assert_called_with_def(api_call, expected_def)
def test_get(self):
obj_id = '111'
tier1_id = 'tier1'
vpn_service_id = 'vpn1'
with mock.patch.object(self.policy_api, "get",
return_value={'id': obj_id}) as api_call:
result = self.resourceApi.get(tier1_id, vpn_service_id, obj_id,
tenant=TEST_TENANT)
expected_def = self.resourceApi.entry_def(
tier1_id=tier1_id,
vpn_service_id=vpn_service_id,
session_id=obj_id,
tenant=TEST_TENANT)
self.assert_called_with_def(api_call, expected_def)
self.assertEqual(obj_id, result['id'])
def test_get_by_name(self):
# Verify get_by_name() lists entries (no session_id in the def) and
# finds the one whose display_name matches.
name = 'd1'
tier1_id = 'tier1'
vpn_service_id = 'vpn1'
with mock.patch.object(
self.policy_api, "list",
return_value={'results': [{'display_name': name}]}) as api_call:
obj = self.resourceApi.get_by_name(tier1_id, vpn_service_id, name,
tenant=TEST_TENANT)
self.assertIsNotNone(obj)
expected_def = self.resourceApi.entry_def(
tier1_id=tier1_id,
vpn_service_id=vpn_service_id,
tenant=TEST_TENANT)
self.assert_called_with_def(api_call, expected_def)
def test_list(self):
# Verify list() queries with a parent-scoped def and unwraps 'results'.
tier1_id = 'tier1'
vpn_service_id = 'vpn1'
with mock.patch.object(self.policy_api, "list",
return_value={'results': []}) as api_call:
result = self.resourceApi.list(tier1_id, vpn_service_id,
tenant=TEST_TENANT)
expected_def = self.resourceApi.entry_def(
tier1_id=tier1_id,
vpn_service_id=vpn_service_id,
tenant=TEST_TENANT)
self.assert_called_with_def(api_call, expected_def)
self.assertEqual([], result)
def test_update(self):
# Verify update() reads the existing object (mock_get) and then sends
# an update def carrying only the changed attributes.
obj_id = '111'
tier1_id = 'tier1'
vpn_service_id = 'vpn1'
name = 'new name'
description = 'new desc'
with self.mock_get(obj_id, 'old name'), \
self.mock_create_update() as update_call:
self.resourceApi.update(tier1_id, vpn_service_id, obj_id,
name=name,
description=description,
tenant=TEST_TENANT)
expected_def = self.resourceApi.entry_def(
tier1_id=tier1_id,
vpn_service_id=vpn_service_id,
session_id=obj_id,
name=name,
description=description,
tenant=TEST_TENANT)
self.assert_called_with_def(update_call, expected_def)
def test_get_status(self):
# Verify get_status() uses the dedicated status def class rather than
# the regular session entry def.
obj_id = '111'
tier1_id = 'tier1'
vpn_service_id = 'vpn1'
with mock.patch.object(self.policy_api, "get",
return_value={'id': obj_id}) as api_call:
self.resourceApi.get_status(tier1_id, vpn_service_id, obj_id,
tenant=TEST_TENANT)
expected_def = ipsec_vpn_defs.Tier1IPSecVpnSessionStatusDef(
tier1_id=tier1_id,
vpn_service_id=vpn_service_id,
session_id=obj_id,
tenant=TEST_TENANT)
self.assert_called_with_def(api_call, expected_def)
| 40.457746
| 79
| 0.571036
| 3,166
| 28,725
| 4.842072
| 0.060328
| 0.032616
| 0.077234
| 0.042009
| 0.907241
| 0.903523
| 0.898761
| 0.898761
| 0.898761
| 0.853033
| 0
| 0.012756
| 0.347746
| 28,725
| 709
| 80
| 40.51481
| 0.805455
| 0.020783
| 0
| 0.919811
| 0
| 0
| 0.030734
| 0
| 0
| 0
| 0
| 0
| 0.095912
| 1
| 0.06761
| false
| 0
| 0.006289
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c5bac2c492d9c19004b5fb75d3b4be6b356ede2a
| 5,622
|
py
|
Python
|
tests/_local_test_timings/_test_shared_memory.py
|
nevinadalal/Hyperactive
|
3232ffeda70c5d4853b9e71aaf5d1e761c0db9c2
|
[
"MIT"
] | 4
|
2019-06-09T08:04:01.000Z
|
2019-06-26T05:45:09.000Z
|
tests/_local_test_timings/_test_shared_memory.py
|
SimonBlanke/hyperactive
|
2137f682dd6351dc5c4de8d0d3ad3bd637af57dd
|
[
"MIT"
] | null | null | null |
tests/_local_test_timings/_test_shared_memory.py
|
SimonBlanke/hyperactive
|
2137f682dd6351dc5c4de8d0d3ad3bd637af57dd
|
[
"MIT"
] | null | null | null |
import time
import numpy as np
import pandas as pd
from hyperactive import Hyperactive
def model(opt):
    """Dummy objective function: simulate ~1 ms of work, score is always 0."""
    time.sleep(0.001)
    return 0
def model1(opt):
    """Dummy objective function: simulate ~1 ms of work, score is always 0."""
    time.sleep(0.001)
    return 0
def model2(opt):
    """Dummy objective function: simulate ~1 ms of work, score is always 0."""
    time.sleep(0.001)
    return 0
def model3(opt):
    """Dummy objective function: simulate ~1 ms of work, score is always 0."""
    time.sleep(0.001)
    return 0
def model4(opt):
    """Dummy objective function: simulate ~1 ms of work, score is always 0."""
    time.sleep(0.001)
    return 0
# Single-parameter search space shared by every timing test below.
search_space = {"x1": list(range(2, 200))}
def test_shared_memory_0():
# Timing baseline: one job with shared memory.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory="share",
)
hyper.run()
d_time_1 = time.perf_counter() - c_time
# Same search fanned out over 4 jobs, still sharing one memory.
n_jobs = 4
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=n_jobs,
memory="share",
)
hyper.run()
d_time_2 = time.perf_counter() - c_time
# Normalize by job count to get a per-job duration.
d_time_2 = d_time_2 / n_jobs
print("\n d_time_1 \n", d_time_1)
print("\n d_time_2 \n", d_time_2)
d_time = d_time_1 / d_time_2
# Shared memory should make the multi-job run clearly faster per job.
assert d_time > 1.4
def test_shared_memory_1():
# Timing baseline: one job with per-job (non-shared) memory.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory=True,
)
hyper.run()
d_time_1 = time.perf_counter() - c_time
n_jobs = 4
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=n_jobs,
memory=True,
)
hyper.run()
d_time_2 = time.perf_counter() - c_time
# Normalize by job count to get a per-job duration.
d_time_2 = d_time_2 / n_jobs
print("\n d_time_1 \n", d_time_1)
print("\n d_time_2 \n", d_time_2)
d_time = d_time_1 / d_time_2
# Without shared memory the per-job time should stay near the baseline.
assert d_time > 0.7
assert d_time < 1.3
def test_shared_memory_2():
# Timing baseline: a single search with default memory settings.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
)
hyper.run()
d_time_1 = time.perf_counter() - c_time
# Four separate searches of the same model, all sharing one memory.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory="share",
)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory="share",
)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory="share",
)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory="share",
)
hyper.run()
d_time_2 = time.perf_counter() - c_time
# Normalize by the number of added searches.
d_time_2 = d_time_2 / 4
print("\n d_time_1 \n", d_time_1)
print("\n d_time_2 \n", d_time_2)
d_time = d_time_1 / d_time_2
# Shared memory across searches should beat the baseline per search.
assert d_time > 1.4
def test_shared_memory_3():
# Timing baseline: a single search with default memory settings.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
)
hyper.run()
d_time_1 = time.perf_counter() - c_time
# Four separate searches, each with its own (non-shared) memory.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory=True,
)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory=True,
)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory=True,
)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
memory=True,
)
hyper.run()
d_time_2 = time.perf_counter() - c_time
# Normalize by the number of added searches.
d_time_2 = d_time_2 / 4
print("\n d_time_1 \n", d_time_1)
print("\n d_time_2 \n", d_time_2)
d_time = d_time_1 / d_time_2
# Without sharing, per-search time should stay near the baseline.
assert d_time > 0.7
assert d_time < 1.3
def test_shared_memory_warm_start_0():
# Baseline run; its search data seeds the warm start below.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
)
hyper.run()
d_time_1 = time.perf_counter() - c_time
search_data0 = hyper.search_data(model)
# Warm-started multi-job run with shared memory.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=4,
memory_warm_start=search_data0,
memory="share",
)
hyper.run()
d_time_2 = time.perf_counter() - c_time
# Normalize by job count to get a per-job duration.
d_time_2 = d_time_2 / 4
print("\n d_time_1 \n", d_time_1)
print("\n d_time_2 \n", d_time_2)
d_time = d_time_1 / d_time_2
# Warm start plus shared memory should be clearly faster per job.
assert d_time > 1.4
def test_shared_memory_warm_start_1():
# Baseline run; its search data seeds the warm start below.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=1,
)
hyper.run()
d_time_1 = time.perf_counter() - c_time
search_data0 = hyper.search_data(model)
# Warm-started multi-job run with per-job (non-shared) memory.
c_time = time.perf_counter()
hyper = Hyperactive(n_processes=1)
hyper.add_search(
model,
search_space,
n_iter=300,
n_jobs=4,
memory_warm_start=search_data0,
memory=True,
)
hyper.run()
d_time_2 = time.perf_counter() - c_time
# Normalize by job count to get a per-job duration.
d_time_2 = d_time_2 / 4
print("\n d_time_1 \n", d_time_1)
print("\n d_time_2 \n", d_time_2)
d_time = d_time_1 / d_time_2
# Warm start alone should already give a large per-job speedup here.
assert d_time > 2.3
| 19.520833
| 43
| 0.580932
| 854
| 5,622
| 3.467213
| 0.063232
| 0.124958
| 0.074975
| 0.115502
| 0.947991
| 0.947991
| 0.941912
| 0.934144
| 0.899021
| 0.899021
| 0
| 0.053816
| 0.312522
| 5,622
| 287
| 44
| 19.58885
| 0.71229
| 0
| 0
| 0.84322
| 0
| 0
| 0.036464
| 0
| 0
| 0
| 0
| 0
| 0.033898
| 1
| 0.04661
| false
| 0
| 0.016949
| 0
| 0.084746
| 0.050847
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c5d98df08262923d855db0e898b69002913cf707
| 141
|
py
|
Python
|
stocker/__init__.py
|
aljazmedic/stock-prediction
|
5eb2c63c93ef44e51500dbfc2255d7d035e6b6dd
|
[
"MIT"
] | 1
|
2021-08-02T11:55:33.000Z
|
2021-08-02T11:55:33.000Z
|
stocker/__init__.py
|
aljazmedic/stock-prediction
|
5eb2c63c93ef44e51500dbfc2255d7d035e6b6dd
|
[
"MIT"
] | null | null | null |
stocker/__init__.py
|
aljazmedic/stock-prediction
|
5eb2c63c93ef44e51500dbfc2255d7d035e6b6dd
|
[
"MIT"
] | null | null | null |
from stocker.stock import Stock, add_indicators, SubCall
from stocker.stock_server import StockServer
from stocker.stock_exceptions import *
| 35.25
| 56
| 0.858156
| 19
| 141
| 6.210526
| 0.526316
| 0.279661
| 0.40678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099291
| 141
| 3
| 57
| 47
| 0.929134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a84c9ae85694717e4fa562ef3b190e7ea42d5bdf
| 36
|
py
|
Python
|
rllite/QuantileRegressionDQN/__init__.py
|
ZJU-RL/zjurl
|
6aab83aaba7249bc43d112f21787c45b111d8aa4
|
[
"MIT"
] | 3
|
2019-03-07T13:24:21.000Z
|
2019-08-08T12:49:51.000Z
|
rllite/QuantileRegressionDQN/__init__.py
|
tyGavinZJU/rllite
|
5c85eb11babb69a09604a04f0eb7bdc5a96f08f0
|
[
"MIT"
] | null | null | null |
rllite/QuantileRegressionDQN/__init__.py
|
tyGavinZJU/rllite
|
5c85eb11babb69a09604a04f0eb7bdc5a96f08f0
|
[
"MIT"
] | null | null | null |
from .QuantileRegressionDQN import *
| 36
| 36
| 0.861111
| 3
| 36
| 10.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a86fe2bf5ce3cc7a0eb79cf3fd003acd0804d412
| 16,949
|
py
|
Python
|
tests/src/main/python/rest/tests/extract/swagger_client/api/fhir_measures_api.py
|
IBM/quality-measure-and-cohort-service
|
8963227bf4941d6a5fdc641b37ca0f72da5a6f2b
|
[
"Apache-2.0"
] | 1
|
2020-10-05T15:10:03.000Z
|
2020-10-05T15:10:03.000Z
|
tests/src/main/python/rest/tests/extract/swagger_client/api/fhir_measures_api.py
|
IBM/quality-measure-and-cohort-service
|
8963227bf4941d6a5fdc641b37ca0f72da5a6f2b
|
[
"Apache-2.0"
] | null | null | null |
tests/src/main/python/rest/tests/extract/swagger_client/api/fhir_measures_api.py
|
IBM/quality-measure-and-cohort-service
|
8963227bf4941d6a5fdc641b37ca0f72da5a6f2b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
IBM Cohort Engine
Service to evaluate cohorts and measures # noqa: E501
OpenAPI spec version: 2.1.0 2022-02-18T21:50:45Z
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class FHIRMeasuresApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
# Use an injected ApiClient when provided; otherwise create a default one.
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_measure_parameters(self, version, measure_identifier_value, fhir_data_server_config, **kwargs): # noqa: E501
"""Get measure parameters # noqa: E501
Retrieves the parameter information for libraries linked to by a measure # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_measure_parameters(version, measure_identifier_value, fhir_data_server_config, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str version: The release date of the version of the API you want to use. Specify dates in YYYY-MM-DD format. (required)
:param str measure_identifier_value: Used to identify the FHIR measure resource you would like the parameter information for using the Measure.Identifier.Value field. (required)
:param file fhir_data_server_config: A configuration file containing information needed to connect to the FHIR server. See https://github.com/Alvearie/quality-measure-and-cohort-service/blob/main/docs/user-guide/fhir-server-config.md for more details. Example Contents: <pre>{ \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"https://fhir-internal.dev:9443/fhir-server/api/v4\", \"user\": \"fhiruser\", \"password\": \"replaceWithfhiruserPassword\", \"logInfo\": [ \"ALL\" ], \"tenantId\": \"default\" }</pre> (required)
:param str measure_identifier_system: The system name used to provide a namespace for the measure identifier values. For example, if using social security numbers for the identifier values, one would use http://hl7.org/fhir/sid/us-ssn as the system value.
:param str measure_version: The version of the measure to retrieve as represented by the FHIR resource Measure.version field. If a value is not provided, the underlying code will atempt to resolve the most recent version assuming a <Major>.<Minor>.<Patch> format (ie if versions 1.0.0 and 2.0.0 both exist, the code will return the 2.0.0 version)
:return: MeasureParameterInfoList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_measure_parameters_with_http_info(version, measure_identifier_value, fhir_data_server_config, **kwargs) # noqa: E501
else:
(data) = self.get_measure_parameters_with_http_info(version, measure_identifier_value, fhir_data_server_config, **kwargs) # noqa: E501
return data
# Implementation behind get_measure_parameters(): validates params and
# issues the HTTP call via the generated ApiClient.
def get_measure_parameters_with_http_info(self, version, measure_identifier_value, fhir_data_server_config, **kwargs): # noqa: E501
"""Get measure parameters # noqa: E501
Retrieves the parameter information for libraries linked to by a measure # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_measure_parameters_with_http_info(version, measure_identifier_value, fhir_data_server_config, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str version: The release date of the version of the API you want to use. Specify dates in YYYY-MM-DD format. (required)
:param str measure_identifier_value: Used to identify the FHIR measure resource you would like the parameter information for using the Measure.Identifier.Value field. (required)
:param file fhir_data_server_config: A configuration file containing information needed to connect to the FHIR server. See https://github.com/Alvearie/quality-measure-and-cohort-service/blob/main/docs/user-guide/fhir-server-config.md for more details. Example Contents: <pre>{ \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"https://fhir-internal.dev:9443/fhir-server/api/v4\", \"user\": \"fhiruser\", \"password\": \"replaceWithfhiruserPassword\", \"logInfo\": [ \"ALL\" ], \"tenantId\": \"default\" }</pre> (required)
:param str measure_identifier_system: The system name used to provide a namespace for the measure identifier values. For example, if using social security numbers for the identifier values, one would use http://hl7.org/fhir/sid/us-ssn as the system value.
:param str measure_version: The version of the measure to retrieve as represented by the FHIR resource Measure.version field. If a value is not provided, the underlying code will atempt to resolve the most recent version assuming a <Major>.<Minor>.<Patch> format (ie if versions 1.0.0 and 2.0.0 both exist, the code will return the 2.0.0 version)
:return: MeasureParameterInfoList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['version', 'measure_identifier_value', 'fhir_data_server_config', 'measure_identifier_system', 'measure_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_measure_parameters" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'version' is set
if self.api_client.client_side_validation and ('version' not in params or
params['version'] is None): # noqa: E501
raise ValueError("Missing the required parameter `version` when calling `get_measure_parameters`") # noqa: E501
# verify the required parameter 'measure_identifier_value' is set
if self.api_client.client_side_validation and ('measure_identifier_value' not in params or
params['measure_identifier_value'] is None): # noqa: E501
raise ValueError("Missing the required parameter `measure_identifier_value` when calling `get_measure_parameters`") # noqa: E501
# verify the required parameter 'fhir_data_server_config' is set
if self.api_client.client_side_validation and ('fhir_data_server_config' not in params or
params['fhir_data_server_config'] is None): # noqa: E501
raise ValueError("Missing the required parameter `fhir_data_server_config` when calling `get_measure_parameters`") # noqa: E501
collection_formats = {}
path_params = {}
if 'measure_identifier_value' in params:
path_params['measure_identifier_value'] = params['measure_identifier_value'] # noqa: E501
query_params = []
if 'version' in params:
query_params.append(('version', params['version'])) # noqa: E501
if 'measure_identifier_system' in params:
query_params.append(('measure_identifier_system', params['measure_identifier_system'])) # noqa: E501
if 'measure_version' in params:
query_params.append(('measure_version', params['measure_version'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
if 'fhir_data_server_config' in params:
local_var_files['fhir_data_server_config'] = params['fhir_data_server_config'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1/fhir/measure/identifier/{measure_identifier_value}/parameters', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MeasureParameterInfoList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
# Same operation as above, but the measure is addressed by FHIR resource id
# instead of an identifier system/value pair.
def get_measure_parameters_by_id(self, version, measure_id, fhir_data_server_config, **kwargs): # noqa: E501
"""Get measure parameters by id # noqa: E501
Retrieves the parameter information for libraries linked to by a measure # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_measure_parameters_by_id(version, measure_id, fhir_data_server_config, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str version: The release date of the version of the API you want to use. Specify dates in YYYY-MM-DD format. (required)
:param str measure_id: FHIR measure resource id for the measure you would like the parameter information for using the Measure.id field. (required)
:param file fhir_data_server_config: A configuration file containing information needed to connect to the FHIR server. See https://github.com/Alvearie/quality-measure-and-cohort-service/blob/main/docs/user-guide/fhir-server-config.md for more details. Example Contents: <pre>{ \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"https://fhir-internal.dev:9443/fhir-server/api/v4\", \"user\": \"fhiruser\", \"password\": \"replaceWithfhiruserPassword\", \"logInfo\": [ \"ALL\" ], \"tenantId\": \"default\" }</pre> (required)
:return: MeasureParameterInfoList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_measure_parameters_by_id_with_http_info(version, measure_id, fhir_data_server_config, **kwargs) # noqa: E501
else:
(data) = self.get_measure_parameters_by_id_with_http_info(version, measure_id, fhir_data_server_config, **kwargs) # noqa: E501
return data
# Implementation behind get_measure_parameters_by_id().
def get_measure_parameters_by_id_with_http_info(self, version, measure_id, fhir_data_server_config, **kwargs): # noqa: E501
"""Get measure parameters by id # noqa: E501
Retrieves the parameter information for libraries linked to by a measure # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_measure_parameters_by_id_with_http_info(version, measure_id, fhir_data_server_config, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str version: The release date of the version of the API you want to use. Specify dates in YYYY-MM-DD format. (required)
:param str measure_id: FHIR measure resource id for the measure you would like the parameter information for using the Measure.id field. (required)
:param file fhir_data_server_config: A configuration file containing information needed to connect to the FHIR server. See https://github.com/Alvearie/quality-measure-and-cohort-service/blob/main/docs/user-guide/fhir-server-config.md for more details. Example Contents: <pre>{ \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"https://fhir-internal.dev:9443/fhir-server/api/v4\", \"user\": \"fhiruser\", \"password\": \"replaceWithfhiruserPassword\", \"logInfo\": [ \"ALL\" ], \"tenantId\": \"default\" }</pre> (required)
:return: MeasureParameterInfoList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['version', 'measure_id', 'fhir_data_server_config'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_measure_parameters_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'version' is set
if self.api_client.client_side_validation and ('version' not in params or
params['version'] is None): # noqa: E501
raise ValueError("Missing the required parameter `version` when calling `get_measure_parameters_by_id`") # noqa: E501
# verify the required parameter 'measure_id' is set
if self.api_client.client_side_validation and ('measure_id' not in params or
params['measure_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `measure_id` when calling `get_measure_parameters_by_id`") # noqa: E501
# verify the required parameter 'fhir_data_server_config' is set
if self.api_client.client_side_validation and ('fhir_data_server_config' not in params or
params['fhir_data_server_config'] is None): # noqa: E501
raise ValueError("Missing the required parameter `fhir_data_server_config` when calling `get_measure_parameters_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'measure_id' in params:
path_params['measure_id'] = params['measure_id'] # noqa: E501
query_params = []
if 'version' in params:
query_params.append(('version', params['version'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
if 'fhir_data_server_config' in params:
local_var_files['fhir_data_server_config'] = params['fhir_data_server_config'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1/fhir/measure/{measure_id}/parameters', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MeasureParameterInfoList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 62.084249
| 605
| 0.667355
| 2,108
| 16,949
| 5.1537
| 0.117647
| 0.036082
| 0.041237
| 0.05891
| 0.929492
| 0.924153
| 0.90648
| 0.896171
| 0.884113
| 0.883008
| 0
| 0.016455
| 0.239837
| 16,949
| 272
| 606
| 62.3125
| 0.826762
| 0.480972
| 0
| 0.695035
| 1
| 0
| 0.248124
| 0.134457
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035461
| false
| 0
| 0.028369
| 0
| 0.113475
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a89fbc3878b10079d5f14de621aedbd27950b381
| 6,525
|
py
|
Python
|
tests/test_sphinxawesome_codelinter.py
|
kai687/sphinxawesome-codelinter
|
b544fee3268b33b2db1d2cf0fb1dd9e30fa8dff8
|
[
"MIT"
] | 4
|
2020-06-05T03:48:46.000Z
|
2020-11-12T19:51:12.000Z
|
tests/test_sphinxawesome_codelinter.py
|
kai687/sphinxawesome-codelinter
|
b544fee3268b33b2db1d2cf0fb1dd9e30fa8dff8
|
[
"MIT"
] | 79
|
2020-01-17T20:36:48.000Z
|
2022-03-27T18:09:28.000Z
|
tests/test_sphinxawesome_codelinter.py
|
kai687/sphinxawesome-codelinter
|
b544fee3268b33b2db1d2cf0fb1dd9e30fa8dff8
|
[
"MIT"
] | null | null | null |
"""Unit tests for the sphinxawesome.codelinter extension."""
import os
from io import StringIO
from pathlib import Path
import pytest
from sphinx.application import Sphinx
from sphinxawesome.codelinter import __version__
def test_returns_version() -> None:
"""It has the correct version."""
# Pins the packaged __version__ string so releases stay in sync.
assert __version__ == "1.0.5"
def test_can_access_rootdir(rootdir: Path) -> None:
"""It can access the test files."""
# Sanity check that the fixture root contains the Sphinx test project.
conf_file = rootdir / "test-root" / "conf.py"
index_file = rootdir / "test-root" / "index.rst"
assert conf_file.exists()
assert index_file.exists()
@pytest.mark.sphinx("dummy")
def test_dummy_compiles_minimal_configuration(app: Sphinx) -> None:
"""It compiles a minimal configuration with the `dummy` builder."""
if app.builder is not None:
app.builder.build_all()
# The dummy builder writes no output files.
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
@pytest.mark.sphinx(
"dummy",
confoverrides={"extensions": ["sphinxawesome.codelinter"]},
)
def test_dummy_compiles_with_extension(app: Sphinx) -> None:
"""It compiles with a minimal configuration with the extension added."""
if app.builder is not None:
app.builder.build_all()
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
# Loading the extension should register its config value.
assert "codelinter_languages" in app.config
@pytest.mark.sphinx(
"codelinter",
confoverrides={"extensions": ["sphinxawesome.codelinter"]},
)
def test_codelinter_compiles_without_languages(app: Sphinx, status: StringIO) -> None:
"""It builds with the codelinter builder without any languages."""
if app.builder is not None:
app.builder.build_all()
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
assert "codelinter_languages" in app.config
# With no languages configured, no code block should be linted.
assert "[Line 6] linting json" not in status.getvalue()
assert "[Line 10] linting" not in status.getvalue()
assert "[Line 14] linting json" not in status.getvalue()
assert "[Line 18] linting yaml" not in status.getvalue()
assert "[Line 26] linting yaml" not in status.getvalue()
assert "[Line 34] linting json" not in status.getvalue()
assert "[Line 38] linting" not in status.getvalue()
@pytest.mark.sphinx(
"dummy",
confoverrides={
"extensions": ["sphinxawesome.codelinter"],
"codelinter_languages": {"json": "python -m json.tool"},
},
)
def test_dummy_compiles_with_codelinter_languages(
app: Sphinx, status: StringIO
) -> None:
"""It compiles with the dummy builder with configured codelinter_languages."""
if app.builder is not None:
app.builder.build_all()
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
# Linting only runs under the codelinter builder, not the dummy builder.
assert "[Line 6] linting json" not in status.getvalue()
assert "[Line 10] linting" not in status.getvalue()
assert "[Line 14] linting json" not in status.getvalue()
assert "[Line 18] linting yaml" not in status.getvalue()
assert "[Line 26] linting yaml" not in status.getvalue()
assert "[Line 34] linting json" not in status.getvalue()
assert "[Line 38] linting" not in status.getvalue()
@pytest.mark.sphinx(
"codelinter",
confoverrides={
"extensions": ["sphinxawesome.codelinter"],
"codelinter_languages": {"json": "does not exist"},
},
)
def test_codelinter_raises_warning_on_non_existing_tool(
app: Sphinx, status: StringIO, warning: StringIO
) -> None:
"""It raises a warning for a non-existing linter."""
if app.builder is not None:
app.builder.build_all()
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
# The unresolvable linter command should surface in the warning stream.
assert "does not exist" in warning.getvalue()
@pytest.mark.sphinx(
"codelinter",
confoverrides={
"extensions": ["sphinxawesome.codelinter"],
"codelinter_languages": {"json": "python -m json.tool"},
},
)
def test_codelinter_lints_json(
app: Sphinx, status: StringIO, warning: StringIO
) -> None:
"""It lints JSON code blocks."""
if app.builder is not None:
app.builder.build_all()
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
# Only json blocks are linted; yaml and unlabelled blocks are skipped.
assert "[Line 6] linting json" in status.getvalue()
assert "[Line 10] linting" not in status.getvalue()
assert "[Line 14] linting json" in status.getvalue()
assert "[Line 18] linting yaml" not in status.getvalue()
assert "[Line 26] linting yaml" not in status.getvalue()
assert "[Line 34] linting json" in status.getvalue()
assert "[Line 38] linting" not in status.getvalue()
# The fixture contains invalid JSON, so a problem is reported.
assert "Problem in json" in warning.getvalue()
@pytest.mark.sphinx(
"codelinter",
confoverrides={
"extensions": ["sphinxawesome.codelinter"],
"codelinter_languages": {"yaml": "yamllint -"},
},
)
def test_codelinter_lints_yaml(
app: Sphinx, status: StringIO, warning: StringIO
) -> None:
"""It lints YAML code blocks."""
if app.builder is not None:
app.builder.build_all()
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
# Only yaml blocks are linted; json and unlabelled blocks are skipped.
assert "[Line 6] linting json" not in status.getvalue()
assert "[Line 10] linting" not in status.getvalue()
assert "[Line 14] linting json" not in status.getvalue()
assert "[Line 18] linting yaml" in status.getvalue()
assert "[Line 26] linting yaml" in status.getvalue()
assert "[Line 34] linting json" not in status.getvalue()
assert "[Line 38] linting" not in status.getvalue()
# The fixture contains invalid YAML, so a problem is reported.
assert "Problem in yaml" in warning.getvalue()
@pytest.mark.sphinx(
"codelinter",
srcdir="example",
confoverrides={
"extensions": ["sphinxawesome.codelinter"],
"codelinter_languages": {"yaml": "yamllint -", "json": "python -m json.tool"},
},
)
def test_codelinter_lints_json_and_yaml(
app: Sphinx, status: StringIO, warning: StringIO
) -> None:
"""It lints both JSON and YAML code blocks."""
if app.builder is not None:
app.builder.build_all()
assert os.path.exists(app.outdir)
assert not os.listdir(app.outdir)
# Both json and yaml blocks get linted; unlabelled blocks are still skipped.
assert "[Line 6] linting json" in status.getvalue()
assert "[Line 10] linting" not in status.getvalue()
assert "[Line 14] linting json" in status.getvalue()
assert "[Line 18] linting yaml" in status.getvalue()
assert "[Line 26] linting yaml" in status.getvalue()
assert "[Line 34] linting json" in status.getvalue()
assert "[Line 38] linting" not in status.getvalue()
assert "Problem in yaml" in warning.getvalue()
assert "Problem in json" in warning.getvalue()
| 33.80829
| 86
| 0.682299
| 853
| 6,525
| 5.139508
| 0.116061
| 0.079836
| 0.127737
| 0.165602
| 0.819343
| 0.788321
| 0.785128
| 0.761861
| 0.689553
| 0.678376
| 0
| 0.012915
| 0.193103
| 6,525
| 192
| 87
| 33.984375
| 0.819753
| 0.079234
| 0
| 0.733333
| 0
| 0
| 0.234039
| 0.028226
| 0
| 0
| 0
| 0
| 0.406667
| 1
| 0.066667
| false
| 0
| 0.04
| 0
| 0.106667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a8a60d0b148ee23bd255073fee92f0d8b7f04a05
| 186
|
py
|
Python
|
misc/teardown.py
|
fpolignano/arcgis-python-api
|
0d0100772f3fd35b6105f088a98ae73b0a178268
|
[
"Apache-2.0"
] | 2
|
2020-11-23T23:06:04.000Z
|
2020-11-23T23:06:07.000Z
|
misc/teardown.py
|
fpolignano/arcgis-python-api
|
0d0100772f3fd35b6105f088a98ae73b0a178268
|
[
"Apache-2.0"
] | null | null | null |
misc/teardown.py
|
fpolignano/arcgis-python-api
|
0d0100772f3fd35b6105f088a98ae73b0a178268
|
[
"Apache-2.0"
] | null | null | null |
# Teardown script: removes the per-user resources created by the companion
# setup scripts. `delete_for_users` is presumably provided by `_common` via
# the star import — TODO confirm against _common.py.
from _common import *
print("-*-*-*-*-*-*-*-*-*-*-*Teardown begins*-*-*-*-*-*-*-*-*-*-*-*-")
delete_for_users()
print("-*-*-*-*-*-*-*-*-*-*-*Teardown ends*-*-*-*-*-*-*-*-*-*-*-*-*-")
| 23.25
| 70
| 0.370968
| 12
| 186
| 5.5
| 0.833333
| 0.393939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 186
| 7
| 71
| 26.571429
| 0.37931
| 0
| 0
| 0
| 0
| 0
| 0.655914
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
a8c24b4553b4c9303d0e1fa64fcaa485c94095e7
| 21,803
|
py
|
Python
|
emsapi/operations/asset_operations.py
|
ge-flight-analytics/emsapi-python
|
2e3a53529758f1bd7a2a850119b1cc1b5ac552e3
|
[
"MIT"
] | null | null | null |
emsapi/operations/asset_operations.py
|
ge-flight-analytics/emsapi-python
|
2e3a53529758f1bd7a2a850119b1cc1b5ac552e3
|
[
"MIT"
] | 2
|
2020-01-16T00:04:35.000Z
|
2021-05-26T21:04:06.000Z
|
emsapi/operations/asset_operations.py
|
ge-flight-analytics/emsapi-python
|
2e3a53529758f1bd7a2a850119b1cc1b5ac552e3
|
[
"MIT"
] | 1
|
2021-02-23T08:25:12.000Z
|
2021-02-23T08:25:12.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import HttpOperationError
from .. import models
class AssetOperations(object):
    """AssetOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE(review): originally generated by AutoRest; the ~30-line
    # request/deserialize pattern that was duplicated in every method is
    # factored into _get. Public signatures, docstrings, status-code sets,
    # and the per-method `.metadata` attributes are unchanged.

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config

    def _get(self, url, path_format_arguments, query_parameters,
             valid_status_codes, success_type, custom_headers, raw,
             operation_config):
        """Shared GET helper used by every operation in this class.

        Formats *url* with *path_format_arguments*, sends a GET request with
        an ``Accept: application/json`` header (plus any *custom_headers*),
        and deserializes the response body.

        :param str url: URL template taken from the operation's metadata.
        :param dict path_format_arguments: serialized path parameters.
        :param dict query_parameters: serialized query parameters.
        :param valid_status_codes: the status codes that do not raise.
         200 is deserialized as *success_type*; any other listed code is
         deserialized as the standard ``AdiEmsWebApiModelError`` body.
        :param str success_type: msrest type string for a 200 response.
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        url = self._client.format_url(url, **path_format_arguments)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in valid_status_codes:
            raise HttpOperationError(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize(success_type, response)
        else:
            # Every documented non-200 status carries the standard error body.
            deserialized = self._deserialize('AdiEmsWebApiModelError', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def get_fleets(
            self, ems_system_id, custom_headers=None, raw=False, **operation_config):
        """Returns the list of fleets the user has access to in their security
        context.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int')
        }
        return self._get(
            self.get_fleets.metadata['url'], path_format_arguments, {},
            [200, 401, 503], '[AdiEmsWebApiV2DtoAssetFleet]',
            custom_headers, raw, operation_config)
    get_fleets.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/fleets'}

    def get_fleet(
            self, ems_system_id, fleet_id, custom_headers=None, raw=False, **operation_config):
        """Returns information for a fleet on the system.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param fleet_id: The unique identifier of the fleet of interest.
        :type fleet_id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int'),
            'fleetId': self._serialize.url("fleet_id", fleet_id, 'int')
        }
        return self._get(
            self.get_fleet.metadata['url'], path_format_arguments, {},
            [200, 401, 404, 503], 'AdiEmsWebApiV2DtoAssetFleet',
            custom_headers, raw, operation_config)
    get_fleet.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/fleets/{fleetId}'}

    def get_aircraft(
            self, ems_system_id, fleet_id=None, custom_headers=None, raw=False, **operation_config):
        """Returns the list of aircraft the user has access to in their security
        context.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param fleet_id: If specified, returns only the aircraft belonging to
         the indicated fleet.
        :type fleet_id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int')
        }
        # fleetId is an optional filter; omit the query parameter entirely
        # when the caller does not supply it.
        query_parameters = {}
        if fleet_id is not None:
            query_parameters['fleetId'] = self._serialize.query("fleet_id", fleet_id, 'int')
        return self._get(
            self.get_aircraft.metadata['url'], path_format_arguments,
            query_parameters,
            [200, 401, 404, 503], '[AdiEmsWebApiV2DtoAssetAircraft]',
            custom_headers, raw, operation_config)
    get_aircraft.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/aircraft'}

    def get_aircraft_by_id(
            self, ems_system_id, aircraft_id, custom_headers=None, raw=False, **operation_config):
        """Returns information for an aircraft on the system.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param aircraft_id: The unique identifier of the aircraft of interest.
        :type aircraft_id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int'),
            'aircraftId': self._serialize.url("aircraft_id", aircraft_id, 'int')
        }
        return self._get(
            self.get_aircraft_by_id.metadata['url'], path_format_arguments, {},
            [200, 401, 404, 503], 'AdiEmsWebApiV2DtoAssetAircraft',
            custom_headers, raw, operation_config)
    get_aircraft_by_id.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/aircraft/{aircraftId}'}

    def get_flight_phases(
            self, ems_system_id, custom_headers=None, raw=False, **operation_config):
        """Returns the list of flight phases.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int')
        }
        return self._get(
            self.get_flight_phases.metadata['url'], path_format_arguments, {},
            [200, 401, 503], '[AdiEmsWebApiV2DtoAssetFlightPhase]',
            custom_headers, raw, operation_config)
    get_flight_phases.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/flight-phases'}

    def get_flight_phase(
            self, ems_system_id, flight_phase_id, custom_headers=None, raw=False, **operation_config):
        """Returns information for a flight phase on the system.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param flight_phase_id: The unique identifier of the flight phase of
         interest.
        :type flight_phase_id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int'),
            'flightPhaseId': self._serialize.url("flight_phase_id", flight_phase_id, 'int')
        }
        return self._get(
            self.get_flight_phase.metadata['url'], path_format_arguments, {},
            [200, 401, 404, 503], 'AdiEmsWebApiV2DtoAssetFlightPhase',
            custom_headers, raw, operation_config)
    get_flight_phase.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/flight-phases/{flightPhaseId}'}

    def get_airports(
            self, ems_system_id, include_all=None, custom_headers=None, raw=False, **operation_config):
        """Returns the list of airports that have been visited by the EMS system
        or all the airports available
        on the EMS system.

        If includeAll is left as false just the airports that are significant
        to the flight data are included.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param include_all: If true, include all the airports on this EMS
         system. Defaults to false.
        :type include_all: bool
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int')
        }
        # includeAll is an optional flag; omit the query parameter entirely
        # when the caller does not supply it.
        query_parameters = {}
        if include_all is not None:
            query_parameters['includeAll'] = self._serialize.query("include_all", include_all, 'bool')
        return self._get(
            self.get_airports.metadata['url'], path_format_arguments,
            query_parameters,
            [200, 401, 503], '[AdiEmsWebApiV2DtoAssetAirport]',
            custom_headers, raw, operation_config)
    get_airports.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/airports'}

    def get_airport(
            self, ems_system_id, airport_id, custom_headers=None, raw=False, **operation_config):
        """Returns information for an airport on the system.

        :param ems_system_id: The unique identifier of the system containing
         the EMS data.
        :type ems_system_id: int
        :param airport_id: The unique identifier of the airport of interest.
        :type airport_id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        path_format_arguments = {
            'emsSystemId': self._serialize.url("ems_system_id", ems_system_id, 'int'),
            'airportId': self._serialize.url("airport_id", airport_id, 'int')
        }
        return self._get(
            self.get_airport.metadata['url'], path_format_arguments, {},
            [200, 401, 404, 503], 'AdiEmsWebApiV2DtoAssetAirport',
            custom_headers, raw, operation_config)
    get_airport.metadata = {'url': '/v2/ems-systems/{emsSystemId}/assets/airports/{airportId}'}
| 41.928846
| 109
| 0.663303
| 2,280
| 21,803
| 6.145614
| 0.070614
| 0.027619
| 0.031402
| 0.052812
| 0.883743
| 0.876677
| 0.865829
| 0.865829
| 0.845561
| 0.838139
| 0
| 0.011644
| 0.247626
| 21,803
| 519
| 110
| 42.009634
| 0.842538
| 0.30638
| 0
| 0.746032
| 1
| 0
| 0.121547
| 0.080294
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.011905
| 0
| 0.119048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a8cda99de52cf4c424d55c7078cf1753656569e4
| 146
|
py
|
Python
|
test/unit/module/test_module.py
|
juraj-ivadolabs/playground
|
08d653dfe45499557369e9f1fd1bb08441a19159
|
[
"MIT"
] | null | null | null |
test/unit/module/test_module.py
|
juraj-ivadolabs/playground
|
08d653dfe45499557369e9f1fd1bb08441a19159
|
[
"MIT"
] | null | null | null |
test/unit/module/test_module.py
|
juraj-ivadolabs/playground
|
08d653dfe45499557369e9f1fd1bb08441a19159
|
[
"MIT"
] | null | null | null |
from module.module import add_one
# def test_wrong_answer():
# assert add_one(3) == 5
def test_correct_answer():
    """add_one increments its argument by exactly one."""
    result = add_one(1)
    assert result == 2
| 18.25
| 33
| 0.691781
| 24
| 146
| 3.916667
| 0.625
| 0.191489
| 0.319149
| 0.382979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033898
| 0.191781
| 146
| 7
| 34
| 20.857143
| 0.762712
| 0.349315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7662ca90d5e742de192f3855581f601f7360568a
| 584,934
|
py
|
Python
|
esociallib/v2_04/evtAdmissao.py
|
akretion/esociallib
|
2472d68b45610638cf10d87aeed48b917ebae6d8
|
[
"MIT"
] | 6
|
2018-02-16T09:59:35.000Z
|
2021-09-01T20:40:02.000Z
|
esociallib/v2_04/evtAdmissao.py
|
akretion/esociallib
|
2472d68b45610638cf10d87aeed48b917ebae6d8
|
[
"MIT"
] | 2
|
2018-02-02T19:32:21.000Z
|
2019-01-25T14:43:05.000Z
|
esociallib/v2_04/evtAdmissao.py
|
akretion/esociallib
|
2472d68b45610638cf10d87aeed48b917ebae6d8
|
[
"MIT"
] | 2
|
2018-05-03T17:16:38.000Z
|
2021-04-02T19:17:31.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Tue Oct 10 00:42:19 2017 by generateDS.py version 2.28b.
# Python 2.7.12 (default, Nov 19 2016, 06:48:10) [GCC 5.4.0 20160609]
#
# Command line options:
# ('--no-process-includes', '')
# ('-o', 'esociallib/v2_04/evtAdmissao.py')
#
# Command line arguments:
# schemas/v2_04/evtAdmissao.xsd
#
# Command line:
# /usr/local/bin/generateDS --no-process-includes -o "esociallib/v2_04/evtAdmissao.py" schemas/v2_04/evtAdmissao.xsd
#
# Current working directory (os.getcwd()):
# esociallib
#
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
# Module-level flag — presumably toggles simple-type validation in the
# generated element classes later in this file; TODO confirm where it is read.
Validate_simpletypes_ = True
# Python 2/3 compatibility: choose the base string type used for
# isinstance() checks (``basestring`` only exists on Python 2).
if sys.version_info.major == 2:
    BaseStrType_ = basestring
else:
    BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* into an ElementTree document.

    When no parser is supplied, prefer lxml's comment-ignoring
    ETCompatXMLParser and fall back to the plain XMLParser when the
    imported etree_ module does not provide it (i.e. xml.etree).
    """
    if parser is None:
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # xml.etree has no ETCompatXMLParser; use its standard parser.
            parser = etree_.XMLParser()
    return etree_.parse(infile, parser=parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
try:
from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError as exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
class _FixedOffsetTZ(datetime_.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime_.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return None
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node=None, input_name=''):
if not input_data:
return ''
else:
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
def gds_validate_base64(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_integer_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
int(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return values
def gds_format_float(self, input_data, input_name=''):
return ('%.15f' % input_data).rstrip('0')
def gds_validate_float(self, input_data, node=None, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_float_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return values
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node=None, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_double_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return values
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
def gds_validate_boolean(self, input_data, node=None, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_boolean_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return values
def gds_validate_datetime(self, input_data, node=None, input_name=''):
return input_data
def gds_format_datetime(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_datetime(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
time_parts = input_data.split('.')
if len(time_parts) > 1:
micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
input_data = '%s.%s' % (time_parts[0], micro_seconds, )
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt
def gds_validate_date(self, input_data, node=None, input_name=''):
return input_data
def gds_format_date(self, input_data, input_name=''):
_svalue = '%04d-%02d-%02d' % (
input_data.year,
input_data.month,
input_data.day,
)
try:
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(
hours, minutes)
except AttributeError:
pass
return _svalue
@classmethod
def gds_parse_date(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
dt = dt.replace(tzinfo=tz)
return dt.date()
def gds_validate_time(self, input_data, node=None, input_name=''):
return input_data
def gds_format_time(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%02d:%02d:%02d' % (
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%02d:%02d:%02d.%s' % (
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
def gds_validate_simple_patterns(self, patterns, target):
# pat is a list of lists of strings/patterns. We should:
# - AND the outer elements
# - OR the inner elements
found1 = True
for patterns1 in patterns:
found2 = False
for patterns2 in patterns1:
if re_.search(patterns2, target) is not None:
found2 = True
break
if not found2:
found1 = False
break
return found1
    @classmethod
    def gds_parse_time(cls, input_data):
        """Parse an xs:time string (optionally suffixed with 'Z' or a
        '+HH:MM'/'-HH:MM' offset, optionally fractional seconds) into a
        datetime.time carrying the parsed tzinfo.
        """
        tz = None
        if input_data[-1] == 'Z':
            # Trailing 'Z' means UTC.
            tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
            input_data = input_data[:-1]
        else:
            results = GeneratedsSuper.tzoff_pattern.search(input_data)
            if results is not None:
                # Convert the '+HH:MM'/'-HH:MM' suffix to signed minutes.
                tzoff_parts = results.group(2).split(':')
                tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                if results.group(1) == '-':
                    tzoff *= -1
                tz = GeneratedsSuper._FixedOffsetTZ(
                    tzoff, results.group(0))
                # Strip the six-character offset before strptime.
                input_data = input_data[:-6]
        if len(input_data.split('.')) > 1:
            dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
        else:
            dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
        dt = dt.replace(tzinfo=tz)
        return dt.time()
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
    def get_path_list_(self, node, path_list):
        """Append the namespace-stripped tag of `node` and of each of its
        ancestors to path_list, leaf first.

        Relies on lxml's getparent(); recursion stops at the root, whose
        parent is None.
        """
        if node is None:
            return
        tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
        if tag:
            path_list.append(tag)
        self.get_path_list_(node.getparent(), path_list)
    def get_class_obj_(self, node, default_class=None):
        """Resolve the class to instantiate for `node`.

        Honors an xsi:type attribute when the 'xsi' prefix is declared,
        looking the class name up in this module's globals; otherwise
        falls back to default_class.
        """
        class_obj1 = default_class
        if 'xsi' in node.nsmap:
            classname = node.get('{%s}type' % node.nsmap['xsi'])
            if classname is not None:
                names = classname.split(':')
                if len(names) == 2:
                    # Drop the namespace prefix from 'prefix:ClassName'.
                    classname = names[1]
                class_obj2 = globals().get(classname)
                if class_obj2 is not None:
                    class_obj1 = class_obj2
        return class_obj1
def gds_build_any(self, node, type_name=None):
return None
@classmethod
def gds_reverse_node_mapping(cls, mapping):
return dict(((v, k) for k, v in mapping.iteritems()))
@staticmethod
def gds_encode(instring):
if sys.version_info.major == 2:
return instring.encode(ExternalEncoding)
else:
return instring
    @staticmethod
    def convert_unicode(instring):
        """XML-escape a value for output.

        Handles native str, Python 2 `unicode` (encoded to UTF-8), and
        non-string values (stringified, then passed through gds_encode).
        """
        if isinstance(instring, str):
            result = quote_xml(instring)
        elif sys.version_info.major == 2 and isinstance(instring, unicode):
            result = quote_xml(instring).encode('utf8')
        else:
            result = GeneratedsSuper.gds_encode(str(instring))
        return result
    def __eq__(self, other):
        """Equality: same concrete class and identical attribute dicts."""
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 — confirm instances are never used as
        # dict keys or set members.
        if type(self) != type(other):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Inverse of __eq__ (required explicitly on Python 2)."""
        return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
    '''Get the subclass of a class from a specific module.

    The lookup convention is the original class name with a 'Sub'
    suffix; returns None when the module defines no such attribute.
    '''
    return getattr(module, class_.__name__ + 'Sub', None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from https://ipython.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Encoding used by gds_encode() on Python 2.
ExternalEncoding = 'ascii'
# Splits a qualified tag into optional '{namespace}' and local-name groups.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Collapses runs of whitespace (including newlines).
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches complete CDATA sections, spanning newlines.
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write `level` four-space indent units to outfile when pretty-printing."""
    if pretty_print:
        outfile.write('    ' * level)
def quote_xml(inStr):
    "Escape markup chars, but do not modify CDATA sections."
    if not inStr:
        return ''
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s2 = ''
    pos = 0
    # Escape only the text between CDATA sections; copy each CDATA
    # section through verbatim.
    matchobjects = CDATA_pattern_.finditer(s1)
    for mo in matchobjects:
        s3 = s1[pos:mo.start()]
        s2 += quote_xml_aux(s3)
        s2 += s1[mo.start():mo.end()]
        pos = mo.end()
    # Escape whatever follows the last CDATA section (or the whole
    # string when there were none).
    s3 = s1[pos:]
    s2 += quote_xml_aux(s3)
    return s2
def quote_xml_aux(inStr):
    """Escape the XML special characters '&', '<' and '>' in inStr.

    '&' must be replaced first so entities produced by the later
    replacements are not double-escaped.  (The previous text had the
    entity references corrupted into no-op replacements such as
    replace('&', '&'), leaving markup characters unescaped.)
    """
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape a value for use as an XML attribute and wrap it in quotes.

    Restores the proper entity escapes (&amp;, &lt;, &gt;, &quot;),
    which had been corrupted into identity replacements; the corrupted
    quot-escape also left a bare triple-quote that broke the module
    source.  Values containing double quotes are single-quoted unless
    they also contain single quotes, in which case the double quotes
    are entity-escaped.
    """
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Return inStr rendered as a Python string literal, choosing the
    quote style (single, double, or triple) that avoids escapes."""
    text = inStr
    if "'" not in text:
        # No single quotes: single-quoted (or triple-single for
        # multi-line values) needs no escaping.
        if '\n' in text:
            return "'''%s'''" % text
        return "'%s'" % text
    # Contains single quotes: fall back to double-quoted forms,
    # escaping any embedded double quotes.
    if '"' in text:
        text = text.replace('"', '\\"')
    if '\n' in text:
        return '"""%s"""' % text
    return '"%s"' % text
def get_all_text_(node):
    """Concatenate a node's leading text with the tail text of each of
    its direct children, in document order."""
    pieces = [node.text] if node.text is not None else []
    pieces.extend(
        child.tail for child in node if child.tail is not None)
    return ''.join(pieces)
def find_attr_value_(attr_name, node):
    """Look up an attribute on `node`, resolving a 'prefix:name' form
    through node.nsmap (an lxml-specific attribute) into Clark
    notation '{uri}name'.  Returns None when absent or unresolvable."""
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local_name = parts
        namespace_uri = node.nsmap.get(prefix)
        if namespace_uri is not None:
            return attrs.get('{%s}%s' % (namespace_uri, local_name, ))
    return None
class GDSParseError(Exception):
    """Raised when XML input cannot be mapped onto the generated classes."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError, appending the element tag and source line.

    `node.sourceline` is an lxml-specific attribute.
    """
    msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    raise GDSParseError(msg)
class MixedContainer:
    """One chunk of mixed XML content (literal text, a simple typed
    value, or a nested complex object), tagged with a category and a
    simple-type code so it can be re-serialized in document order."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    # NOTE(review): the content_type parameter is unused; callers appear
    # to treat this as a plain getter — confirm before changing it.
    def getContenttype(self, content_type):
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        """Write this chunk to outfile in whichever form its category requires."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            # NOTE(review): namespace and name are passed in swapped
            # order relative to this method's own signature — confirm
            # against the child class export() signatures.
            self.value.export(
                outfile, level, namespace, name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        """Write a simple typed value as <name>value</name>, formatted per content_type."""
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element):
        """Append this chunk to an lxml/ElementTree element, merging text
        into the parent's text or the previous sibling's tail."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        """Return the simple value rendered as text per content_type."""
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        """Write this chunk as Python constructor source (literal export)."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Introspection record describing one member of a generated class:
    its name, XML data type (a string or a chain of strings), and
    container/choice/optional flags."""
    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional
    def set_name(self, name): self.name = name
    def get_name(self): return self.name
    def set_data_type(self, data_type): self.data_type = data_type
    def get_data_type_chain(self): return self.data_type
    def get_data_type(self):
        # When data_type is a chain (list), the last entry is the most
        # specific type; an empty chain falls back to xs:string.
        if isinstance(self.data_type, list):
            if len(self.data_type) > 0:
                return self.data_type[-1]
            else:
                return 'xs:string'
        else:
            return self.data_type
    def set_container(self, container): self.container = container
    def get_container(self): return self.container
    def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs
    def get_child_attrs(self): return self.child_attrs
    def set_choice(self, choice): self.choice = choice
    def get_choice(self): return self.choice
    def set_optional(self, optional): self.optional = optional
    def get_optional(self): return self.optional
def _cast(typ, value):
    """Apply converter `typ` to `value`; pass through when either is None."""
    return value if typ is None or value is None else typ(value)
#
# Data representation classes.
#
class eSocial(GeneratedsSuper):
    """Root element of the message: carries an evtAdmissao event and,
    optionally, a ds:Signature (XML digital signature) value."""
    subclass = None
    superclass = None
    def __init__(self, evtAdmissao=None, Signature=None):
        self.original_tagname_ = None
        self.evtAdmissao = evtAdmissao
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass module or a
        # registered subclass, when either is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, eSocial)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if eSocial.subclass:
            return eSocial.subclass(*args_, **kwargs_)
        else:
            return eSocial(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_evtAdmissao(self): return self.evtAdmissao
    def set_evtAdmissao(self, evtAdmissao): self.evtAdmissao = evtAdmissao
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def hasContent_(self):
        # True when at least one child element must be serialized.
        if (
            self.evtAdmissao is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='eSocial', namespacedef_=' xmlns:ds="http://www.w3.org/2000/09/xmldsig#" ', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('eSocial')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='eSocial')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='eSocial', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing tag when there is no content.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='eSocial'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='eSocial', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.evtAdmissao is not None:
            self.evtAdmissao.export(outfile, level, namespace_, name_='evtAdmissao', pretty_print=pretty_print)
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % ('ds:', self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), 'ds:', eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'evtAdmissao':
            obj_ = evtAdmissao.factory()
            obj_.build(child_)
            self.evtAdmissao = obj_
            obj_.original_tagname_ = 'evtAdmissao'
        elif nodeName_ == 'Signature':
            Signature_ = child_.text
            Signature_ = self.gds_validate_string(Signature_, node, 'Signature')
            self.Signature = Signature_
# end class eSocial
class evtAdmissao(GeneratedsSuper):
    """Initial registration of the employment relationship and worker
    admission/hiring event (original: "Evento Cadastramento Inicial do
    Vinculo e Admissao / Ingresso de Trabalhador")."""
    subclass = None
    superclass = None
    def __init__(self, Id=None, ideEvento=None, ideEmpregador=None, trabalhador=None, vinculo=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.ideEvento = ideEvento
        self.ideEmpregador = ideEmpregador
        self.trabalhador = trabalhador
        self.vinculo = vinculo
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass module or a
        # registered subclass, when either is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, evtAdmissao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if evtAdmissao.subclass:
            return evtAdmissao.subclass(*args_, **kwargs_)
        else:
            return evtAdmissao(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ideEvento(self): return self.ideEvento
    def set_ideEvento(self, ideEvento): self.ideEvento = ideEvento
    def get_ideEmpregador(self): return self.ideEmpregador
    def set_ideEmpregador(self, ideEmpregador): self.ideEmpregador = ideEmpregador
    def get_trabalhador(self): return self.trabalhador
    def set_trabalhador(self, trabalhador): self.trabalhador = trabalhador
    def get_vinculo(self): return self.vinculo
    def set_vinculo(self, vinculo): self.vinculo = vinculo
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one child element must be serialized.
        if (
            self.ideEvento is not None or
            self.ideEmpregador is not None or
            self.trabalhador is not None or
            self.vinculo is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='evtAdmissao', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('evtAdmissao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='evtAdmissao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='evtAdmissao', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='evtAdmissao'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
    def exportChildren(self, outfile, level, namespace_='', name_='evtAdmissao', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ideEvento is not None:
            self.ideEvento.export(outfile, level, namespace_, name_='ideEvento', pretty_print=pretty_print)
        if self.ideEmpregador is not None:
            self.ideEmpregador.export(outfile, level, namespace_, name_='ideEmpregador', pretty_print=pretty_print)
        if self.trabalhador is not None:
            self.trabalhador.export(outfile, level, namespace_, name_='trabalhador', pretty_print=pretty_print)
        if self.vinculo is not None:
            self.vinculo.export(outfile, level, namespace_, name_='vinculo', pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each child element to its generated binding class.
        if nodeName_ == 'ideEvento':
            obj_ = TIdeEveTrab.factory()
            obj_.build(child_)
            self.ideEvento = obj_
            obj_.original_tagname_ = 'ideEvento'
        elif nodeName_ == 'ideEmpregador':
            obj_ = TEmpregador.factory()
            obj_.build(child_)
            self.ideEmpregador = obj_
            obj_.original_tagname_ = 'ideEmpregador'
        elif nodeName_ == 'trabalhador':
            obj_ = trabalhador.factory()
            obj_.build(child_)
            self.trabalhador = obj_
            obj_.original_tagname_ = 'trabalhador'
        elif nodeName_ == 'vinculo':
            obj_ = vinculo.factory()
            obj_.build(child_)
            self.vinculo = obj_
            obj_.original_tagname_ = 'vinculo'
# end class evtAdmissao
class trabalhador(GeneratedsSuper):
    """Worker's personal information (original: "Informacoes Pessoais do
    Trabalhador")."""
    subclass = None
    superclass = None
    def __init__(self, cpfTrab=None, nisTrab=None, nmTrab=None, sexo=None, racaCor=None, estCiv=None, grauInstr=None, indPriEmpr=None, nmSoc=None, nascimento=None, documentos=None, endereco=None, trabEstrangeiro=None, infoDeficiencia=None, dependente=None, aposentadoria=None, contato=None):
        self.original_tagname_ = None
        self.cpfTrab = cpfTrab
        self.nisTrab = nisTrab
        self.nmTrab = nmTrab
        self.sexo = sexo
        self.racaCor = racaCor
        self.estCiv = estCiv
        self.grauInstr = grauInstr
        self.indPriEmpr = indPriEmpr
        self.nmSoc = nmSoc
        self.nascimento = nascimento
        self.documentos = documentos
        self.endereco = endereco
        self.trabEstrangeiro = trabEstrangeiro
        self.infoDeficiencia = infoDeficiencia
        # dependente is the only repeating child; default to a fresh
        # list (never a shared mutable default).
        if dependente is None:
            self.dependente = []
        else:
            self.dependente = dependente
        self.aposentadoria = aposentadoria
        self.contato = contato
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass module or a
        # registered subclass, when either is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, trabalhador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if trabalhador.subclass:
            return trabalhador.subclass(*args_, **kwargs_)
        else:
            return trabalhador(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_cpfTrab(self): return self.cpfTrab
    def set_cpfTrab(self, cpfTrab): self.cpfTrab = cpfTrab
    def get_nisTrab(self): return self.nisTrab
    def set_nisTrab(self, nisTrab): self.nisTrab = nisTrab
    def get_nmTrab(self): return self.nmTrab
    def set_nmTrab(self, nmTrab): self.nmTrab = nmTrab
    def get_sexo(self): return self.sexo
    def set_sexo(self, sexo): self.sexo = sexo
    def get_racaCor(self): return self.racaCor
    def set_racaCor(self, racaCor): self.racaCor = racaCor
    def get_estCiv(self): return self.estCiv
    def set_estCiv(self, estCiv): self.estCiv = estCiv
    def get_grauInstr(self): return self.grauInstr
    def set_grauInstr(self, grauInstr): self.grauInstr = grauInstr
    def get_indPriEmpr(self): return self.indPriEmpr
    def set_indPriEmpr(self, indPriEmpr): self.indPriEmpr = indPriEmpr
    def get_nmSoc(self): return self.nmSoc
    def set_nmSoc(self, nmSoc): self.nmSoc = nmSoc
    def get_nascimento(self): return self.nascimento
    def set_nascimento(self, nascimento): self.nascimento = nascimento
    def get_documentos(self): return self.documentos
    def set_documentos(self, documentos): self.documentos = documentos
    def get_endereco(self): return self.endereco
    def set_endereco(self, endereco): self.endereco = endereco
    def get_trabEstrangeiro(self): return self.trabEstrangeiro
    def set_trabEstrangeiro(self, trabEstrangeiro): self.trabEstrangeiro = trabEstrangeiro
    def get_infoDeficiencia(self): return self.infoDeficiencia
    def set_infoDeficiencia(self, infoDeficiencia): self.infoDeficiencia = infoDeficiencia
    def get_dependente(self): return self.dependente
    def set_dependente(self, dependente): self.dependente = dependente
    def add_dependente(self, value): self.dependente.append(value)
    def insert_dependente_at(self, index, value): self.dependente.insert(index, value)
    def replace_dependente_at(self, index, value): self.dependente[index] = value
    def get_aposentadoria(self): return self.aposentadoria
    def set_aposentadoria(self, aposentadoria): self.aposentadoria = aposentadoria
    def get_contato(self): return self.contato
    def set_contato(self, contato): self.contato = contato
    def hasContent_(self):
        # True when at least one child element must be serialized.
        if (
            self.cpfTrab is not None or
            self.nisTrab is not None or
            self.nmTrab is not None or
            self.sexo is not None or
            self.racaCor is not None or
            self.estCiv is not None or
            self.grauInstr is not None or
            self.indPriEmpr is not None or
            self.nmSoc is not None or
            self.nascimento is not None or
            self.documentos is not None or
            self.endereco is not None or
            self.trabEstrangeiro is not None or
            self.infoDeficiencia is not None or
            self.dependente or
            self.aposentadoria is not None or
            self.contato is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='trabalhador', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('trabalhador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='trabalhador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='trabalhador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='trabalhador'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='trabalhador', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.cpfTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scpfTrab>%s</%scpfTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfTrab), input_name='cpfTrab')), namespace_, eol_))
        if self.nisTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snisTrab>%s</%snisTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nisTrab), input_name='nisTrab')), namespace_, eol_))
        if self.nmTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmTrab>%s</%snmTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmTrab), input_name='nmTrab')), namespace_, eol_))
        if self.sexo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%ssexo>%s</%ssexo>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.sexo), input_name='sexo')), namespace_, eol_))
        if self.racaCor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sracaCor>%s</%sracaCor>%s' % (namespace_, self.gds_format_integer(self.racaCor, input_name='racaCor'), namespace_, eol_))
        if self.estCiv is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sestCiv>%s</%sestCiv>%s' % (namespace_, self.gds_format_integer(self.estCiv, input_name='estCiv'), namespace_, eol_))
        if self.grauInstr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sgrauInstr>%s</%sgrauInstr>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.grauInstr), input_name='grauInstr')), namespace_, eol_))
        if self.indPriEmpr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sindPriEmpr>%s</%sindPriEmpr>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.indPriEmpr), input_name='indPriEmpr')), namespace_, eol_))
        if self.nmSoc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmSoc>%s</%snmSoc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmSoc), input_name='nmSoc')), namespace_, eol_))
        if self.nascimento is not None:
            self.nascimento.export(outfile, level, namespace_, name_='nascimento', pretty_print=pretty_print)
        if self.documentos is not None:
            self.documentos.export(outfile, level, namespace_, name_='documentos', pretty_print=pretty_print)
        if self.endereco is not None:
            self.endereco.export(outfile, level, namespace_, name_='endereco', pretty_print=pretty_print)
        if self.trabEstrangeiro is not None:
            self.trabEstrangeiro.export(outfile, level, namespace_, name_='trabEstrangeiro', pretty_print=pretty_print)
        if self.infoDeficiencia is not None:
            self.infoDeficiencia.export(outfile, level, namespace_, name_='infoDeficiencia', pretty_print=pretty_print)
        for dependente_ in self.dependente:
            dependente_.export(outfile, level, namespace_, name_='dependente', pretty_print=pretty_print)
        if self.aposentadoria is not None:
            self.aposentadoria.export(outfile, level, namespace_, name_='aposentadoria', pretty_print=pretty_print)
        if self.contato is not None:
            self.contato.export(outfile, level, namespace_, name_='contato', pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each child element to its string/integer handler or
        # generated binding class.
        if nodeName_ == 'cpfTrab':
            cpfTrab_ = child_.text
            cpfTrab_ = self.gds_validate_string(cpfTrab_, node, 'cpfTrab')
            self.cpfTrab = cpfTrab_
        elif nodeName_ == 'nisTrab':
            nisTrab_ = child_.text
            nisTrab_ = self.gds_validate_string(nisTrab_, node, 'nisTrab')
            self.nisTrab = nisTrab_
        elif nodeName_ == 'nmTrab':
            nmTrab_ = child_.text
            nmTrab_ = self.gds_validate_string(nmTrab_, node, 'nmTrab')
            self.nmTrab = nmTrab_
        elif nodeName_ == 'sexo':
            sexo_ = child_.text
            sexo_ = self.gds_validate_string(sexo_, node, 'sexo')
            self.sexo = sexo_
        elif nodeName_ == 'racaCor':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'racaCor')
            self.racaCor = ival_
        elif nodeName_ == 'estCiv':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'estCiv')
            self.estCiv = ival_
        elif nodeName_ == 'grauInstr':
            grauInstr_ = child_.text
            grauInstr_ = self.gds_validate_string(grauInstr_, node, 'grauInstr')
            self.grauInstr = grauInstr_
        elif nodeName_ == 'indPriEmpr':
            indPriEmpr_ = child_.text
            indPriEmpr_ = self.gds_validate_string(indPriEmpr_, node, 'indPriEmpr')
            self.indPriEmpr = indPriEmpr_
        elif nodeName_ == 'nmSoc':
            nmSoc_ = child_.text
            nmSoc_ = self.gds_validate_string(nmSoc_, node, 'nmSoc')
            self.nmSoc = nmSoc_
        elif nodeName_ == 'nascimento':
            obj_ = nascimento.factory()
            obj_.build(child_)
            self.nascimento = obj_
            obj_.original_tagname_ = 'nascimento'
        elif nodeName_ == 'documentos':
            obj_ = documentos.factory()
            obj_.build(child_)
            self.documentos = obj_
            obj_.original_tagname_ = 'documentos'
        elif nodeName_ == 'endereco':
            obj_ = endereco.factory()
            obj_.build(child_)
            self.endereco = obj_
            obj_.original_tagname_ = 'endereco'
        elif nodeName_ == 'trabEstrangeiro':
            obj_ = TTrabEstrang.factory()
            obj_.build(child_)
            self.trabEstrangeiro = obj_
            obj_.original_tagname_ = 'trabEstrangeiro'
        elif nodeName_ == 'infoDeficiencia':
            obj_ = infoDeficiencia.factory()
            obj_.build(child_)
            self.infoDeficiencia = obj_
            obj_.original_tagname_ = 'infoDeficiencia'
        elif nodeName_ == 'dependente':
            obj_ = TDependente.factory()
            obj_.build(child_)
            self.dependente.append(obj_)
            obj_.original_tagname_ = 'dependente'
        elif nodeName_ == 'aposentadoria':
            obj_ = aposentadoria.factory()
            obj_.build(child_)
            self.aposentadoria = obj_
            obj_.original_tagname_ = 'aposentadoria'
        elif nodeName_ == 'contato':
            obj_ = TContato.factory()
            obj_.build(child_)
            self.contato = obj_
            obj_.original_tagname_ = 'contato'
# end class trabalhador
class cpfTrab(GeneratedsSuper):
    """Placeholder binding for the cpfTrab element.

    This generated type declares no attributes and no child elements.
    Its hasContent_ previously read ``if ( ):`` — a truth test on an
    empty tuple that is always False and reads like a generation bug;
    it now states ``return False`` explicitly, preserving behavior.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass module or a
        # registered subclass, when either is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cpfTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cpfTrab.subclass:
            return cpfTrab.subclass(*args_, **kwargs_)
        else:
            return cpfTrab(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='cpfTrab', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Always taken: the element serializes as a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfTrab'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cpfTrab', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cpfTrab
class nisTrab(GeneratedsSuper):
    """Placeholder binding for the nisTrab element.

    This generated type declares no attributes and no child elements.
    Its hasContent_ previously read ``if ( ):`` — a truth test on an
    empty tuple that is always False and reads like a generation bug;
    it now states ``return False`` explicitly, preserving behavior.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass module or a
        # registered subclass, when either is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nisTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nisTrab.subclass:
            return nisTrab.subclass(*args_, **kwargs_)
        else:
            return nisTrab(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No attributes or child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='nisTrab', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nisTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nisTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nisTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Always taken: the element serializes as a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nisTrab'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nisTrab', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nisTrab
class nmTrab(GeneratedsSuper):
    """Schema element nmTrab: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a nmTrab instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nmTrab)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = nmTrab.subclass or nmTrab
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='nmTrab', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmTrab'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nmTrab', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class nmTrab
class sexo(GeneratedsSuper):
    """Schema element sexo: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a sexo instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, sexo)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = sexo.subclass or sexo
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='sexo', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('sexo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='sexo')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='sexo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='sexo'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='sexo', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class sexo
class racaCor(GeneratedsSuper):
    """Schema element racaCor: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a racaCor instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, racaCor)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = racaCor.subclass or racaCor
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='racaCor', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('racaCor')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='racaCor')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='racaCor', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='racaCor'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='racaCor', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class racaCor
class estCiv(GeneratedsSuper):
    """Schema element estCiv: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an estCiv instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, estCiv)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = estCiv.subclass or estCiv
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='estCiv', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('estCiv')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='estCiv')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='estCiv', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='estCiv'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='estCiv', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class estCiv
class grauInstr(GeneratedsSuper):
    """Schema element grauInstr: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a grauInstr instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, grauInstr)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = grauInstr.subclass or grauInstr
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='grauInstr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('grauInstr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='grauInstr')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='grauInstr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='grauInstr'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='grauInstr', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class grauInstr
class indPriEmpr(GeneratedsSuper):
    """Schema element indPriEmpr: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an indPriEmpr instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, indPriEmpr)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = indPriEmpr.subclass or indPriEmpr
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='indPriEmpr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('indPriEmpr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='indPriEmpr')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='indPriEmpr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='indPriEmpr'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='indPriEmpr', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class indPriEmpr
class nmSoc(GeneratedsSuper):
    """Schema element nmSoc: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a nmSoc instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nmSoc)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = nmSoc.subclass or nmSoc
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='nmSoc', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmSoc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmSoc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmSoc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmSoc'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nmSoc', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class nmSoc
class nascimento(GeneratedsSuper):
    """Group of information on the worker's birth (nascimento).

    Child elements, all optional: dtNascto (date), codMunic (integer),
    uf, paisNascto, paisNac, nmMae, nmPai (strings).
    """
    subclass = None
    superclass = None
    def __init__(self, dtNascto=None, codMunic=None, uf=None, paisNascto=None, paisNac=None, nmMae=None, nmPai=None):
        self.original_tagname_ = None
        # dtNascto may arrive as an ISO 'YYYY-MM-DD' string; normalise to a date.
        if isinstance(dtNascto, BaseStrType_):
            self.dtNascto = datetime_.datetime.strptime(dtNascto, '%Y-%m-%d').date()
        else:
            self.dtNascto = dtNascto
        self.codMunic = codMunic
        self.uf = uf
        self.paisNascto = paisNascto
        self.paisNac = paisNac
        self.nmMae = nmMae
        self.nmPai = nmPai
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a nascimento instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nascimento)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = nascimento.subclass or nascimento
        return cls(*args_, **kwargs_)
    # Generated accessor pairs (kept for backward compatibility with callers).
    def get_dtNascto(self):
        return self.dtNascto
    def set_dtNascto(self, dtNascto):
        self.dtNascto = dtNascto
    def get_codMunic(self):
        return self.codMunic
    def set_codMunic(self, codMunic):
        self.codMunic = codMunic
    def get_uf(self):
        return self.uf
    def set_uf(self, uf):
        self.uf = uf
    def get_paisNascto(self):
        return self.paisNascto
    def set_paisNascto(self, paisNascto):
        self.paisNascto = paisNascto
    def get_paisNac(self):
        return self.paisNac
    def set_paisNac(self, paisNac):
        self.paisNac = paisNac
    def get_nmMae(self):
        return self.nmMae
    def set_nmMae(self, nmMae):
        self.nmMae = nmMae
    def get_nmPai(self):
        return self.nmPai
    def set_nmPai(self, nmPai):
        self.nmPai = nmPai
    def hasContent_(self):
        # True when at least one child value has been set.
        return any(
            value is not None
            for value in (
                self.dtNascto, self.codMunic, self.uf, self.paisNascto,
                self.paisNac, self.nmMae, self.nmPai,
            )
        )
    def export(self, outfile, level, namespace_='', name_='nascimento', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nascimento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nascimento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nascimento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nascimento'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nascimento', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element in schema order."""
        eol_ = '\n' if pretty_print else ''
        if self.dtNascto is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtNascto>%s</%sdtNascto>%s' % (namespace_, self.gds_format_date(self.dtNascto, input_name='dtNascto'), namespace_, eol_))
        if self.codMunic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodMunic>%s</%scodMunic>%s' % (namespace_, self.gds_format_integer(self.codMunic, input_name='codMunic'), namespace_, eol_))
        if self.uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%suf>%s</%suf>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.uf), input_name='uf')), namespace_, eol_))
        if self.paisNascto is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%spaisNascto>%s</%spaisNascto>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.paisNascto), input_name='paisNascto')), namespace_, eol_))
        if self.paisNac is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%spaisNac>%s</%spaisNac>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.paisNac), input_name='paisNac')), namespace_, eol_))
        if self.nmMae is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmMae>%s</%snmMae>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmMae), input_name='nmMae')), namespace_, eol_))
        if self.nmPai is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmPai>%s</%snmPai>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmPai), input_name='nmPai')), namespace_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign one parsed child element to the matching attribute."""
        if nodeName_ == 'dtNascto':
            self.dtNascto = self.gds_parse_date(child_.text)
        elif nodeName_ == 'codMunic':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.codMunic = self.gds_validate_integer(ival_, node, 'codMunic')
        elif nodeName_ == 'uf':
            self.uf = self.gds_validate_string(child_.text, node, 'uf')
        elif nodeName_ == 'paisNascto':
            self.paisNascto = self.gds_validate_string(child_.text, node, 'paisNascto')
        elif nodeName_ == 'paisNac':
            self.paisNac = self.gds_validate_string(child_.text, node, 'paisNac')
        elif nodeName_ == 'nmMae':
            self.nmMae = self.gds_validate_string(child_.text, node, 'nmMae')
        elif nodeName_ == 'nmPai':
            self.nmPai = self.gds_validate_string(child_.text, node, 'nmPai')
# end class nascimento
class dtNascto(GeneratedsSuper):
    """Schema element dtNascto: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a dtNascto instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, dtNascto)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = dtNascto.subclass or dtNascto
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='dtNascto', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtNascto')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtNascto')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtNascto', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtNascto'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtNascto', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class dtNascto
class codMunic(GeneratedsSuper):
    """Schema element codMunic: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a codMunic instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, codMunic)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = codMunic.subclass or codMunic
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='codMunic', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codMunic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codMunic')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codMunic', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codMunic'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='codMunic', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class codMunic
class uf(GeneratedsSuper):
    """Schema element uf: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a uf instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, uf)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = uf.subclass or uf
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='uf', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('uf')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='uf')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='uf', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='uf'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='uf', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class uf
class paisNascto(GeneratedsSuper):
    """Schema element paisNascto: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a paisNascto instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, paisNascto)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = paisNascto.subclass or paisNascto
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='paisNascto', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('paisNascto')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='paisNascto')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='paisNascto', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='paisNascto'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='paisNascto', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class paisNascto
class paisNac(GeneratedsSuper):
    """Schema element paisNac: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a paisNac instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, paisNac)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = paisNac.subclass or paisNac
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='paisNac', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('paisNac')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='paisNac')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='paisNac', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='paisNac'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='paisNac', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class paisNac
class nmMae(GeneratedsSuper):
    """Schema element nmMae: defines no attributes and no children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name recorded at parse time when read under a different name.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create a nmMae instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nmMae)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = nmMae.subclass or nmMae
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This element never stores child content.
        return False
    def export(self, outfile, level, namespace_='', name_='nmMae', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmMae')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmMae')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmMae', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmMae'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nmMae', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this element.
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Nothing to do: the element defines no attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Nothing to do: the element defines no children.
        pass
# end class nmMae
class nmPai(GeneratedsSuper):
    """Generated binding for the ``nmPai`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate nmPai, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nmPai)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = nmPai.subclass if nmPai.subclass else nmPai
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='nmPai', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmPai')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmPai')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmPai', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmPai'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='nmPai', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class nmPai
class documentos(GeneratedsSuper):
    """Worker's personal-document information
    (original: "Informações dos documentos pessoais do trabalhador")."""
    subclass = None
    superclass = None
    def __init__(self, CTPS=None, RIC=None, RG=None, RNE=None, OC=None, CNH=None):
        self.original_tagname_ = None
        self.CTPS = CTPS
        self.RIC = RIC
        self.RG = RG
        self.RNE = RNE
        self.OC = OC
        self.CNH = CNH
    def factory(*args_, **kwargs_):
        """Instantiate documentos, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, documentos)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = documentos.subclass if documentos.subclass else documentos
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CTPS(self): return self.CTPS
    def set_CTPS(self, CTPS): self.CTPS = CTPS
    def get_RIC(self): return self.RIC
    def set_RIC(self, RIC): self.RIC = RIC
    def get_RG(self): return self.RG
    def set_RG(self, RG): self.RG = RG
    def get_RNE(self): return self.RNE
    def set_RNE(self, RNE): self.RNE = RNE
    def get_OC(self): return self.OC
    def set_OC(self, OC): self.OC = OC
    def get_CNH(self): return self.CNH
    def set_CNH(self, CNH): self.CNH = CNH
    def hasContent_(self):
        # True when at least one document sub-element is present.
        return any(
            getattr(self, tag_) is not None
            for tag_ in ('CTPS', 'RIC', 'RG', 'RNE', 'OC', 'CNH'))
    def export(self, outfile, level, namespace_='', name_='documentos', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('documentos')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='documentos')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='documentos', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='documentos'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='documentos', fromsubclass_=False, pretty_print=True):
        """Export each present document member, in schema order."""
        for tag_ in ('CTPS', 'RIC', 'RG', 'RNE', 'OC', 'CNH'):
            member_ = getattr(self, tag_)
            if member_ is not None:
                member_.export(outfile, level, namespace_, name_=tag_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member object."""
        classes_ = {
            'CTPS': TCtps,
            'RIC': TRic,
            'RG': TRg,
            'RNE': TRne,
            'OC': TOc,
            'CNH': TCnh,
        }
        klass_ = classes_.get(nodeName_)
        if klass_ is not None:
            obj_ = klass_.factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class documentos
class endereco(GeneratedsSuper):
    """Worker address information group
    (original: "Grupo de informações do endereço do Trabalhador")."""
    subclass = None
    superclass = None
    def __init__(self, brasil=None, exterior=None):
        self.original_tagname_ = None
        self.brasil = brasil
        self.exterior = exterior
    def factory(*args_, **kwargs_):
        """Instantiate endereco, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, endereco)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = endereco.subclass if endereco.subclass else endereco
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_brasil(self): return self.brasil
    def set_brasil(self, brasil): self.brasil = brasil
    def get_exterior(self): return self.exterior
    def set_exterior(self, exterior): self.exterior = exterior
    def hasContent_(self):
        # True when either the Brazilian or the foreign address is present.
        return self.brasil is not None or self.exterior is not None
    def export(self, outfile, level, namespace_='', name_='endereco', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('endereco')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='endereco')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='endereco', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='endereco'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='endereco', fromsubclass_=False, pretty_print=True):
        """Export each present address member, in schema order."""
        for tag_ in ('brasil', 'exterior'):
            member_ = getattr(self, tag_)
            if member_ is not None:
                member_.export(outfile, level, namespace_, name_=tag_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member object."""
        classes_ = {
            'brasil': TEnderecoBrasil,
            'exterior': TEnderecoExterior,
        }
        klass_ = classes_.get(nodeName_)
        if klass_ is not None:
            obj_ = klass_.factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class endereco
class infoDeficiencia(GeneratedsSuper):
    """Generated binding for ``infoDeficiencia`` (worker disability
    information); all members are simple string-valued child elements."""
    subclass = None
    superclass = None
    def __init__(self, defFisica=None, defVisual=None, defAuditiva=None, defMental=None, defIntelectual=None, reabReadap=None, infoCota=None, observacao=None):
        self.original_tagname_ = None
        self.defFisica = defFisica
        self.defVisual = defVisual
        self.defAuditiva = defAuditiva
        self.defMental = defMental
        self.defIntelectual = defIntelectual
        self.reabReadap = reabReadap
        self.infoCota = infoCota
        self.observacao = observacao
    def factory(*args_, **kwargs_):
        """Instantiate infoDeficiencia, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, infoDeficiencia)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = infoDeficiencia.subclass if infoDeficiencia.subclass else infoDeficiencia
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_defFisica(self): return self.defFisica
    def set_defFisica(self, defFisica): self.defFisica = defFisica
    def get_defVisual(self): return self.defVisual
    def set_defVisual(self, defVisual): self.defVisual = defVisual
    def get_defAuditiva(self): return self.defAuditiva
    def set_defAuditiva(self, defAuditiva): self.defAuditiva = defAuditiva
    def get_defMental(self): return self.defMental
    def set_defMental(self, defMental): self.defMental = defMental
    def get_defIntelectual(self): return self.defIntelectual
    def set_defIntelectual(self, defIntelectual): self.defIntelectual = defIntelectual
    def get_reabReadap(self): return self.reabReadap
    def set_reabReadap(self, reabReadap): self.reabReadap = reabReadap
    def get_infoCota(self): return self.infoCota
    def set_infoCota(self, infoCota): self.infoCota = infoCota
    def get_observacao(self): return self.observacao
    def set_observacao(self, observacao): self.observacao = observacao
    def hasContent_(self):
        # True when at least one member element is present.
        return any(
            getattr(self, tag_) is not None
            for tag_ in ('defFisica', 'defVisual', 'defAuditiva', 'defMental',
                         'defIntelectual', 'reabReadap', 'infoCota', 'observacao'))
    def export(self, outfile, level, namespace_='', name_='infoDeficiencia', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoDeficiencia')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoDeficiencia')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoDeficiencia', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoDeficiencia'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='infoDeficiencia', fromsubclass_=False, pretty_print=True):
        """Write each present string member as ``<tag>value</tag>``, in schema order."""
        eol_ = '\n' if pretty_print else ''
        for tag_ in ('defFisica', 'defVisual', 'defAuditiva', 'defMental',
                     'defIntelectual', 'reabReadap', 'infoCota', 'observacao'):
            value_ = getattr(self, tag_)
            if value_ is not None:
                showIndent(outfile, level, pretty_print)
                # Same byte output as the per-field generated format strings.
                outfile.write('<%s%s>%s</%s%s>%s' % (
                    namespace_, tag_,
                    self.gds_encode(self.gds_format_string(
                        quote_xml(value_), input_name=tag_)),
                    namespace_, tag_, eol_))
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one string-valued child element into the matching member."""
        if nodeName_ in ('defFisica', 'defVisual', 'defAuditiva', 'defMental',
                         'defIntelectual', 'reabReadap', 'infoCota', 'observacao'):
            value_ = self.gds_validate_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, value_)
# end class infoDeficiencia
class defFisica(GeneratedsSuper):
    """Generated binding for the ``defFisica`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defFisica, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defFisica)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = defFisica.subclass if defFisica.subclass else defFisica
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='defFisica', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defFisica')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defFisica')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defFisica', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defFisica'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='defFisica', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class defFisica
class defVisual(GeneratedsSuper):
    """Generated binding for the ``defVisual`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defVisual, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defVisual)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = defVisual.subclass if defVisual.subclass else defVisual
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='defVisual', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defVisual')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defVisual')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defVisual', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defVisual'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='defVisual', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class defVisual
class defAuditiva(GeneratedsSuper):
    """Generated binding for the ``defAuditiva`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defAuditiva, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defAuditiva)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = defAuditiva.subclass if defAuditiva.subclass else defAuditiva
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='defAuditiva', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defAuditiva')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defAuditiva')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defAuditiva', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defAuditiva'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='defAuditiva', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class defAuditiva
class defMental(GeneratedsSuper):
    """Generated binding for the ``defMental`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defMental, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defMental)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = defMental.subclass if defMental.subclass else defMental
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='defMental', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defMental')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defMental')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defMental', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defMental'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='defMental', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class defMental
class defIntelectual(GeneratedsSuper):
    """Generated binding for the ``defIntelectual`` XML element, which defines
    no attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate defIntelectual, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, defIntelectual)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = defIntelectual.subclass if defIntelectual.subclass else defIntelectual
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='defIntelectual', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('defIntelectual')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='defIntelectual')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='defIntelectual', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='defIntelectual'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='defIntelectual', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class defIntelectual
class reabReadap(GeneratedsSuper):
    """Generated binding for the ``reabReadap`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate reabReadap, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, reabReadap)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = reabReadap.subclass if reabReadap.subclass else reabReadap
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='reabReadap', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('reabReadap')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='reabReadap')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='reabReadap', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='reabReadap'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='reabReadap', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class reabReadap
class infoCota(GeneratedsSuper):
    """Generated binding for the ``infoCota`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate infoCota, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, infoCota)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = infoCota.subclass if infoCota.subclass else infoCota
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='infoCota', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoCota')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoCota')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoCota', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoCota'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='infoCota', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class infoCota
class observacao(GeneratedsSuper):
    """Generated binding for the ``observacao`` XML element, which defines no
    attributes and no child content."""
    subclass = None
    superclass = None
    def __init__(self):
        # Set when the object is built from a tag other than the default name.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate observacao, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, observacao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = observacao.subclass if observacao.subclass else observacao
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Element type has no members, so it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='observacao', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` (always self-closing)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('observacao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='observacao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='observacao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='observacao'):
        """No attributes to export."""
    def exportChildren(self, outfile, level, namespace_='', name_='observacao', fromsubclass_=False, pretty_print=True):
        """No children to export."""
    def build(self, node):
        """Populate this object from ``node`` and return it."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to parse."""
# end class observacao
class aposentadoria(GeneratedsSuper):
    """Informação de aposentadoria do trabalhador (worker retirement info).

    Holds a single simple child, ``trabAposent`` (string text content).
    """
    subclass = None
    superclass = None

    def __init__(self, trabAposent=None):
        self.original_tagname_ = None
        # Text content of the <trabAposent> child element.
        self.trabAposent = trabAposent

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, aposentadoria)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = aposentadoria.subclass or aposentadoria
        return cls_(*args_, **kwargs_)

    def get_trabAposent(self):
        return self.trabAposent

    def set_trabAposent(self, trabAposent):
        self.trabAposent = trabAposent

    def hasContent_(self):
        return self.trabAposent is not None

    def export(self, outfile, level, namespace_='', name_='aposentadoria', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('aposentadoria')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='aposentadoria')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='aposentadoria', pretty_print=pretty_print)
            # Re-indent before the closing tag (children ended with a newline).
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='aposentadoria'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='aposentadoria', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.trabAposent is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(quote_xml(self.trabAposent), input_name='trabAposent'))
            outfile.write('<%strabAposent>%s</%strabAposent>%s' % (namespace_, encoded_, namespace_, eol_))

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'trabAposent':
            value_ = self.gds_validate_string(child_.text, node, 'trabAposent')
            self.trabAposent = value_
# end class aposentadoria
class trabAposent(GeneratedsSuper):
    """Empty schema element: declares no attributes and no child content."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name actually seen in the parsed document; overrides the
        # default name when exporting.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, trabAposent)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = trabAposent.subclass or trabAposent
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines no content for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='trabAposent', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('trabAposent')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='trabAposent')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='trabAposent', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='trabAposent'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='trabAposent', fromsubclass_=False, pretty_print=True):
        pass  # no children in the schema

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class trabAposent
class vinculo(GeneratedsSuper):
    """Grupo de informações do vínculo (employment-relationship group).

    Maps the eSocial ``vinculo`` XML group: five simple children
    (matricula, tpRegTrab, tpRegPrev, nrRecInfPrelim, cadIni) plus six
    complex children delegated to their own generated classes
    (infoRegimeTrab, infoContrato, sucessaoVinc, transfDom, afastamento,
    desligamento).
    """
    subclass = None
    superclass = None
    def __init__(self, matricula=None, tpRegTrab=None, tpRegPrev=None, nrRecInfPrelim=None, cadIni=None, infoRegimeTrab=None, infoContrato=None, sucessaoVinc=None, transfDom=None, afastamento=None, desligamento=None):
        # Tag name actually seen in the parsed document; overrides the
        # default element name on export.
        self.original_tagname_ = None
        self.matricula = matricula
        self.tpRegTrab = tpRegTrab
        self.tpRegPrev = tpRegPrev
        self.nrRecInfPrelim = nrRecInfPrelim
        self.cadIni = cadIni
        self.infoRegimeTrab = infoRegimeTrab
        self.infoContrato = infoContrato
        self.sucessaoVinc = sucessaoVinc
        self.transfDom = transfDom
        self.afastamento = afastamento
        self.desligamento = desligamento
    def factory(*args_, **kwargs_):
        # Standard generateDS factory: a registered subclass (via
        # CurrentSubclassModule_ or the class-level 'subclass' hook) wins
        # over the generated class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, vinculo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if vinculo.subclass:
            return vinculo.subclass(*args_, **kwargs_)
        else:
            return vinculo(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Conventional generateDS get_/set_ accessor pairs, one per field.
    def get_matricula(self): return self.matricula
    def set_matricula(self, matricula): self.matricula = matricula
    def get_tpRegTrab(self): return self.tpRegTrab
    def set_tpRegTrab(self, tpRegTrab): self.tpRegTrab = tpRegTrab
    def get_tpRegPrev(self): return self.tpRegPrev
    def set_tpRegPrev(self, tpRegPrev): self.tpRegPrev = tpRegPrev
    def get_nrRecInfPrelim(self): return self.nrRecInfPrelim
    def set_nrRecInfPrelim(self, nrRecInfPrelim): self.nrRecInfPrelim = nrRecInfPrelim
    def get_cadIni(self): return self.cadIni
    def set_cadIni(self, cadIni): self.cadIni = cadIni
    def get_infoRegimeTrab(self): return self.infoRegimeTrab
    def set_infoRegimeTrab(self, infoRegimeTrab): self.infoRegimeTrab = infoRegimeTrab
    def get_infoContrato(self): return self.infoContrato
    def set_infoContrato(self, infoContrato): self.infoContrato = infoContrato
    def get_sucessaoVinc(self): return self.sucessaoVinc
    def set_sucessaoVinc(self, sucessaoVinc): self.sucessaoVinc = sucessaoVinc
    def get_transfDom(self): return self.transfDom
    def set_transfDom(self, transfDom): self.transfDom = transfDom
    def get_afastamento(self): return self.afastamento
    def set_afastamento(self, afastamento): self.afastamento = afastamento
    def get_desligamento(self): return self.desligamento
    def set_desligamento(self, desligamento): self.desligamento = desligamento
    def hasContent_(self):
        # True when any child field is set; controls whether export() writes
        # an open/close pair or a self-closing tag.
        if (
            self.matricula is not None or
            self.tpRegTrab is not None or
            self.tpRegPrev is not None or
            self.nrRecInfPrelim is not None or
            self.cadIni is not None or
            self.infoRegimeTrab is not None or
            self.infoContrato is not None or
            self.sucessaoVinc is not None or
            self.transfDom is not None or
            self.afastamento is not None or
            self.desligamento is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='vinculo', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('vinculo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='vinculo')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='vinculo', pretty_print=pretty_print)
            # Re-indent before the closing tag (children ended with a newline).
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='vinculo'):
        # vinculo declares no XML attributes; nothing to emit.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='vinculo', fromsubclass_=False, pretty_print=True):
        # Simple children are formatted inline; complex children delegate to
        # their own export().
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.matricula is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smatricula>%s</%smatricula>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.matricula), input_name='matricula')), namespace_, eol_))
        if self.tpRegTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpRegTrab>%s</%stpRegTrab>%s' % (namespace_, self.gds_format_integer(self.tpRegTrab, input_name='tpRegTrab'), namespace_, eol_))
        if self.tpRegPrev is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpRegPrev>%s</%stpRegPrev>%s' % (namespace_, self.gds_format_integer(self.tpRegPrev, input_name='tpRegPrev'), namespace_, eol_))
        if self.nrRecInfPrelim is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRecInfPrelim>%s</%snrRecInfPrelim>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRecInfPrelim), input_name='nrRecInfPrelim')), namespace_, eol_))
        if self.cadIni is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scadIni>%s</%scadIni>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cadIni), input_name='cadIni')), namespace_, eol_))
        if self.infoRegimeTrab is not None:
            self.infoRegimeTrab.export(outfile, level, namespace_, name_='infoRegimeTrab', pretty_print=pretty_print)
        if self.infoContrato is not None:
            self.infoContrato.export(outfile, level, namespace_, name_='infoContrato', pretty_print=pretty_print)
        if self.sucessaoVinc is not None:
            self.sucessaoVinc.export(outfile, level, namespace_, name_='sucessaoVinc', pretty_print=pretty_print)
        if self.transfDom is not None:
            self.transfDom.export(outfile, level, namespace_, name_='transfDom', pretty_print=pretty_print)
        if self.afastamento is not None:
            self.afastamento.export(outfile, level, namespace_, name_='afastamento', pretty_print=pretty_print)
        if self.desligamento is not None:
            self.desligamento.export(outfile, level, namespace_, name_='desligamento', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # vinculo declares no XML attributes; nothing to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on child tag: strings pass through gds_validate_string,
        # integers are parsed (raise_parse_error on bad input), complex
        # children are built via the matching generated class.
        if nodeName_ == 'matricula':
            matricula_ = child_.text
            matricula_ = self.gds_validate_string(matricula_, node, 'matricula')
            self.matricula = matricula_
        elif nodeName_ == 'tpRegTrab':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpRegTrab')
            self.tpRegTrab = ival_
        elif nodeName_ == 'tpRegPrev':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpRegPrev')
            self.tpRegPrev = ival_
        elif nodeName_ == 'nrRecInfPrelim':
            nrRecInfPrelim_ = child_.text
            nrRecInfPrelim_ = self.gds_validate_string(nrRecInfPrelim_, node, 'nrRecInfPrelim')
            self.nrRecInfPrelim = nrRecInfPrelim_
        elif nodeName_ == 'cadIni':
            cadIni_ = child_.text
            cadIni_ = self.gds_validate_string(cadIni_, node, 'cadIni')
            self.cadIni = cadIni_
        elif nodeName_ == 'infoRegimeTrab':
            obj_ = infoRegimeTrab.factory()
            obj_.build(child_)
            self.infoRegimeTrab = obj_
            obj_.original_tagname_ = 'infoRegimeTrab'
        elif nodeName_ == 'infoContrato':
            # NOTE: infoContrato is bound to the TDadosContrato complex type.
            obj_ = TDadosContrato.factory()
            obj_.build(child_)
            self.infoContrato = obj_
            obj_.original_tagname_ = 'infoContrato'
        elif nodeName_ == 'sucessaoVinc':
            obj_ = sucessaoVinc.factory()
            obj_.build(child_)
            self.sucessaoVinc = obj_
            obj_.original_tagname_ = 'sucessaoVinc'
        elif nodeName_ == 'transfDom':
            obj_ = transfDom.factory()
            obj_.build(child_)
            self.transfDom = obj_
            obj_.original_tagname_ = 'transfDom'
        elif nodeName_ == 'afastamento':
            obj_ = afastamento.factory()
            obj_.build(child_)
            self.afastamento = obj_
            obj_.original_tagname_ = 'afastamento'
        elif nodeName_ == 'desligamento':
            obj_ = desligamento.factory()
            obj_.build(child_)
            self.desligamento = obj_
            obj_.original_tagname_ = 'desligamento'
# end class vinculo
class matricula(GeneratedsSuper):
    """Empty schema element: declares no attributes and no child content."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name actually seen in the parsed document; overrides the
        # default name when exporting.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, matricula)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = matricula.subclass or matricula
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines no content for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='matricula', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('matricula')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='matricula')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='matricula', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='matricula'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='matricula', fromsubclass_=False, pretty_print=True):
        pass  # no children in the schema

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class matricula
class tpRegTrab(GeneratedsSuper):
    """Empty schema element: declares no attributes and no child content."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name actually seen in the parsed document; overrides the
        # default name when exporting.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, tpRegTrab)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = tpRegTrab.subclass or tpRegTrab
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines no content for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='tpRegTrab', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('tpRegTrab')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRegTrab')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpRegTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpRegTrab'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='tpRegTrab', fromsubclass_=False, pretty_print=True):
        pass  # no children in the schema

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class tpRegTrab
class tpRegPrev(GeneratedsSuper):
    """Empty schema element: declares no attributes and no child content."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name actually seen in the parsed document; overrides the
        # default name when exporting.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, tpRegPrev)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = tpRegPrev.subclass or tpRegPrev
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines no content for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='tpRegPrev', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('tpRegPrev')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRegPrev')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpRegPrev', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpRegPrev'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='tpRegPrev', fromsubclass_=False, pretty_print=True):
        pass  # no children in the schema

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class tpRegPrev
class nrRecInfPrelim(GeneratedsSuper):
    """Empty schema element: declares no attributes and no child content."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name actually seen in the parsed document; overrides the
        # default name when exporting.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, nrRecInfPrelim)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = nrRecInfPrelim.subclass or nrRecInfPrelim
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines no content for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRecInfPrelim', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('nrRecInfPrelim')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRecInfPrelim')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRecInfPrelim', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRecInfPrelim'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='nrRecInfPrelim', fromsubclass_=False, pretty_print=True):
        pass  # no children in the schema

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class nrRecInfPrelim
class cadIni(GeneratedsSuper):
    """Empty schema element: declares no attributes and no child content."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name actually seen in the parsed document; overrides the
        # default name when exporting.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, cadIni)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = cadIni.subclass or cadIni
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines no content for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='cadIni', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('cadIni')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cadIni')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='cadIni', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cadIni'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='cadIni', fromsubclass_=False, pretty_print=True):
        pass  # no children in the schema

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class cadIni
class infoRegimeTrab(GeneratedsSuper):
    """Informações do regime trabalhista (labor-regime information).

    Chooses between two complex children: ``infoCeletista`` and
    ``infoEstatutario``.
    """
    subclass = None
    superclass = None

    def __init__(self, infoCeletista=None, infoEstatutario=None):
        self.original_tagname_ = None
        self.infoCeletista = infoCeletista
        self.infoEstatutario = infoEstatutario

    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, infoRegimeTrab)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        cls_ = infoRegimeTrab.subclass or infoRegimeTrab
        return cls_(*args_, **kwargs_)

    def get_infoCeletista(self):
        return self.infoCeletista

    def set_infoCeletista(self, infoCeletista):
        self.infoCeletista = infoCeletista

    def get_infoEstatutario(self):
        return self.infoEstatutario

    def set_infoEstatutario(self, infoEstatutario):
        self.infoEstatutario = infoEstatutario

    def hasContent_(self):
        return self.infoCeletista is not None or self.infoEstatutario is not None

    def export(self, outfile, level, namespace_='', name_='infoRegimeTrab', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_ = GenerateDSNamespaceDefs_.get('infoRegimeTrab')
        if imported_ns_ is not None:
            namespacedef_ = imported_ns_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoRegimeTrab')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoRegimeTrab', pretty_print=pretty_print)
            # Re-indent before the closing tag (children ended with a newline).
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoRegimeTrab'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='infoRegimeTrab', fromsubclass_=False, pretty_print=True):
        # Both children are complex types; delegate to their own export().
        if self.infoCeletista is not None:
            self.infoCeletista.export(outfile, level, namespace_, name_='infoCeletista', pretty_print=pretty_print)
        if self.infoEstatutario is not None:
            self.infoEstatutario.export(outfile, level, namespace_, name_='infoEstatutario', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Both children map tag name -> generated class; dispatch via a table.
        child_classes_ = {
            'infoCeletista': infoCeletista,
            'infoEstatutario': infoEstatutario,
        }
        cls_ = child_classes_.get(nodeName_)
        if cls_ is not None:
            obj_ = cls_.factory()
            obj_.build(child_)
            setattr(self, nodeName_, obj_)
            obj_.original_tagname_ = nodeName_
# end class infoRegimeTrab
class infoCeletista(GeneratedsSuper):
"""Informações de Trabalhador Celetista"""
subclass = None
superclass = None
def __init__(self, dtAdm=None, tpAdmissao=None, indAdmissao=None, tpRegJor=None, natAtividade=None, dtBase=None, cnpjSindCategProf=None, FGTS=None, trabTemporario=None, aprend=None):
    """Initialize the infoCeletista fields.

    dtAdm may be passed either as an ISO ``YYYY-MM-DD`` string or as a
    date object; strings are parsed into a ``date`` here.
    """
    self.original_tagname_ = None
    if isinstance(dtAdm, BaseStrType_):
        self.dtAdm = datetime_.datetime.strptime(dtAdm, '%Y-%m-%d').date()
    else:
        self.dtAdm = dtAdm
    self.tpAdmissao = tpAdmissao
    self.indAdmissao = indAdmissao
    self.tpRegJor = tpRegJor
    self.natAtividade = natAtividade
    self.dtBase = dtBase
    self.cnpjSindCategProf = cnpjSindCategProf
    self.FGTS = FGTS
    self.trabTemporario = trabTemporario
    self.aprend = aprend
@staticmethod
def factory(*args_, **kwargs_):
    """Build an infoCeletista, honoring any registered subclass override."""
    if CurrentSubclassModule_ is not None:
        override_ = getSubclassFromModule_(CurrentSubclassModule_, infoCeletista)
        if override_ is not None:
            return override_(*args_, **kwargs_)
    cls_ = infoCeletista.subclass or infoCeletista
    return cls_(*args_, **kwargs_)
# Conventional generateDS get_/set_ accessor pairs, one per schema field.
def get_dtAdm(self): return self.dtAdm
def set_dtAdm(self, dtAdm): self.dtAdm = dtAdm
def get_tpAdmissao(self): return self.tpAdmissao
def set_tpAdmissao(self, tpAdmissao): self.tpAdmissao = tpAdmissao
def get_indAdmissao(self): return self.indAdmissao
def set_indAdmissao(self, indAdmissao): self.indAdmissao = indAdmissao
def get_tpRegJor(self): return self.tpRegJor
def set_tpRegJor(self, tpRegJor): self.tpRegJor = tpRegJor
def get_natAtividade(self): return self.natAtividade
def set_natAtividade(self, natAtividade): self.natAtividade = natAtividade
def get_dtBase(self): return self.dtBase
def set_dtBase(self, dtBase): self.dtBase = dtBase
def get_cnpjSindCategProf(self): return self.cnpjSindCategProf
def set_cnpjSindCategProf(self, cnpjSindCategProf): self.cnpjSindCategProf = cnpjSindCategProf
def get_FGTS(self): return self.FGTS
def set_FGTS(self, FGTS): self.FGTS = FGTS
def get_trabTemporario(self): return self.trabTemporario
def set_trabTemporario(self, trabTemporario): self.trabTemporario = trabTemporario
def get_aprend(self): return self.aprend
def set_aprend(self, aprend): self.aprend = aprend
def hasContent_(self):
    """Return True when at least one schema field is populated."""
    fields_ = (
        self.dtAdm,
        self.tpAdmissao,
        self.indAdmissao,
        self.tpRegJor,
        self.natAtividade,
        self.dtBase,
        self.cnpjSindCategProf,
        self.FGTS,
        self.trabTemporario,
        self.aprend,
    )
    return any(field_ is not None for field_ in fields_)
def export(self, outfile, level, namespace_='', name_='infoCeletista', namespacedef_='', pretty_print=True):
    """Serialize this element (and its children) to *outfile* as XML."""
    imported_ns_ = GenerateDSNamespaceDefs_.get('infoCeletista')
    if imported_ns_ is not None:
        namespacedef_ = imported_ns_
    eol_ = '\n' if pretty_print else ''
    if self.original_tagname_ is not None:
        name_ = self.original_tagname_
    showIndent(outfile, level, pretty_print)
    ns_sep_ = ' ' + namespacedef_ if namespacedef_ else ''
    outfile.write('<%s%s%s' % (namespace_, name_, ns_sep_))
    already_processed = set()
    self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoCeletista')
    if self.hasContent_():
        outfile.write('>%s' % eol_)
        self.exportChildren(outfile, level + 1, namespace_='', name_='infoCeletista', pretty_print=pretty_print)
        # Re-indent before the closing tag (children ended with a newline).
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    else:
        outfile.write('/>%s' % eol_)
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoCeletista'):
    # infoCeletista declares no XML attributes; nothing to emit.
    pass
    def exportChildren(self, outfile, level, namespace_='', name_='infoCeletista', fromsubclass_=False, pretty_print=True):
        """Write every populated child element of infoCeletista to *outfile*.

        Simple-typed children (date, integer, string) are formatted and
        XML-escaped inline; complex children (FGTS, trabTemporario, aprend)
        delegate to their own export() methods.  Children set to None are
        skipped entirely.
        """
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.dtAdm is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtAdm>%s</%sdtAdm>%s' % (namespace_, self.gds_format_date(self.dtAdm, input_name='dtAdm'), namespace_, eol_))
        if self.tpAdmissao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpAdmissao>%s</%stpAdmissao>%s' % (namespace_, self.gds_format_integer(self.tpAdmissao, input_name='tpAdmissao'), namespace_, eol_))
        if self.indAdmissao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sindAdmissao>%s</%sindAdmissao>%s' % (namespace_, self.gds_format_integer(self.indAdmissao, input_name='indAdmissao'), namespace_, eol_))
        if self.tpRegJor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpRegJor>%s</%stpRegJor>%s' % (namespace_, self.gds_format_integer(self.tpRegJor, input_name='tpRegJor'), namespace_, eol_))
        if self.natAtividade is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snatAtividade>%s</%snatAtividade>%s' % (namespace_, self.gds_format_integer(self.natAtividade, input_name='natAtividade'), namespace_, eol_))
        if self.dtBase is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtBase>%s</%sdtBase>%s' % (namespace_, self.gds_format_integer(self.dtBase, input_name='dtBase'), namespace_, eol_))
        if self.cnpjSindCategProf is not None:
            showIndent(outfile, level, pretty_print)
            # String content is quoted (XML-escaped) and encoded before writing.
            outfile.write('<%scnpjSindCategProf>%s</%scnpjSindCategProf>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cnpjSindCategProf), input_name='cnpjSindCategProf')), namespace_, eol_))
        if self.FGTS is not None:
            self.FGTS.export(outfile, level, namespace_, name_='FGTS', pretty_print=pretty_print)
        if self.trabTemporario is not None:
            self.trabTemporario.export(outfile, level, namespace_, name_='trabTemporario', pretty_print=pretty_print)
        if self.aprend is not None:
            self.aprend.export(outfile, level, namespace_, name_='aprend', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child XML node into the matching instance attribute.

        Dispatch is by tag name: dtAdm is parsed as a date; tpAdmissao,
        indAdmissao, tpRegJor, natAtividade and dtBase are converted to int
        (a parse error is raised for non-integer text); cnpjSindCategProf
        is validated as a string; FGTS, trabTemporario and aprend are built
        via their generated classes' factory()/build().  Unknown tags are
        silently ignored.
        """
        if nodeName_ == 'dtAdm':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtAdm = dval_
        elif nodeName_ == 'tpAdmissao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpAdmissao')
            self.tpAdmissao = ival_
        elif nodeName_ == 'indAdmissao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'indAdmissao')
            self.indAdmissao = ival_
        elif nodeName_ == 'tpRegJor':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpRegJor')
            self.tpRegJor = ival_
        elif nodeName_ == 'natAtividade':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'natAtividade')
            self.natAtividade = ival_
        elif nodeName_ == 'dtBase':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'dtBase')
            self.dtBase = ival_
        elif nodeName_ == 'cnpjSindCategProf':
            cnpjSindCategProf_ = child_.text
            cnpjSindCategProf_ = self.gds_validate_string(cnpjSindCategProf_, node, 'cnpjSindCategProf')
            self.cnpjSindCategProf = cnpjSindCategProf_
        elif nodeName_ == 'FGTS':
            # NOTE: the FGTS child uses the TFgts generated type.
            obj_ = TFgts.factory()
            obj_.build(child_)
            self.FGTS = obj_
            obj_.original_tagname_ = 'FGTS'
        elif nodeName_ == 'trabTemporario':
            obj_ = trabTemporario.factory()
            obj_.build(child_)
            self.trabTemporario = obj_
            obj_.original_tagname_ = 'trabTemporario'
        elif nodeName_ == 'aprend':
            obj_ = aprend.factory()
            obj_.build(child_)
            self.aprend = obj_
            obj_.original_tagname_ = 'aprend'
# end class infoCeletista
class dtAdm(GeneratedsSuper):
    """Generated binding for the ``dtAdm`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate dtAdm, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, dtAdm)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = dtAdm.subclass if dtAdm.subclass else dtAdm
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='dtAdm', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('dtAdm')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtAdm')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='dtAdm', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtAdm'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtAdm', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtAdm
class tpAdmissao(GeneratedsSuper):
    """Generated binding for the ``tpAdmissao`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate tpAdmissao, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, tpAdmissao)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = tpAdmissao.subclass if tpAdmissao.subclass else tpAdmissao
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='tpAdmissao', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('tpAdmissao')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpAdmissao')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpAdmissao', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpAdmissao'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpAdmissao', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpAdmissao
class indAdmissao(GeneratedsSuper):
    """Generated binding for the ``indAdmissao`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate indAdmissao, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, indAdmissao)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = indAdmissao.subclass if indAdmissao.subclass else indAdmissao
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='indAdmissao', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('indAdmissao')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='indAdmissao')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='indAdmissao', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='indAdmissao'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='indAdmissao', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class indAdmissao
class tpRegJor(GeneratedsSuper):
    """Generated binding for the ``tpRegJor`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate tpRegJor, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, tpRegJor)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = tpRegJor.subclass if tpRegJor.subclass else tpRegJor
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='tpRegJor', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('tpRegJor')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRegJor')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpRegJor', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpRegJor'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpRegJor', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpRegJor
class natAtividade(GeneratedsSuper):
    """Generated binding for the ``natAtividade`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate natAtividade, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, natAtividade)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = natAtividade.subclass if natAtividade.subclass else natAtividade
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='natAtividade', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('natAtividade')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='natAtividade')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='natAtividade', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='natAtividade'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='natAtividade', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class natAtividade
class dtBase(GeneratedsSuper):
    """Generated binding for the ``dtBase`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate dtBase, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, dtBase)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = dtBase.subclass if dtBase.subclass else dtBase
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='dtBase', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('dtBase')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtBase')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='dtBase', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtBase'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtBase', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtBase
class cnpjSindCategProf(GeneratedsSuper):
    """Generated binding for the ``cnpjSindCategProf`` element; the generated
    schema defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate cnpjSindCategProf, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, cnpjSindCategProf)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = cnpjSindCategProf.subclass if cnpjSindCategProf.subclass else cnpjSindCategProf
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='cnpjSindCategProf', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('cnpjSindCategProf')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cnpjSindCategProf')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='cnpjSindCategProf', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cnpjSindCategProf'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='cnpjSindCategProf', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cnpjSindCategProf
class trabTemporario(GeneratedsSuper):
    """Data about temporary work.  Filling in this group is mandatory when
    hiring a temporary worker."""
    subclass = None
    superclass = None
    def __init__(self, hipLeg=None, justContr=None, tpInclContr=None, ideTomadorServ=None, ideTrabSubstituido=None):
        self.original_tagname_ = None
        self.hipLeg = hipLeg
        self.justContr = justContr
        self.tpInclContr = tpInclContr
        self.ideTomadorServ = ideTomadorServ
        # ideTrabSubstituido is a repeating element: default to a fresh list
        # per instance (avoids the shared-mutable-default pitfall).
        if ideTrabSubstituido is None:
            self.ideTrabSubstituido = []
        else:
            self.ideTrabSubstituido = ideTrabSubstituido
    def factory(*args_, **kwargs_):
        """Instantiate trabTemporario, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, trabTemporario)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if trabTemporario.subclass:
            return trabTemporario.subclass(*args_, **kwargs_)
        else:
            return trabTemporario(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors (plain attribute get/set plus list helpers). ---
    def get_hipLeg(self): return self.hipLeg
    def set_hipLeg(self, hipLeg): self.hipLeg = hipLeg
    def get_justContr(self): return self.justContr
    def set_justContr(self, justContr): self.justContr = justContr
    def get_tpInclContr(self): return self.tpInclContr
    def set_tpInclContr(self, tpInclContr): self.tpInclContr = tpInclContr
    def get_ideTomadorServ(self): return self.ideTomadorServ
    def set_ideTomadorServ(self, ideTomadorServ): self.ideTomadorServ = ideTomadorServ
    def get_ideTrabSubstituido(self): return self.ideTrabSubstituido
    def set_ideTrabSubstituido(self, ideTrabSubstituido): self.ideTrabSubstituido = ideTrabSubstituido
    def add_ideTrabSubstituido(self, value): self.ideTrabSubstituido.append(value)
    def insert_ideTrabSubstituido_at(self, index, value): self.ideTrabSubstituido.insert(index, value)
    def replace_ideTrabSubstituido_at(self, index, value): self.ideTrabSubstituido[index] = value
    def hasContent_(self):
        """Return True when any child field is populated (for the repeating
        ideTrabSubstituido list, "populated" means non-empty)."""
        if (
            self.hipLeg is not None or
            self.justContr is not None or
            self.tpInclContr is not None or
            self.ideTomadorServ is not None or
            self.ideTrabSubstituido
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='trabTemporario', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('trabTemporario')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='trabTemporario')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='trabTemporario', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='trabTemporario'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='trabTemporario', fromsubclass_=False, pretty_print=True):
        """Write each populated child element; complex children delegate to
        their own export() methods."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.hipLeg is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%shipLeg>%s</%shipLeg>%s' % (namespace_, self.gds_format_integer(self.hipLeg, input_name='hipLeg'), namespace_, eol_))
        if self.justContr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sjustContr>%s</%sjustContr>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.justContr), input_name='justContr')), namespace_, eol_))
        if self.tpInclContr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInclContr>%s</%stpInclContr>%s' % (namespace_, self.gds_format_integer(self.tpInclContr, input_name='tpInclContr'), namespace_, eol_))
        if self.ideTomadorServ is not None:
            self.ideTomadorServ.export(outfile, level, namespace_, name_='ideTomadorServ', pretty_print=pretty_print)
        for ideTrabSubstituido_ in self.ideTrabSubstituido:
            ideTrabSubstituido_.export(outfile, level, namespace_, name_='ideTrabSubstituido', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree-style *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child node: hipLeg/tpInclContr as int (parse error on bad
        input), justContr as string, ideTomadorServ as a single complex child,
        ideTrabSubstituido appended to the repeating list."""
        if nodeName_ == 'hipLeg':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'hipLeg')
            self.hipLeg = ival_
        elif nodeName_ == 'justContr':
            justContr_ = child_.text
            justContr_ = self.gds_validate_string(justContr_, node, 'justContr')
            self.justContr = justContr_
        elif nodeName_ == 'tpInclContr':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpInclContr')
            self.tpInclContr = ival_
        elif nodeName_ == 'ideTomadorServ':
            obj_ = ideTomadorServ.factory()
            obj_.build(child_)
            self.ideTomadorServ = obj_
            obj_.original_tagname_ = 'ideTomadorServ'
        elif nodeName_ == 'ideTrabSubstituido':
            obj_ = ideTrabSubstituido.factory()
            obj_.build(child_)
            self.ideTrabSubstituido.append(obj_)
            obj_.original_tagname_ = 'ideTrabSubstituido'
# end class trabTemporario
class hipLeg(GeneratedsSuper):
    """Generated binding for the ``hipLeg`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate hipLeg, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, hipLeg)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = hipLeg.subclass if hipLeg.subclass else hipLeg
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='hipLeg', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('hipLeg')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='hipLeg')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='hipLeg', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='hipLeg'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='hipLeg', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class hipLeg
class justContr(GeneratedsSuper):
    """Generated binding for the ``justContr`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate justContr, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, justContr)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = justContr.subclass if justContr.subclass else justContr
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='justContr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('justContr')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='justContr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='justContr', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='justContr'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='justContr', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class justContr
class tpInclContr(GeneratedsSuper):
    """Generated binding for the ``tpInclContr`` element; the generated schema
    defines no attributes and no child content for it."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Instantiate tpInclContr, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(CurrentSubclassModule_, tpInclContr)
            if registered is not None:
                return registered(*args_, **kwargs_)
        target = tpInclContr.subclass if tpInclContr.subclass else tpInclContr
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # The generated content test was empty (`if ():`), i.e. always False.
        return False

    def export(self, outfile, level, namespace_='', name_='tpInclContr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; self-closing unless a subclass
        overrides hasContent_()."""
        ns_override = GenerateDSNamespaceDefs_.get('tpInclContr')
        if ns_override is not None:
            namespacedef_ = ns_override
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpInclContr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpInclContr', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpInclContr'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpInclContr', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        """Consume *node* (no data is extracted); return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpInclContr
class ideTomadorServ(GeneratedsSuper):
"""Identifica a empresa contratante para a qual o trabalhador
temporário será alocado."""
subclass = None
superclass = None
def __init__(self, tpInsc=None, nrInsc=None, ideEstabVinc=None):
self.original_tagname_ = None
self.tpInsc = tpInsc
self.nrInsc = nrInsc
self.ideEstabVinc = ideEstabVinc
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ideTomadorServ)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ideTomadorServ.subclass:
return ideTomadorServ.subclass(*args_, **kwargs_)
else:
return ideTomadorServ(*args_, **kwargs_)
factory = staticmethod(factory)
def get_tpInsc(self): return self.tpInsc
def set_tpInsc(self, tpInsc): self.tpInsc = tpInsc
def get_nrInsc(self): return self.nrInsc
def set_nrInsc(self, nrInsc): self.nrInsc = nrInsc
def get_ideEstabVinc(self): return self.ideEstabVinc
def set_ideEstabVinc(self, ideEstabVinc): self.ideEstabVinc = ideEstabVinc
def hasContent_(self):
if (
self.tpInsc is not None or
self.nrInsc is not None or
self.ideEstabVinc is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ideTomadorServ', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ideTomadorServ')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ideTomadorServ')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ideTomadorServ', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ideTomadorServ'):
        # This element defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='ideTomadorServ', fromsubclass_=False, pretty_print=True):
        """Write the child elements (tpInsc, nrInsc, ideEstabVinc) that are set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))
        if self.ideEstabVinc is not None:
            # Complex child: delegate serialization to its own export().
            self.ideEstabVinc.export(outfile, level, namespace_, name_='ideEstabVinc', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element onto the matching attribute."""
        if nodeName_ == 'tpInsc':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpInsc')
            self.tpInsc = ival_
        elif nodeName_ == 'nrInsc':
            nrInsc_ = child_.text
            nrInsc_ = self.gds_validate_string(nrInsc_, node, 'nrInsc')
            self.nrInsc = nrInsc_
        elif nodeName_ == 'ideEstabVinc':
            # Complex child: build a nested ideEstabVinc object.
            obj_ = ideEstabVinc.factory()
            obj_.build(child_)
            self.ideEstabVinc = obj_
            obj_.original_tagname_ = 'ideEstabVinc'
# end class ideTomadorServ
class tpInsc(GeneratedsSuper):
    """Empty generated binding for the ``tpInsc`` element: it carries no
    attributes and no children; only the parsed tag name is tracked."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate tpInsc, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpInsc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpInsc.subclass:
            return tpInsc.subclass(*args_, **kwargs_)
        else:
            return tpInsc(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (``if ():``), which
        # is always false and left ``return True`` unreachable.  This class
        # has no attributes or children, so report no content directly.
        return False
    def export(self, outfile, level, namespace_='', name_='tpInsc', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as (self-closing) XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpInsc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpInsc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpInsc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpInsc'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpInsc', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpInsc
class nrInsc(GeneratedsSuper):
    """Empty generated binding for the ``nrInsc`` element: it carries no
    attributes and no children; only the parsed tag name is tracked."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate nrInsc, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrInsc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrInsc.subclass:
            return nrInsc.subclass(*args_, **kwargs_)
        else:
            return nrInsc(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (``if ():``), which
        # is always false and left ``return True`` unreachable.  This class
        # has no attributes or children, so report no content directly.
        return False
    def export(self, outfile, level, namespace_='', name_='nrInsc', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as (self-closing) XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrInsc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrInsc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrInsc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrInsc'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nrInsc', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrInsc
class ideEstabVinc(GeneratedsSuper):
    """Identification of the establishment to which the temporary worker
    is linked.  If the place where the service is actually rendered has
    no registration of its own, the CNPJ/CPF to which that place is
    linked must be reported.  Filling this in is mandatory when the
    linked establishment's registration differs from
    {ideTomadorServ/nrInsc}."""
    subclass = None
    superclass = None
    def __init__(self, tpInsc=None, nrInsc=None):
        self.original_tagname_ = None
        self.tpInsc = tpInsc  # registration type (parsed as integer)
        self.nrInsc = nrInsc  # registration number (string)
    def factory(*args_, **kwargs_):
        """Instantiate ideEstabVinc, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ideEstabVinc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ideEstabVinc.subclass:
            return ideEstabVinc.subclass(*args_, **kwargs_)
        else:
            return ideEstabVinc(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpInsc(self): return self.tpInsc
    def set_tpInsc(self, tpInsc): self.tpInsc = tpInsc
    def get_nrInsc(self): return self.nrInsc
    def set_nrInsc(self, nrInsc): self.nrInsc = nrInsc
    def hasContent_(self):
        """Return True when any child element is populated."""
        # Simplified from a redundant ``if …: return True else: return False``.
        return (
            self.tpInsc is not None or
            self.nrInsc is not None
        )
    def export(self, outfile, level, namespace_='', name_='ideEstabVinc', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ideEstabVinc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ideEstabVinc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ideEstabVinc', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ideEstabVinc'):
        # This element defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='ideEstabVinc', fromsubclass_=False, pretty_print=True):
        """Write the child elements (tpInsc, nrInsc) that are set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element onto the matching attribute."""
        if nodeName_ == 'tpInsc':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpInsc')
            self.tpInsc = ival_
        elif nodeName_ == 'nrInsc':
            nrInsc_ = child_.text
            nrInsc_ = self.gds_validate_string(nrInsc_, node, 'nrInsc')
            self.nrInsc = nrInsc_
# end class ideEstabVinc
class ideTrabSubstituido(GeneratedsSuper):
    """Identification of the replaced (substituted) worker(s)."""
    subclass = None
    superclass = None
    def __init__(self, cpfTrabSubst=None):
        self.original_tagname_ = None
        self.cpfTrabSubst = cpfTrabSubst  # CPF of the replaced worker (string)
    def factory(*args_, **kwargs_):
        """Instantiate ideTrabSubstituido, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ideTrabSubstituido)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ideTrabSubstituido.subclass:
            return ideTrabSubstituido.subclass(*args_, **kwargs_)
        else:
            return ideTrabSubstituido(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_cpfTrabSubst(self): return self.cpfTrabSubst
    def set_cpfTrabSubst(self, cpfTrabSubst): self.cpfTrabSubst = cpfTrabSubst
    def hasContent_(self):
        """Return True when any child element is populated."""
        # Simplified from a redundant ``if …: return True else: return False``.
        return self.cpfTrabSubst is not None
    def export(self, outfile, level, namespace_='', name_='ideTrabSubstituido', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ideTrabSubstituido')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ideTrabSubstituido')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ideTrabSubstituido', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ideTrabSubstituido'):
        # This element defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='ideTrabSubstituido', fromsubclass_=False, pretty_print=True):
        """Write the cpfTrabSubst child element when it is set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.cpfTrabSubst is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scpfTrabSubst>%s</%scpfTrabSubst>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfTrabSubst), input_name='cpfTrabSubst')), namespace_, eol_))
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element onto the matching attribute."""
        if nodeName_ == 'cpfTrabSubst':
            cpfTrabSubst_ = child_.text
            cpfTrabSubst_ = self.gds_validate_string(cpfTrabSubst_, node, 'cpfTrabSubst')
            self.cpfTrabSubst = cpfTrabSubst_
# end class ideTrabSubstituido
class cpfTrabSubst(GeneratedsSuper):
    """Empty generated binding for the ``cpfTrabSubst`` element: it carries
    no attributes and no children; only the parsed tag name is tracked."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate cpfTrabSubst, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cpfTrabSubst)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cpfTrabSubst.subclass:
            return cpfTrabSubst.subclass(*args_, **kwargs_)
        else:
            return cpfTrabSubst(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (``if ():``), which
        # is always false and left ``return True`` unreachable.  This class
        # has no attributes or children, so report no content directly.
        return False
    def export(self, outfile, level, namespace_='', name_='cpfTrabSubst', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as (self-closing) XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfTrabSubst')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfTrabSubst')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfTrabSubst', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfTrabSubst'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cpfTrabSubst', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cpfTrabSubst
class aprend(GeneratedsSuper):
    """Information identifying the employer that hires an apprentice.
    Filling this in is mandatory when the apprentice is hired by a
    non-profit educational entity whose purpose is assistance to
    adolescents and professional education (art. 430, item II, CLT)
    or by a sports-practice entity affiliated with the National Sports
    System or with a Sports System of a State, the Federal District or
    a Municipality (art. 430, item III, CLT)."""
    subclass = None
    superclass = None
    def __init__(self, tpInsc=None, nrInsc=None):
        self.original_tagname_ = None
        self.tpInsc = tpInsc  # registration type (parsed as integer)
        self.nrInsc = nrInsc  # registration number (string)
    def factory(*args_, **kwargs_):
        """Instantiate aprend, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, aprend)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if aprend.subclass:
            return aprend.subclass(*args_, **kwargs_)
        else:
            return aprend(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpInsc(self): return self.tpInsc
    def set_tpInsc(self, tpInsc): self.tpInsc = tpInsc
    def get_nrInsc(self): return self.nrInsc
    def set_nrInsc(self, nrInsc): self.nrInsc = nrInsc
    def hasContent_(self):
        """Return True when any child element is populated."""
        # Simplified from a redundant ``if …: return True else: return False``.
        return (
            self.tpInsc is not None or
            self.nrInsc is not None
        )
    def export(self, outfile, level, namespace_='', name_='aprend', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('aprend')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='aprend')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='aprend', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='aprend'):
        # This element defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='aprend', fromsubclass_=False, pretty_print=True):
        """Write the child elements (tpInsc, nrInsc) that are set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element onto the matching attribute."""
        if nodeName_ == 'tpInsc':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpInsc')
            self.tpInsc = ival_
        elif nodeName_ == 'nrInsc':
            nrInsc_ = child_.text
            nrInsc_ = self.gds_validate_string(nrInsc_, node, 'nrInsc')
            self.nrInsc = nrInsc_
# end class aprend
class infoEstatutario(GeneratedsSuper):
    """Information about a statutory (civil-service) worker."""
    subclass = None
    superclass = None
    @staticmethod
    def _coerce_date(value):
        # Accept either a 'YYYY-MM-DD' string or an already-parsed
        # date object (or None); the original repeated this block three
        # times inline.
        if isinstance(value, BaseStrType_):
            return datetime_.datetime.strptime(value, '%Y-%m-%d').date()
        return value
    def __init__(self, indProvim=None, tpProv=None, dtNomeacao=None, dtPosse=None, dtExercicio=None, tpPlanRP=None, infoDecJud=None):
        self.original_tagname_ = None
        self.indProvim = indProvim  # provision indicator (integer code)
        self.tpProv = tpProv  # provision type (integer code)
        self.dtNomeacao = self._coerce_date(dtNomeacao)  # nomination date
        self.dtPosse = self._coerce_date(dtPosse)  # investiture date
        self.dtExercicio = self._coerce_date(dtExercicio)  # start-of-duty date
        self.tpPlanRP = tpPlanRP  # pension-plan type (integer code)
        self.infoDecJud = infoDecJud  # nested infoDecJud complex element
    def factory(*args_, **kwargs_):
        """Instantiate infoEstatutario, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, infoEstatutario)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if infoEstatutario.subclass:
            return infoEstatutario.subclass(*args_, **kwargs_)
        else:
            return infoEstatutario(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_indProvim(self): return self.indProvim
    def set_indProvim(self, indProvim): self.indProvim = indProvim
    def get_tpProv(self): return self.tpProv
    def set_tpProv(self, tpProv): self.tpProv = tpProv
    def get_dtNomeacao(self): return self.dtNomeacao
    def set_dtNomeacao(self, dtNomeacao): self.dtNomeacao = dtNomeacao
    def get_dtPosse(self): return self.dtPosse
    def set_dtPosse(self, dtPosse): self.dtPosse = dtPosse
    def get_dtExercicio(self): return self.dtExercicio
    def set_dtExercicio(self, dtExercicio): self.dtExercicio = dtExercicio
    def get_tpPlanRP(self): return self.tpPlanRP
    def set_tpPlanRP(self, tpPlanRP): self.tpPlanRP = tpPlanRP
    def get_infoDecJud(self): return self.infoDecJud
    def set_infoDecJud(self, infoDecJud): self.infoDecJud = infoDecJud
    def hasContent_(self):
        """Return True when any child element is populated."""
        # Simplified from a redundant ``if …: return True else: return False``.
        return (
            self.indProvim is not None or
            self.tpProv is not None or
            self.dtNomeacao is not None or
            self.dtPosse is not None or
            self.dtExercicio is not None or
            self.tpPlanRP is not None or
            self.infoDecJud is not None
        )
    def export(self, outfile, level, namespace_='', name_='infoEstatutario', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoEstatutario')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoEstatutario')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoEstatutario', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoEstatutario'):
        # This element defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='infoEstatutario', fromsubclass_=False, pretty_print=True):
        """Write each populated child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.indProvim is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sindProvim>%s</%sindProvim>%s' % (namespace_, self.gds_format_integer(self.indProvim, input_name='indProvim'), namespace_, eol_))
        if self.tpProv is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpProv>%s</%stpProv>%s' % (namespace_, self.gds_format_integer(self.tpProv, input_name='tpProv'), namespace_, eol_))
        if self.dtNomeacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtNomeacao>%s</%sdtNomeacao>%s' % (namespace_, self.gds_format_date(self.dtNomeacao, input_name='dtNomeacao'), namespace_, eol_))
        if self.dtPosse is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtPosse>%s</%sdtPosse>%s' % (namespace_, self.gds_format_date(self.dtPosse, input_name='dtPosse'), namespace_, eol_))
        if self.dtExercicio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExercicio>%s</%sdtExercicio>%s' % (namespace_, self.gds_format_date(self.dtExercicio, input_name='dtExercicio'), namespace_, eol_))
        if self.tpPlanRP is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpPlanRP>%s</%stpPlanRP>%s' % (namespace_, self.gds_format_integer(self.tpPlanRP, input_name='tpPlanRP'), namespace_, eol_))
        if self.infoDecJud is not None:
            # Complex child: delegate serialization to its own export().
            self.infoDecJud.export(outfile, level, namespace_, name_='infoDecJud', pretty_print=pretty_print)
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element onto the matching attribute."""
        if nodeName_ == 'indProvim':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'indProvim')
            self.indProvim = ival_
        elif nodeName_ == 'tpProv':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpProv')
            self.tpProv = ival_
        elif nodeName_ == 'dtNomeacao':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtNomeacao = dval_
        elif nodeName_ == 'dtPosse':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtPosse = dval_
        elif nodeName_ == 'dtExercicio':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtExercicio = dval_
        elif nodeName_ == 'tpPlanRP':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpPlanRP')
            self.tpPlanRP = ival_
        elif nodeName_ == 'infoDecJud':
            # Complex child: build a nested infoDecJud object.
            obj_ = infoDecJud.factory()
            obj_.build(child_)
            self.infoDecJud = obj_
            obj_.original_tagname_ = 'infoDecJud'
# end class infoEstatutario
class indProvim(GeneratedsSuper):
    """Empty generated binding for the ``indProvim`` element: it carries
    no attributes and no children; only the parsed tag name is tracked."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate indProvim, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, indProvim)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if indProvim.subclass:
            return indProvim.subclass(*args_, **kwargs_)
        else:
            return indProvim(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (``if ():``), which
        # is always false and left ``return True`` unreachable.  This class
        # has no attributes or children, so report no content directly.
        return False
    def export(self, outfile, level, namespace_='', name_='indProvim', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as (self-closing) XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('indProvim')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='indProvim')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='indProvim', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='indProvim'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='indProvim', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class indProvim
class tpProv(GeneratedsSuper):
    """Empty generated binding for the ``tpProv`` element: it carries no
    attributes and no children; only the parsed tag name is tracked."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate tpProv, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpProv)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpProv.subclass:
            return tpProv.subclass(*args_, **kwargs_)
        else:
            return tpProv(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (``if ():``), which
        # is always false and left ``return True`` unreachable.  This class
        # has no attributes or children, so report no content directly.
        return False
    def export(self, outfile, level, namespace_='', name_='tpProv', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as (self-closing) XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpProv')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpProv')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpProv', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpProv'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpProv', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpProv
class dtNomeacao(GeneratedsSuper):
    """Empty generated binding for the ``dtNomeacao`` element: it carries
    no attributes and no children; only the parsed tag name is tracked."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate dtNomeacao, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtNomeacao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtNomeacao.subclass:
            return dtNomeacao.subclass(*args_, **kwargs_)
        else:
            return dtNomeacao(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (``if ():``), which
        # is always false and left ``return True`` unreachable.  This class
        # has no attributes or children, so report no content directly.
        return False
    def export(self, outfile, level, namespace_='', name_='dtNomeacao', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as (self-closing) XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtNomeacao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtNomeacao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtNomeacao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtNomeacao'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtNomeacao', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtNomeacao
class dtPosse(GeneratedsSuper):
    """Empty generated binding for the ``dtPosse`` element: it carries no
    attributes and no children; only the parsed tag name is tracked."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Instantiate dtPosse, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtPosse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtPosse.subclass:
            return dtPosse.subclass(*args_, **kwargs_)
        else:
            return dtPosse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (``if ():``), which
        # is always false and left ``return True`` unreachable.  This class
        # has no attributes or children, so report no content directly.
        return False
    def export(self, outfile, level, namespace_='', name_='dtPosse', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as (self-closing) XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtPosse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtPosse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtPosse', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtPosse'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtPosse', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate from an element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtPosse
class dtExercicio(GeneratedsSuper):
    """Empty complex type 'dtExercicio': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate dtExercicio, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtExercicio)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtExercicio.subclass:
            return dtExercicio.subclass(*args_, **kwargs_)
        else:
            return dtExercicio(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='dtExercicio', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtExercicio')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtExercicio')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtExercicio', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtExercicio'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='dtExercicio', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class dtExercicio
class tpPlanRP(GeneratedsSuper):
    """Empty complex type 'tpPlanRP': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate tpPlanRP, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpPlanRP)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpPlanRP.subclass:
            return tpPlanRP.subclass(*args_, **kwargs_)
        else:
            return tpPlanRP(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='tpPlanRP', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpPlanRP')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpPlanRP')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpPlanRP', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpPlanRP'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='tpPlanRP', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class tpPlanRP
class infoDecJud(GeneratedsSuper):
    """Informações sobre os dados da decisão judicial.

    Carries a single optional child element, nrProcJud (the judicial
    process number), and knows how to serialize itself to XML (export)
    and populate itself from a parsed XML node (build)."""
    subclass = None
    superclass = None
    def __init__(self, nrProcJud=None):
        self.original_tagname_ = None
        self.nrProcJud = nrProcJud
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(
                CurrentSubclassModule_, infoDecJud)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if infoDecJud.subclass:
            return infoDecJud.subclass(*args_, **kwargs_)
        return infoDecJud(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nrProcJud(self): return self.nrProcJud
    def set_nrProcJud(self, nrProcJud): self.nrProcJud = nrProcJud
    def hasContent_(self):
        # Content exists whenever the single child element is populated.
        return self.nrProcJud is not None
    def export(self, outfile, level, namespace_='', name_='infoDecJud', namespacedef_='', pretty_print=True):
        """Write this element (and its child, if any) to outfile as XML."""
        ns_def_ = GenerateDSNamespaceDefs_.get('infoDecJud')
        if ns_def_ is not None:
            namespacedef_ = ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoDecJud')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoDecJud', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoDecJud'):
        pass  # no XML attributes on this type
    def exportChildren(self, outfile, level, namespace_='', name_='infoDecJud', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrProcJud is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrProcJud>%s</%snrProcJud>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrProcJud), input_name='nrProcJud')), namespace_, eol_))
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes on this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'nrProcJud':
            self.nrProcJud = self.gds_validate_string(
                child_.text, node, 'nrProcJud')
# end class infoDecJud
class nrProcJud(GeneratedsSuper):
    """Empty complex type 'nrProcJud': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate nrProcJud, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrProcJud)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrProcJud.subclass:
            return nrProcJud.subclass(*args_, **kwargs_)
        else:
            return nrProcJud(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='nrProcJud', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrProcJud')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrProcJud')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrProcJud', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrProcJud'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='nrProcJud', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class nrProcJud
class sucessaoVinc(GeneratedsSuper):
    """Grupo de informações da sucessão de vínculo trabalhista/estatutário.

    Children: cnpjEmpregAnt (previous employer's CNPJ), matricAnt
    (previous registration), dtTransf (transfer date) and observacao
    (free-text note). All are optional; dtTransf is stored as a
    datetime.date."""
    subclass = None
    superclass = None
    def __init__(self, cnpjEmpregAnt=None, matricAnt=None, dtTransf=None, observacao=None):
        self.original_tagname_ = None
        self.cnpjEmpregAnt = cnpjEmpregAnt
        self.matricAnt = matricAnt
        # Accept either a date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtTransf, BaseStrType_):
            self.dtTransf = datetime_.datetime.strptime(dtTransf, '%Y-%m-%d').date()
        else:
            self.dtTransf = dtTransf
        self.observacao = observacao
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(
                CurrentSubclassModule_, sucessaoVinc)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if sucessaoVinc.subclass:
            return sucessaoVinc.subclass(*args_, **kwargs_)
        return sucessaoVinc(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_cnpjEmpregAnt(self): return self.cnpjEmpregAnt
    def set_cnpjEmpregAnt(self, cnpjEmpregAnt): self.cnpjEmpregAnt = cnpjEmpregAnt
    def get_matricAnt(self): return self.matricAnt
    def set_matricAnt(self, matricAnt): self.matricAnt = matricAnt
    def get_dtTransf(self): return self.dtTransf
    def set_dtTransf(self, dtTransf): self.dtTransf = dtTransf
    def get_observacao(self): return self.observacao
    def set_observacao(self, observacao): self.observacao = observacao
    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.cnpjEmpregAnt is not None or
            self.matricAnt is not None or
            self.dtTransf is not None or
            self.observacao is not None
        )
    def export(self, outfile, level, namespace_='', name_='sucessaoVinc', namespacedef_='', pretty_print=True):
        """Write this element (and its children, if any) to outfile as XML."""
        ns_def_ = GenerateDSNamespaceDefs_.get('sucessaoVinc')
        if ns_def_ is not None:
            namespacedef_ = ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='sucessaoVinc')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='sucessaoVinc', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='sucessaoVinc'):
        pass  # no XML attributes on this type
    def exportChildren(self, outfile, level, namespace_='', name_='sucessaoVinc', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.cnpjEmpregAnt is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scnpjEmpregAnt>%s</%scnpjEmpregAnt>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cnpjEmpregAnt), input_name='cnpjEmpregAnt')), namespace_, eol_))
        if self.matricAnt is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smatricAnt>%s</%smatricAnt>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.matricAnt), input_name='matricAnt')), namespace_, eol_))
        if self.dtTransf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtTransf>%s</%sdtTransf>%s' % (namespace_, self.gds_format_date(self.dtTransf, input_name='dtTransf'), namespace_, eol_))
        if self.observacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sobservacao>%s</%sobservacao>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.observacao), input_name='observacao')), namespace_, eol_))
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes on this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'cnpjEmpregAnt':
            self.cnpjEmpregAnt = self.gds_validate_string(
                child_.text, node, 'cnpjEmpregAnt')
        elif nodeName_ == 'matricAnt':
            self.matricAnt = self.gds_validate_string(
                child_.text, node, 'matricAnt')
        elif nodeName_ == 'dtTransf':
            # Dates arrive as text and are parsed to datetime.date.
            self.dtTransf = self.gds_parse_date(child_.text)
        elif nodeName_ == 'observacao':
            self.observacao = self.gds_validate_string(
                child_.text, node, 'observacao')
# end class sucessaoVinc
class cnpjEmpregAnt(GeneratedsSuper):
    """Empty complex type 'cnpjEmpregAnt': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate cnpjEmpregAnt, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cnpjEmpregAnt)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cnpjEmpregAnt.subclass:
            return cnpjEmpregAnt.subclass(*args_, **kwargs_)
        else:
            return cnpjEmpregAnt(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='cnpjEmpregAnt', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cnpjEmpregAnt')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cnpjEmpregAnt')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cnpjEmpregAnt', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cnpjEmpregAnt'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='cnpjEmpregAnt', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class cnpjEmpregAnt
class matricAnt(GeneratedsSuper):
    """Empty complex type 'matricAnt': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate matricAnt, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, matricAnt)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if matricAnt.subclass:
            return matricAnt.subclass(*args_, **kwargs_)
        else:
            return matricAnt(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='matricAnt', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('matricAnt')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='matricAnt')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='matricAnt', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='matricAnt'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='matricAnt', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class matricAnt
class dtTransf(GeneratedsSuper):
    """Empty complex type 'dtTransf': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate dtTransf, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtTransf)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtTransf.subclass:
            return dtTransf.subclass(*args_, **kwargs_)
        else:
            return dtTransf(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='dtTransf', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtTransf')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtTransf')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtTransf', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtTransf'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='dtTransf', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class dtTransf
class transfDom(GeneratedsSuper):
    """Informações do empregado doméstico transferido de outro
    representante da mesma unidade familiar.

    Children: cpfSubstituido (CPF of the replaced representative),
    matricAnt (previous registration) and dtTransf (transfer date,
    stored as a datetime.date). All are optional."""
    subclass = None
    superclass = None
    def __init__(self, cpfSubstituido=None, matricAnt=None, dtTransf=None):
        self.original_tagname_ = None
        self.cpfSubstituido = cpfSubstituido
        self.matricAnt = matricAnt
        # Accept either a date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtTransf, BaseStrType_):
            self.dtTransf = datetime_.datetime.strptime(dtTransf, '%Y-%m-%d').date()
        else:
            self.dtTransf = dtTransf
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(
                CurrentSubclassModule_, transfDom)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if transfDom.subclass:
            return transfDom.subclass(*args_, **kwargs_)
        return transfDom(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_cpfSubstituido(self): return self.cpfSubstituido
    def set_cpfSubstituido(self, cpfSubstituido): self.cpfSubstituido = cpfSubstituido
    def get_matricAnt(self): return self.matricAnt
    def set_matricAnt(self, matricAnt): self.matricAnt = matricAnt
    def get_dtTransf(self): return self.dtTransf
    def set_dtTransf(self, dtTransf): self.dtTransf = dtTransf
    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.cpfSubstituido is not None or
            self.matricAnt is not None or
            self.dtTransf is not None
        )
    def export(self, outfile, level, namespace_='', name_='transfDom', namespacedef_='', pretty_print=True):
        """Write this element (and its children, if any) to outfile as XML."""
        ns_def_ = GenerateDSNamespaceDefs_.get('transfDom')
        if ns_def_ is not None:
            namespacedef_ = ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='transfDom')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='transfDom', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='transfDom'):
        pass  # no XML attributes on this type
    def exportChildren(self, outfile, level, namespace_='', name_='transfDom', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.cpfSubstituido is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scpfSubstituido>%s</%scpfSubstituido>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfSubstituido), input_name='cpfSubstituido')), namespace_, eol_))
        if self.matricAnt is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smatricAnt>%s</%smatricAnt>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.matricAnt), input_name='matricAnt')), namespace_, eol_))
        if self.dtTransf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtTransf>%s</%sdtTransf>%s' % (namespace_, self.gds_format_date(self.dtTransf, input_name='dtTransf'), namespace_, eol_))
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes on this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'cpfSubstituido':
            self.cpfSubstituido = self.gds_validate_string(
                child_.text, node, 'cpfSubstituido')
        elif nodeName_ == 'matricAnt':
            self.matricAnt = self.gds_validate_string(
                child_.text, node, 'matricAnt')
        elif nodeName_ == 'dtTransf':
            # Dates arrive as text and are parsed to datetime.date.
            self.dtTransf = self.gds_parse_date(child_.text)
# end class transfDom
class cpfSubstituido(GeneratedsSuper):
    """Empty complex type 'cpfSubstituido': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate cpfSubstituido, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cpfSubstituido)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cpfSubstituido.subclass:
            return cpfSubstituido.subclass(*args_, **kwargs_)
        else:
            return cpfSubstituido(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='cpfSubstituido', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfSubstituido')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfSubstituido')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfSubstituido', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfSubstituido'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='cpfSubstituido', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class cpfSubstituido
class afastamento(GeneratedsSuper):
    """Informações de afastamento do trabalhador. Preenchimento exclusivo
    em caso de trabalhador que permaneça afastado na data de início
    da obrigatoriedade do empregador no eSocial.

    Children: dtIniAfast (leave start date, stored as a datetime.date)
    and codMotAfast (leave reason code). Both are optional."""
    subclass = None
    superclass = None
    def __init__(self, dtIniAfast=None, codMotAfast=None):
        self.original_tagname_ = None
        # Accept either a date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtIniAfast, BaseStrType_):
            self.dtIniAfast = datetime_.datetime.strptime(dtIniAfast, '%Y-%m-%d').date()
        else:
            self.dtIniAfast = dtIniAfast
        self.codMotAfast = codMotAfast
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(
                CurrentSubclassModule_, afastamento)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if afastamento.subclass:
            return afastamento.subclass(*args_, **kwargs_)
        return afastamento(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_dtIniAfast(self): return self.dtIniAfast
    def set_dtIniAfast(self, dtIniAfast): self.dtIniAfast = dtIniAfast
    def get_codMotAfast(self): return self.codMotAfast
    def set_codMotAfast(self, codMotAfast): self.codMotAfast = codMotAfast
    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.dtIniAfast is not None or
            self.codMotAfast is not None
        )
    def export(self, outfile, level, namespace_='', name_='afastamento', namespacedef_='', pretty_print=True):
        """Write this element (and its children, if any) to outfile as XML."""
        ns_def_ = GenerateDSNamespaceDefs_.get('afastamento')
        if ns_def_ is not None:
            namespacedef_ = ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='afastamento')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='afastamento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='afastamento'):
        pass  # no XML attributes on this type
    def exportChildren(self, outfile, level, namespace_='', name_='afastamento', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.dtIniAfast is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtIniAfast>%s</%sdtIniAfast>%s' % (namespace_, self.gds_format_date(self.dtIniAfast, input_name='dtIniAfast'), namespace_, eol_))
        if self.codMotAfast is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodMotAfast>%s</%scodMotAfast>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codMotAfast), input_name='codMotAfast')), namespace_, eol_))
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes on this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'dtIniAfast':
            # Dates arrive as text and are parsed to datetime.date.
            self.dtIniAfast = self.gds_parse_date(child_.text)
        elif nodeName_ == 'codMotAfast':
            self.codMotAfast = self.gds_validate_string(
                child_.text, node, 'codMotAfast')
# end class afastamento
class dtIniAfast(GeneratedsSuper):
    """Empty complex type 'dtIniAfast': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate dtIniAfast, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtIniAfast)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtIniAfast.subclass:
            return dtIniAfast.subclass(*args_, **kwargs_)
        else:
            return dtIniAfast(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='dtIniAfast', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtIniAfast')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtIniAfast')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtIniAfast', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtIniAfast'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='dtIniAfast', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class dtIniAfast
class codMotAfast(GeneratedsSuper):
    """Empty complex type 'codMotAfast': defines no XML attributes and no
    child elements, so instances always serialize as a self-closing tag."""
    subclass = None
    superclass = None
    def __init__(self):
        # Parser records the tag name it saw so export() can re-emit it.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate codMotAfast, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, codMotAfast)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if codMotAfast.subclass:
            return codMotAfast.subclass(*args_, **kwargs_)
        else:
            return codMotAfast(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tested an empty tuple here, which made the
        # True branch unreachable; this type never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='codMotAfast', namespacedef_='', pretty_print=True):
        """Write this (always empty) element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codMotAfast')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codMotAfast')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codMotAfast', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codMotAfast'):
        pass  # no attributes defined
    def exportChildren(self, outfile, level, namespace_='', name_='codMotAfast', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined
    def build(self, node):
        """Populate this object from an XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to read
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to read
# end class codMotAfast
class desligamento(GeneratedsSuper):
    """Worker-termination information (``desligamento``).

    Used only when an initial registration must be sent for a worker already
    dismissed before eSocial took effect (e.g. payment of salary differences
    from a collective agreement in months after the dismissal).

    Single child element: dtDeslig — dismissal date (xs:date).
    """
    subclass = None
    superclass = None

    def __init__(self, dtDeslig=None):
        self.original_tagname_ = None
        # Accept either a pre-built date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtDeslig, BaseStrType_):
            self.dtDeslig = datetime_.datetime.strptime(dtDeslig, '%Y-%m-%d').date()
        else:
            self.dtDeslig = dtDeslig

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, desligamento)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = desligamento.subclass or desligamento
        return cls_(*args_, **kwargs_)

    def get_dtDeslig(self): return self.dtDeslig
    def set_dtDeslig(self, dtDeslig): self.dtDeslig = dtDeslig

    def hasContent_(self):
        # Content exists only when the single member element is set.
        return self.dtDeslig is not None

    def export(self, outfile, level, namespace_='', name_='desligamento', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('desligamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='desligamento')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='desligamento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='desligamento'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='desligamento', fromsubclass_=False, pretty_print=True):
        """Write the dtDeslig child element, if set."""
        eol_ = '\n' if pretty_print else ''
        if self.dtDeslig is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtDeslig>%s</%sdtDeslig>%s' % (
                namespace_,
                self.gds_format_date(self.dtDeslig, input_name='dtDeslig'),
                namespace_, eol_))

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse the dtDeslig child element into a date value."""
        if nodeName_ == 'dtDeslig':
            self.dtDeslig = self.gds_parse_date(child_.text)
# end class desligamento
class dtDeslig(GeneratedsSuper):
    """Generated binding for the ``dtDeslig`` element.

    The schema declares no attributes or children for this type.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, dtDeslig)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = dtDeslig.subclass or dtDeslig
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='dtDeslig', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtDeslig')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtDeslig')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtDeslig', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtDeslig'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='dtDeslig', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class dtDeslig
class TIdeEveTrab(GeneratedsSuper):
    """Event identification (``TIdeEveTrab``).

    Child elements: indRetif, tpAmb, procEmi (integers); nrRecibo, verProc
    (strings). All are optional at this level.
    """
    subclass = None
    superclass = None

    def __init__(self, indRetif=None, nrRecibo=None, tpAmb=None, procEmi=None, verProc=None):
        self.original_tagname_ = None
        self.indRetif = indRetif
        self.nrRecibo = nrRecibo
        self.tpAmb = tpAmb
        self.procEmi = procEmi
        self.verProc = verProc

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, TIdeEveTrab)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = TIdeEveTrab.subclass or TIdeEveTrab
        return cls_(*args_, **kwargs_)

    def get_indRetif(self): return self.indRetif
    def set_indRetif(self, indRetif): self.indRetif = indRetif
    def get_nrRecibo(self): return self.nrRecibo
    def set_nrRecibo(self, nrRecibo): self.nrRecibo = nrRecibo
    def get_tpAmb(self): return self.tpAmb
    def set_tpAmb(self, tpAmb): self.tpAmb = tpAmb
    def get_procEmi(self): return self.procEmi
    def set_procEmi(self, procEmi): self.procEmi = procEmi
    def get_verProc(self): return self.verProc
    def set_verProc(self, verProc): self.verProc = verProc

    def hasContent_(self):
        # Content exists when any member element is set.
        return any(member is not None for member in (
            self.indRetif, self.nrRecibo, self.tpAmb, self.procEmi, self.verProc))

    def export(self, outfile, level, namespace_='', name_='TIdeEveTrab', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TIdeEveTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TIdeEveTrab')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='TIdeEveTrab', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TIdeEveTrab'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='TIdeEveTrab', fromsubclass_=False, pretty_print=True):
        """Write each member element that is set, in schema order."""
        eol_ = '\n' if pretty_print else ''
        if self.indRetif is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sindRetif>%s</%sindRetif>%s' % (
                namespace_,
                self.gds_format_integer(self.indRetif, input_name='indRetif'),
                namespace_, eol_))
        if self.nrRecibo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRecibo>%s</%snrRecibo>%s' % (
                namespace_,
                self.gds_encode(self.gds_format_string(quote_xml(self.nrRecibo), input_name='nrRecibo')),
                namespace_, eol_))
        if self.tpAmb is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpAmb>%s</%stpAmb>%s' % (
                namespace_,
                self.gds_format_integer(self.tpAmb, input_name='tpAmb'),
                namespace_, eol_))
        if self.procEmi is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sprocEmi>%s</%sprocEmi>%s' % (
                namespace_,
                self.gds_format_integer(self.procEmi, input_name='procEmi'),
                namespace_, eol_))
        if self.verProc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sverProc>%s</%sverProc>%s' % (
                namespace_,
                self.gds_encode(self.gds_format_string(quote_xml(self.verProc), input_name='verProc')),
                namespace_, eol_))

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def _int_child_(self, child_, node, name_):
        # Parse an integer-valued child element; raises a parse error on
        # non-integer text, then runs the generated integer validator.
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        return self.gds_validate_integer(ival_, node, name_)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element to the matching member."""
        if nodeName_ == 'indRetif':
            self.indRetif = self._int_child_(child_, node, 'indRetif')
        elif nodeName_ == 'nrRecibo':
            self.nrRecibo = self.gds_validate_string(child_.text, node, 'nrRecibo')
        elif nodeName_ == 'tpAmb':
            self.tpAmb = self._int_child_(child_, node, 'tpAmb')
        elif nodeName_ == 'procEmi':
            self.procEmi = self._int_child_(child_, node, 'procEmi')
        elif nodeName_ == 'verProc':
            self.verProc = self.gds_validate_string(child_.text, node, 'verProc')
# end class TIdeEveTrab
class indRetif(GeneratedsSuper):
    """Generated binding for the ``indRetif`` element (no attributes/children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, indRetif)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = indRetif.subclass or indRetif
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='indRetif', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('indRetif')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='indRetif')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='indRetif', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='indRetif'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='indRetif', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class indRetif
class nrRecibo(GeneratedsSuper):
    """Generated binding for the ``nrRecibo`` element (no attributes/children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nrRecibo)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = nrRecibo.subclass or nrRecibo
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRecibo', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRecibo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRecibo')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRecibo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRecibo'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='nrRecibo', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class nrRecibo
class tpAmb(GeneratedsSuper):
    """Generated binding for the ``tpAmb`` element (no attributes/children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, tpAmb)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = tpAmb.subclass or tpAmb
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='tpAmb', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpAmb')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpAmb')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpAmb', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpAmb'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='tpAmb', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class tpAmb
class procEmi(GeneratedsSuper):
    """Generated binding for the ``procEmi`` element (no attributes/children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, procEmi)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = procEmi.subclass or procEmi
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='procEmi', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('procEmi')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='procEmi')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='procEmi', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='procEmi'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='procEmi', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class procEmi
class verProc(GeneratedsSuper):
    """Generated binding for the ``verProc`` element (no attributes/children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, verProc)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = verProc.subclass or verProc
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='verProc', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('verProc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='verProc')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='verProc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='verProc'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='verProc', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class verProc
class TEmpregador(GeneratedsSuper):
    """Employer identification (``TEmpregador``).

    Child elements: tpInsc (integer registration type) and nrInsc (string
    registration number).
    """
    subclass = None
    superclass = None

    def __init__(self, tpInsc=None, nrInsc=None):
        self.original_tagname_ = None
        self.tpInsc = tpInsc
        self.nrInsc = nrInsc

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, TEmpregador)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = TEmpregador.subclass or TEmpregador
        return cls_(*args_, **kwargs_)

    def get_tpInsc(self): return self.tpInsc
    def set_tpInsc(self, tpInsc): self.tpInsc = tpInsc
    def get_nrInsc(self): return self.nrInsc
    def set_nrInsc(self, nrInsc): self.nrInsc = nrInsc

    def hasContent_(self):
        # Content exists when any member element is set.
        return self.tpInsc is not None or self.nrInsc is not None

    def export(self, outfile, level, namespace_='', name_='TEmpregador', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEmpregador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEmpregador')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEmpregador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEmpregador'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='TEmpregador', fromsubclass_=False, pretty_print=True):
        """Write each member element that is set, in schema order."""
        eol_ = '\n' if pretty_print else ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (
                namespace_,
                self.gds_format_integer(self.tpInsc, input_name='tpInsc'),
                namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (
                namespace_,
                self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')),
                namespace_, eol_))

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element to the matching member."""
        if nodeName_ == 'tpInsc':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.tpInsc = self.gds_validate_integer(ival_, node, 'tpInsc')
        elif nodeName_ == 'nrInsc':
            self.nrInsc = self.gds_validate_string(child_.text, node, 'nrInsc')
# end class TEmpregador
class TCtps(GeneratedsSuper):
    """Work and Social Security Card (Carteira de Trabalho e Previdência Social).

    Child elements (all strings): nrCtps (number), serieCtps (series),
    ufCtps (issuing state).
    """
    subclass = None
    superclass = None

    def __init__(self, nrCtps=None, serieCtps=None, ufCtps=None):
        self.original_tagname_ = None
        self.nrCtps = nrCtps
        self.serieCtps = serieCtps
        self.ufCtps = ufCtps

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, TCtps)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = TCtps.subclass or TCtps
        return cls_(*args_, **kwargs_)

    def get_nrCtps(self): return self.nrCtps
    def set_nrCtps(self, nrCtps): self.nrCtps = nrCtps
    def get_serieCtps(self): return self.serieCtps
    def set_serieCtps(self, serieCtps): self.serieCtps = serieCtps
    def get_ufCtps(self): return self.ufCtps
    def set_ufCtps(self, ufCtps): self.ufCtps = ufCtps

    def hasContent_(self):
        # Content exists when any member element is set.
        return any(member is not None for member in (
            self.nrCtps, self.serieCtps, self.ufCtps))

    def export(self, outfile, level, namespace_='', name_='TCtps', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TCtps')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='TCtps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TCtps'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='TCtps', fromsubclass_=False, pretty_print=True):
        """Write each member element that is set, in schema order."""
        eol_ = '\n' if pretty_print else ''
        if self.nrCtps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrCtps>%s</%snrCtps>%s' % (
                namespace_,
                self.gds_encode(self.gds_format_string(quote_xml(self.nrCtps), input_name='nrCtps')),
                namespace_, eol_))
        if self.serieCtps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sserieCtps>%s</%sserieCtps>%s' % (
                namespace_,
                self.gds_encode(self.gds_format_string(quote_xml(self.serieCtps), input_name='serieCtps')),
                namespace_, eol_))
        if self.ufCtps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sufCtps>%s</%sufCtps>%s' % (
                namespace_,
                self.gds_encode(self.gds_format_string(quote_xml(self.ufCtps), input_name='ufCtps')),
                namespace_, eol_))

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a parsed child element to the matching string member."""
        if nodeName_ == 'nrCtps':
            self.nrCtps = self.gds_validate_string(child_.text, node, 'nrCtps')
        elif nodeName_ == 'serieCtps':
            self.serieCtps = self.gds_validate_string(child_.text, node, 'serieCtps')
        elif nodeName_ == 'ufCtps':
            self.ufCtps = self.gds_validate_string(child_.text, node, 'ufCtps')
# end class TCtps
class nrCtps(GeneratedsSuper):
    """Generated binding for the ``nrCtps`` element (no attributes/children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nrCtps)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = nrCtps.subclass or nrCtps
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='nrCtps', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrCtps')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrCtps', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrCtps'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='nrCtps', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class nrCtps
class serieCtps(GeneratedsSuper):
    """Generated binding for the ``serieCtps`` element (no attributes/children)."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, serieCtps)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = serieCtps.subclass or serieCtps
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        # No member elements are defined, so there is never content.
        return False

    def export(self, outfile, level, namespace_='', name_='serieCtps', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('serieCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='serieCtps')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='serieCtps', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='serieCtps'):
        """No XML attributes are defined for this type; nothing to write."""

    def exportChildren(self, outfile, level, namespace_='', name_='serieCtps', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this type; nothing to write."""

    def build(self, node):
        """Populate this object from the ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for this type; nothing to parse."""
# end class serieCtps
class ufCtps(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``ufCtps`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ufCtps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ufCtps.subclass:
            return ufCtps.subclass(*args_, **kwargs_)
        else:
            return ufCtps(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='ufCtps', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ufCtps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ufCtps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ufCtps', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ufCtps'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='ufCtps', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class ufCtps
class TRic(GeneratedsSuper):
    """Registro de Identificação Civil (Civil Identification Registry).

    Holds the RIC number, the issuing body and the issue date; can
    serialize itself to XML via export() and populate itself from a
    parsed XML node via build().
    """
    subclass = None
    superclass = None
    def __init__(self, nrRic=None, orgaoEmissor=None, dtExped=None):
        self.original_tagname_ = None
        self.nrRic = nrRic
        self.orgaoEmissor = orgaoEmissor
        # dtExped may arrive as an ISO 'YYYY-MM-DD' string; normalize to a date.
        if isinstance(dtExped, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            initvalue_ = dtExped
        self.dtExped = initvalue_
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TRic)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TRic.subclass:
            return TRic.subclass(*args_, **kwargs_)
        else:
            return TRic(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nrRic(self): return self.nrRic
    def set_nrRic(self, nrRic): self.nrRic = nrRic
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped
    def hasContent_(self):
        # True when at least one member would be exported as a child element.
        if (
            self.nrRic is not None or
            self.orgaoEmissor is not None or
            self.dtExped is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TRic', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Prefer the tag name seen during parsing over the default.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRic')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TRic', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No members set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRic'):
        pass  # no XML attributes defined for this type
    def exportChildren(self, outfile, level, namespace_='', name_='TRic', fromsubclass_=False, pretty_print=True):
        # Emit one child element per member that is set.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.nrRic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRic>%s</%snrRic>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRic), input_name='nrRic')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes defined for this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name; strings are validated, dates parsed.
        if nodeName_ == 'nrRic':
            nrRic_ = child_.text
            nrRic_ = self.gds_validate_string(nrRic_, node, 'nrRic')
            self.nrRic = nrRic_
        elif nodeName_ == 'orgaoEmissor':
            orgaoEmissor_ = child_.text
            orgaoEmissor_ = self.gds_validate_string(orgaoEmissor_, node, 'orgaoEmissor')
            self.orgaoEmissor = orgaoEmissor_
        elif nodeName_ == 'dtExped':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtExped = dval_
# end class TRic
class nrRic(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``nrRic`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRic)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrRic.subclass:
            return nrRic.subclass(*args_, **kwargs_)
        else:
            return nrRic(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='nrRic', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRic')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRic', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRic'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='nrRic', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class nrRic
class orgaoEmissor(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``orgaoEmissor`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, orgaoEmissor)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if orgaoEmissor.subclass:
            return orgaoEmissor.subclass(*args_, **kwargs_)
        else:
            return orgaoEmissor(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='orgaoEmissor', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('orgaoEmissor')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='orgaoEmissor')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='orgaoEmissor', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='orgaoEmissor'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='orgaoEmissor', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class orgaoEmissor
class dtExped(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``dtExped`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtExped)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtExped.subclass:
            return dtExped.subclass(*args_, **kwargs_)
        else:
            return dtExped(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='dtExped', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtExped')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtExped')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtExped', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtExped'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='dtExped', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class dtExped
class TRg(GeneratedsSuper):
    """Registro Geral (RG, Brazilian general identity document).

    Holds the RG number, the issuing body and the issue date; can
    serialize itself to XML via export() and populate itself from a
    parsed XML node via build().
    """
    subclass = None
    superclass = None
    def __init__(self, nrRg=None, orgaoEmissor=None, dtExped=None):
        self.original_tagname_ = None
        self.nrRg = nrRg
        self.orgaoEmissor = orgaoEmissor
        # dtExped may arrive as an ISO 'YYYY-MM-DD' string; normalize to a date.
        if isinstance(dtExped, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            initvalue_ = dtExped
        self.dtExped = initvalue_
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TRg)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TRg.subclass:
            return TRg.subclass(*args_, **kwargs_)
        else:
            return TRg(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nrRg(self): return self.nrRg
    def set_nrRg(self, nrRg): self.nrRg = nrRg
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped
    def hasContent_(self):
        # True when at least one member would be exported as a child element.
        if (
            self.nrRg is not None or
            self.orgaoEmissor is not None or
            self.dtExped is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TRg', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRg')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Prefer the tag name seen during parsing over the default.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRg')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TRg', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No members set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRg'):
        pass  # no XML attributes defined for this type
    def exportChildren(self, outfile, level, namespace_='', name_='TRg', fromsubclass_=False, pretty_print=True):
        # Emit one child element per member that is set.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.nrRg is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRg>%s</%snrRg>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRg), input_name='nrRg')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes defined for this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name; strings are validated, dates parsed.
        if nodeName_ == 'nrRg':
            nrRg_ = child_.text
            nrRg_ = self.gds_validate_string(nrRg_, node, 'nrRg')
            self.nrRg = nrRg_
        elif nodeName_ == 'orgaoEmissor':
            orgaoEmissor_ = child_.text
            orgaoEmissor_ = self.gds_validate_string(orgaoEmissor_, node, 'orgaoEmissor')
            self.orgaoEmissor = orgaoEmissor_
        elif nodeName_ == 'dtExped':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtExped = dval_
# end class TRg
class nrRg(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``nrRg`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRg)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrRg.subclass:
            return nrRg.subclass(*args_, **kwargs_)
        else:
            return nrRg(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='nrRg', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRg')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRg')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRg', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRg'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='nrRg', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class nrRg
class TRne(GeneratedsSuper):
    """Registro Nacional de Estrangeiros (National Registry of Foreigners).

    Holds the RNE number, the issuing body and the issue date; can
    serialize itself to XML via export() and populate itself from a
    parsed XML node via build().
    """
    subclass = None
    superclass = None
    def __init__(self, nrRne=None, orgaoEmissor=None, dtExped=None):
        self.original_tagname_ = None
        self.nrRne = nrRne
        self.orgaoEmissor = orgaoEmissor
        # dtExped may arrive as an ISO 'YYYY-MM-DD' string; normalize to a date.
        if isinstance(dtExped, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            initvalue_ = dtExped
        self.dtExped = initvalue_
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TRne)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TRne.subclass:
            return TRne.subclass(*args_, **kwargs_)
        else:
            return TRne(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nrRne(self): return self.nrRne
    def set_nrRne(self, nrRne): self.nrRne = nrRne
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped
    def hasContent_(self):
        # True when at least one member would be exported as a child element.
        if (
            self.nrRne is not None or
            self.orgaoEmissor is not None or
            self.dtExped is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TRne', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRne')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Prefer the tag name seen during parsing over the default.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRne')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TRne', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No members set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRne'):
        pass  # no XML attributes defined for this type
    def exportChildren(self, outfile, level, namespace_='', name_='TRne', fromsubclass_=False, pretty_print=True):
        # Emit one child element per member that is set.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.nrRne is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRne>%s</%snrRne>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRne), input_name='nrRne')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes defined for this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name; strings are validated, dates parsed.
        if nodeName_ == 'nrRne':
            nrRne_ = child_.text
            nrRne_ = self.gds_validate_string(nrRne_, node, 'nrRne')
            self.nrRne = nrRne_
        elif nodeName_ == 'orgaoEmissor':
            orgaoEmissor_ = child_.text
            orgaoEmissor_ = self.gds_validate_string(orgaoEmissor_, node, 'orgaoEmissor')
            self.orgaoEmissor = orgaoEmissor_
        elif nodeName_ == 'dtExped':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtExped = dval_
# end class TRne
class nrRne(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``nrRne`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRne)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrRne.subclass:
            return nrRne.subclass(*args_, **kwargs_)
        else:
            return nrRne(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='nrRne', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRne')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRne')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRne', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRne'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='nrRne', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class nrRne
class TOc(GeneratedsSuper):
    """Órgão de Classe (professional-body registration).

    Holds the registration number, the issuing body, the issue date and
    the validity date; can serialize itself to XML via export() and
    populate itself from a parsed XML node via build().
    """
    subclass = None
    superclass = None
    def __init__(self, nrOc=None, orgaoEmissor=None, dtExped=None, dtValid=None):
        self.original_tagname_ = None
        self.nrOc = nrOc
        self.orgaoEmissor = orgaoEmissor
        # Date arguments may arrive as ISO 'YYYY-MM-DD' strings; normalize to dates.
        if isinstance(dtExped, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtExped, '%Y-%m-%d').date()
        else:
            initvalue_ = dtExped
        self.dtExped = initvalue_
        if isinstance(dtValid, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtValid, '%Y-%m-%d').date()
        else:
            initvalue_ = dtValid
        self.dtValid = initvalue_
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TOc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TOc.subclass:
            return TOc.subclass(*args_, **kwargs_)
        else:
            return TOc(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_nrOc(self): return self.nrOc
    def set_nrOc(self, nrOc): self.nrOc = nrOc
    def get_orgaoEmissor(self): return self.orgaoEmissor
    def set_orgaoEmissor(self, orgaoEmissor): self.orgaoEmissor = orgaoEmissor
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped
    def get_dtValid(self): return self.dtValid
    def set_dtValid(self, dtValid): self.dtValid = dtValid
    def hasContent_(self):
        # True when at least one member would be exported as a child element.
        if (
            self.nrOc is not None or
            self.orgaoEmissor is not None or
            self.dtExped is not None or
            self.dtValid is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TOc', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TOc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Prefer the tag name seen during parsing over the default.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TOc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TOc', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No members set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TOc'):
        pass  # no XML attributes defined for this type
    def exportChildren(self, outfile, level, namespace_='', name_='TOc', fromsubclass_=False, pretty_print=True):
        # Emit one child element per member that is set.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.nrOc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrOc>%s</%snrOc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrOc), input_name='nrOc')), namespace_, eol_))
        if self.orgaoEmissor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sorgaoEmissor>%s</%sorgaoEmissor>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.orgaoEmissor), input_name='orgaoEmissor')), namespace_, eol_))
        if self.dtExped is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtExped>%s</%sdtExped>%s' % (namespace_, self.gds_format_date(self.dtExped, input_name='dtExped'), namespace_, eol_))
        if self.dtValid is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtValid>%s</%sdtValid>%s' % (namespace_, self.gds_format_date(self.dtValid, input_name='dtValid'), namespace_, eol_))
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no XML attributes defined for this type
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name; strings are validated, dates parsed.
        if nodeName_ == 'nrOc':
            nrOc_ = child_.text
            nrOc_ = self.gds_validate_string(nrOc_, node, 'nrOc')
            self.nrOc = nrOc_
        elif nodeName_ == 'orgaoEmissor':
            orgaoEmissor_ = child_.text
            orgaoEmissor_ = self.gds_validate_string(orgaoEmissor_, node, 'orgaoEmissor')
            self.orgaoEmissor = orgaoEmissor_
        elif nodeName_ == 'dtExped':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtExped = dval_
        elif nodeName_ == 'dtValid':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtValid = dval_
# end class TOc
class nrOc(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``nrOc`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrOc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrOc.subclass:
            return nrOc.subclass(*args_, **kwargs_)
        else:
            return nrOc(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='nrOc', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrOc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrOc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrOc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrOc'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='nrOc', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class nrOc
class dtValid(GeneratedsSuper):
    """Empty schema-generated placeholder for the ``dtValid`` element.

    Defines no attributes and no child elements; it exists only so the
    element name can round-trip through export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name captured during parsing so export() can reuse it.
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Allow a registered subclass module to override instantiation.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtValid)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtValid.subclass:
            return dtValid.subclass(*args_, **kwargs_)
        else:
            return dtValid(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # The generated original tested an empty tuple (`if (): ...`),
        # which is always false; say so directly.
        return False
    def export(self, outfile, level, namespace_='', name_='dtValid', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always a self-closing tag."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtValid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtValid')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtValid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtValid'):
        pass  # no attributes defined for this element
    def exportChildren(self, outfile, level, namespace_='', name_='dtValid', fromsubclass_=False, pretty_print=True):
        pass  # no child elements defined for this element
    def build(self, node):
        """Populate this object from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes defined for this element
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no child elements defined for this element
# end class dtValid
class TCnh(GeneratedsSuper):
    """Carteira Nacional de Habilitação (CNH — Brazilian driver's licence)."""
    subclass = None
    superclass = None
    def __init__(self, nrRegCnh=None, dtExped=None, ufCnh=None, dtValid=None, dtPriHab=None, categoriaCnh=None):
        self.original_tagname_ = None
        def _as_date(value):
            # xs:date values may arrive as 'YYYY-MM-DD' strings; parse those,
            # pass anything else (date objects, None) through unchanged.
            if isinstance(value, BaseStrType_):
                return datetime_.datetime.strptime(value, '%Y-%m-%d').date()
            return value
        self.nrRegCnh = nrRegCnh
        self.dtExped = _as_date(dtExped)
        self.ufCnh = ufCnh
        self.dtValid = _as_date(dtValid)
        self.dtPriHab = _as_date(dtPriHab)
        self.categoriaCnh = categoriaCnh
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, TCnh)
            if override is not None:
                return override(*args_, **kwargs_)
        if TCnh.subclass:
            return TCnh.subclass(*args_, **kwargs_)
        return TCnh(*args_, **kwargs_)
    # Thin accessors retained for generateDS API compatibility.
    def get_nrRegCnh(self): return self.nrRegCnh
    def set_nrRegCnh(self, nrRegCnh): self.nrRegCnh = nrRegCnh
    def get_dtExped(self): return self.dtExped
    def set_dtExped(self, dtExped): self.dtExped = dtExped
    def get_ufCnh(self): return self.ufCnh
    def set_ufCnh(self, ufCnh): self.ufCnh = ufCnh
    def get_dtValid(self): return self.dtValid
    def set_dtValid(self, dtValid): self.dtValid = dtValid
    def get_dtPriHab(self): return self.dtPriHab
    def set_dtPriHab(self, dtPriHab): self.dtPriHab = dtPriHab
    def get_categoriaCnh(self): return self.categoriaCnh
    def set_categoriaCnh(self, categoriaCnh): self.categoriaCnh = categoriaCnh
    def hasContent_(self):
        """Return True when at least one child element is populated."""
        return any(
            field is not None
            for field in (
                self.nrRegCnh, self.dtExped, self.ufCnh,
                self.dtValid, self.dtPriHab, self.categoriaCnh,
            )
        )
    def export(self, outfile, level, namespace_='', name_='TCnh', namespacedef_='', pretty_print=True):
        """Serialize this object (and its children) as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('TCnh')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TCnh')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='TCnh', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TCnh'):
        """No XML attributes are defined for ``TCnh``."""
    def exportChildren(self, outfile, level, namespace_='', name_='TCnh', fromsubclass_=False, pretty_print=True):
        """Write each populated child element on its own (indented) line."""
        eol_ = '\n' if pretty_print else ''
        def _emit(tag, rendered):
            # One '<tag>value</tag>' line, indented to the current level.
            showIndent(outfile, level, pretty_print)
            outfile.write('<%s%s>%s</%s%s>%s' % (namespace_, tag, rendered, namespace_, tag, eol_))
        if self.nrRegCnh is not None:
            _emit('nrRegCnh', self.gds_encode(self.gds_format_string(quote_xml(self.nrRegCnh), input_name='nrRegCnh')))
        if self.dtExped is not None:
            _emit('dtExped', self.gds_format_date(self.dtExped, input_name='dtExped'))
        if self.ufCnh is not None:
            _emit('ufCnh', self.gds_encode(self.gds_format_string(quote_xml(self.ufCnh), input_name='ufCnh')))
        if self.dtValid is not None:
            _emit('dtValid', self.gds_format_date(self.dtValid, input_name='dtValid'))
        if self.dtPriHab is not None:
            _emit('dtPriHab', self.gds_format_date(self.dtPriHab, input_name='dtPriHab'))
        if self.categoriaCnh is not None:
            _emit('categoriaCnh', self.gds_encode(self.gds_format_string(quote_xml(self.categoriaCnh), input_name='categoriaCnh')))
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for ``TCnh``."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign one parsed child element onto the matching attribute."""
        if nodeName_ in ('dtExped', 'dtValid', 'dtPriHab'):
            # Date-typed children share the same parse path.
            setattr(self, nodeName_, self.gds_parse_date(child_.text))
        elif nodeName_ in ('nrRegCnh', 'ufCnh', 'categoriaCnh'):
            # String-typed children share the same validation path.
            setattr(self, nodeName_, self.gds_validate_string(child_.text, node, nodeName_))
# end class TCnh
class nrRegCnh(GeneratedsSuper):
    """Generated binding for the ``nrRegCnh`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, nrRegCnh)
            if override is not None:
                return override(*args_, **kwargs_)
        if nrRegCnh.subclass:
            return nrRegCnh.subclass(*args_, **kwargs_)
        return nrRegCnh(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='nrRegCnh', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('nrRegCnh')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRegCnh')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRegCnh', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRegCnh'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='nrRegCnh', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class nrRegCnh
class ufCnh(GeneratedsSuper):
    """Generated binding for the ``ufCnh`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, ufCnh)
            if override is not None:
                return override(*args_, **kwargs_)
        if ufCnh.subclass:
            return ufCnh.subclass(*args_, **kwargs_)
        return ufCnh(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='ufCnh', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('ufCnh')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ufCnh')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='ufCnh', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ufCnh'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='ufCnh', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class ufCnh
class dtPriHab(GeneratedsSuper):
    """Generated binding for the ``dtPriHab`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, dtPriHab)
            if override is not None:
                return override(*args_, **kwargs_)
        if dtPriHab.subclass:
            return dtPriHab.subclass(*args_, **kwargs_)
        return dtPriHab(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='dtPriHab', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('dtPriHab')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtPriHab')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtPriHab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtPriHab'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='dtPriHab', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class dtPriHab
class categoriaCnh(GeneratedsSuper):
    """Generated binding for the ``categoriaCnh`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, categoriaCnh)
            if override is not None:
                return override(*args_, **kwargs_)
        if categoriaCnh.subclass:
            return categoriaCnh.subclass(*args_, **kwargs_)
        return categoriaCnh(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='categoriaCnh', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('categoriaCnh')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='categoriaCnh')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='categoriaCnh', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='categoriaCnh'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='categoriaCnh', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class categoriaCnh
class TEnderecoBrasil(GeneratedsSuper):
    """Address information for a residence in Brazil."""
    subclass = None
    superclass = None
    def __init__(self, tpLograd=None, dscLograd=None, nrLograd=None, complemento=None, bairro=None, cep=None, codMunic=None, uf=None):
        """Store the address fields; every field is optional (defaults to None)."""
        self.original_tagname_ = None
        self.tpLograd = tpLograd
        self.dscLograd = dscLograd
        self.nrLograd = nrLograd
        self.complemento = complemento
        self.bairro = bairro
        self.cep = cep
        self.codMunic = codMunic
        self.uf = uf
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, TEnderecoBrasil)
            if override is not None:
                return override(*args_, **kwargs_)
        if TEnderecoBrasil.subclass:
            return TEnderecoBrasil.subclass(*args_, **kwargs_)
        return TEnderecoBrasil(*args_, **kwargs_)
    # Thin accessors retained for generateDS API compatibility.
    def get_tpLograd(self): return self.tpLograd
    def set_tpLograd(self, tpLograd): self.tpLograd = tpLograd
    def get_dscLograd(self): return self.dscLograd
    def set_dscLograd(self, dscLograd): self.dscLograd = dscLograd
    def get_nrLograd(self): return self.nrLograd
    def set_nrLograd(self, nrLograd): self.nrLograd = nrLograd
    def get_complemento(self): return self.complemento
    def set_complemento(self, complemento): self.complemento = complemento
    def get_bairro(self): return self.bairro
    def set_bairro(self, bairro): self.bairro = bairro
    def get_cep(self): return self.cep
    def set_cep(self, cep): self.cep = cep
    def get_codMunic(self): return self.codMunic
    def set_codMunic(self, codMunic): self.codMunic = codMunic
    def get_uf(self): return self.uf
    def set_uf(self, uf): self.uf = uf
    def hasContent_(self):
        """Return True when at least one child element is populated."""
        return any(
            field is not None
            for field in (
                self.tpLograd, self.dscLograd, self.nrLograd, self.complemento,
                self.bairro, self.cep, self.codMunic, self.uf,
            )
        )
    def export(self, outfile, level, namespace_='', name_='TEnderecoBrasil', namespacedef_='', pretty_print=True):
        """Serialize this object (and its children) as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('TEnderecoBrasil')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEnderecoBrasil')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEnderecoBrasil', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEnderecoBrasil'):
        """No XML attributes are defined for ``TEnderecoBrasil``."""
    def exportChildren(self, outfile, level, namespace_='', name_='TEnderecoBrasil', fromsubclass_=False, pretty_print=True):
        """Write each populated child element on its own (indented) line."""
        eol_ = '\n' if pretty_print else ''
        def _emit(tag, rendered):
            # One '<tag>value</tag>' line, indented to the current level.
            showIndent(outfile, level, pretty_print)
            outfile.write('<%s%s>%s</%s%s>%s' % (namespace_, tag, rendered, namespace_, tag, eol_))
        if self.tpLograd is not None:
            _emit('tpLograd', self.gds_encode(self.gds_format_string(quote_xml(self.tpLograd), input_name='tpLograd')))
        if self.dscLograd is not None:
            _emit('dscLograd', self.gds_encode(self.gds_format_string(quote_xml(self.dscLograd), input_name='dscLograd')))
        if self.nrLograd is not None:
            _emit('nrLograd', self.gds_encode(self.gds_format_string(quote_xml(self.nrLograd), input_name='nrLograd')))
        if self.complemento is not None:
            _emit('complemento', self.gds_encode(self.gds_format_string(quote_xml(self.complemento), input_name='complemento')))
        if self.bairro is not None:
            _emit('bairro', self.gds_encode(self.gds_format_string(quote_xml(self.bairro), input_name='bairro')))
        if self.cep is not None:
            _emit('cep', self.gds_encode(self.gds_format_string(quote_xml(self.cep), input_name='cep')))
        if self.codMunic is not None:
            # codMunic is the only integer-typed child.
            _emit('codMunic', self.gds_format_integer(self.codMunic, input_name='codMunic'))
        if self.uf is not None:
            _emit('uf', self.gds_encode(self.gds_format_string(quote_xml(self.uf), input_name='uf')))
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for ``TEnderecoBrasil``."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign one parsed child element onto the matching attribute."""
        if nodeName_ in ('tpLograd', 'dscLograd', 'nrLograd', 'complemento',
                         'bairro', 'cep', 'uf'):
            # String-typed children share the same validation path.
            setattr(self, nodeName_, self.gds_validate_string(child_.text, node, nodeName_))
        elif nodeName_ == 'codMunic':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.codMunic = self.gds_validate_integer(ival_, node, 'codMunic')
# end class TEnderecoBrasil
class tpLograd(GeneratedsSuper):
    """Generated binding for the ``tpLograd`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, tpLograd)
            if override is not None:
                return override(*args_, **kwargs_)
        if tpLograd.subclass:
            return tpLograd.subclass(*args_, **kwargs_)
        return tpLograd(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='tpLograd', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('tpLograd')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLograd')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpLograd'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='tpLograd', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class tpLograd
class dscLograd(GeneratedsSuper):
    """Generated binding for the ``dscLograd`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, dscLograd)
            if override is not None:
                return override(*args_, **kwargs_)
        if dscLograd.subclass:
            return dscLograd.subclass(*args_, **kwargs_)
        return dscLograd(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='dscLograd', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('dscLograd')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscLograd')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscLograd'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='dscLograd', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class dscLograd
class nrLograd(GeneratedsSuper):
    """Generated binding for the ``nrLograd`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, nrLograd)
            if override is not None:
                return override(*args_, **kwargs_)
        if nrLograd.subclass:
            return nrLograd.subclass(*args_, **kwargs_)
        return nrLograd(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='nrLograd', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('nrLograd')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrLograd')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrLograd'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='nrLograd', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class nrLograd
class complemento(GeneratedsSuper):
    """Generated binding for the ``complemento`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, complemento)
            if override is not None:
                return override(*args_, **kwargs_)
        if complemento.subclass:
            return complemento.subclass(*args_, **kwargs_)
        return complemento(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='complemento', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('complemento')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='complemento')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='complemento', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='complemento'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='complemento', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class complemento
class bairro(GeneratedsSuper):
    """Generated binding for the ``bairro`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, bairro)
            if override is not None:
                return override(*args_, **kwargs_)
        if bairro.subclass:
            return bairro.subclass(*args_, **kwargs_)
        return bairro(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='bairro', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('bairro')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='bairro')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='bairro', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='bairro'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='bairro', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class bairro
class cep(GeneratedsSuper):
    """Generated binding for the ``cep`` element (no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Build an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, cep)
            if override is not None:
                return override(*args_, **kwargs_)
        if cep.subclass:
            return cep.subclass(*args_, **kwargs_)
        return cep(*args_, **kwargs_)
    def hasContent_(self):
        # The generated original tests ``if ():`` — an empty tuple, always falsy.
        return False
    def export(self, outfile, level, namespace_='', name_='cep', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        override_ns = GenerateDSNamespaceDefs_.get('cep')
        if override_ns is not None:
            namespacedef_ = override_ns
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cep')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='cep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cep'):
        """No attributes to write."""
    def exportChildren(self, outfile, level, namespace_='', name_='cep', fromsubclass_=False, pretty_print=True):
        """No child elements to write."""
    def build(self, node):
        """Populate this instance from the XML *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to read."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements to read."""
# end class cep
class TEnderecoExterior(GeneratedsSuper):
"""Informações do Endereço no Exterior"""
subclass = None
superclass = None
    def __init__(self, paisResid=None, dscLograd=None, nrLograd=None, complemento=None, bairro=None, nmCid=None, codPostal=None):
        """Store the foreign-address fields; every field is optional (None)."""
        self.original_tagname_ = None
        self.paisResid = paisResid
        self.dscLograd = dscLograd
        self.nrLograd = nrLograd
        self.complemento = complemento
        self.bairro = bairro
        self.nmCid = nmCid
        self.codPostal = codPostal
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TEnderecoExterior)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TEnderecoExterior.subclass:
return TEnderecoExterior.subclass(*args_, **kwargs_)
else:
return TEnderecoExterior(*args_, **kwargs_)
factory = staticmethod(factory)
def get_paisResid(self): return self.paisResid
def set_paisResid(self, paisResid): self.paisResid = paisResid
def get_dscLograd(self): return self.dscLograd
def set_dscLograd(self, dscLograd): self.dscLograd = dscLograd
def get_nrLograd(self): return self.nrLograd
def set_nrLograd(self, nrLograd): self.nrLograd = nrLograd
def get_complemento(self): return self.complemento
def set_complemento(self, complemento): self.complemento = complemento
def get_bairro(self): return self.bairro
def set_bairro(self, bairro): self.bairro = bairro
def get_nmCid(self): return self.nmCid
def set_nmCid(self, nmCid): self.nmCid = nmCid
def get_codPostal(self): return self.codPostal
def set_codPostal(self, codPostal): self.codPostal = codPostal
def hasContent_(self):
if (
self.paisResid is not None or
self.dscLograd is not None or
self.nrLograd is not None or
self.complemento is not None or
self.bairro is not None or
self.nmCid is not None or
self.codPostal is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='TEnderecoExterior', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEnderecoExterior')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEnderecoExterior')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='TEnderecoExterior', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEnderecoExterior'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='TEnderecoExterior', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.paisResid is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%spaisResid>%s</%spaisResid>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.paisResid), input_name='paisResid')), namespace_, eol_))
if self.dscLograd is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdscLograd>%s</%sdscLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscLograd), input_name='dscLograd')), namespace_, eol_))
if self.nrLograd is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%snrLograd>%s</%snrLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrLograd), input_name='nrLograd')), namespace_, eol_))
if self.complemento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%scomplemento>%s</%scomplemento>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.complemento), input_name='complemento')), namespace_, eol_))
if self.bairro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sbairro>%s</%sbairro>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.bairro), input_name='bairro')), namespace_, eol_))
if self.nmCid is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%snmCid>%s</%snmCid>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmCid), input_name='nmCid')), namespace_, eol_))
if self.codPostal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%scodPostal>%s</%scodPostal>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codPostal), input_name='codPostal')), namespace_, eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'paisResid':
paisResid_ = child_.text
paisResid_ = self.gds_validate_string(paisResid_, node, 'paisResid')
self.paisResid = paisResid_
elif nodeName_ == 'dscLograd':
dscLograd_ = child_.text
dscLograd_ = self.gds_validate_string(dscLograd_, node, 'dscLograd')
self.dscLograd = dscLograd_
elif nodeName_ == 'nrLograd':
nrLograd_ = child_.text
nrLograd_ = self.gds_validate_string(nrLograd_, node, 'nrLograd')
self.nrLograd = nrLograd_
elif nodeName_ == 'complemento':
complemento_ = child_.text
complemento_ = self.gds_validate_string(complemento_, node, 'complemento')
self.complemento = complemento_
elif nodeName_ == 'bairro':
bairro_ = child_.text
bairro_ = self.gds_validate_string(bairro_, node, 'bairro')
self.bairro = bairro_
elif nodeName_ == 'nmCid':
nmCid_ = child_.text
nmCid_ = self.gds_validate_string(nmCid_, node, 'nmCid')
self.nmCid = nmCid_
elif nodeName_ == 'codPostal':
codPostal_ = child_.text
codPostal_ = self.gds_validate_string(codPostal_, node, 'codPostal')
self.codPostal = codPostal_
# end class TEnderecoExterior
class paisResid(GeneratedsSuper):
    """Auto-generated placeholder for the ``paisResid`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, paisResid)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if paisResid.subclass:
            return paisResid.subclass(*args_, **kwargs_)
        return paisResid(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='paisResid', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('paisResid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='paisResid')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='paisResid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='paisResid'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='paisResid', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class paisResid
class nmCid(GeneratedsSuper):
    """Auto-generated placeholder for the ``nmCid`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, nmCid)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if nmCid.subclass:
            return nmCid.subclass(*args_, **kwargs_)
        return nmCid(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='nmCid', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmCid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmCid')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmCid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmCid'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='nmCid', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class nmCid
class codPostal(GeneratedsSuper):
    """Auto-generated placeholder for the ``codPostal`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, codPostal)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if codPostal.subclass:
            return codPostal.subclass(*args_, **kwargs_)
        return codPostal(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='codPostal', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codPostal')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codPostal')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codPostal', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codPostal'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='codPostal', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class codPostal
class TTrabEstrang(GeneratedsSuper):
    """Foreign-worker information (Informações do Trabalhador Estrangeiro)."""
    subclass = None
    superclass = None
    def __init__(self, dtChegada=None, classTrabEstrang=None, casadoBr=None, filhosBr=None):
        self.original_tagname_ = None
        # Accept either a date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtChegada, BaseStrType_):
            self.dtChegada = datetime_.datetime.strptime(dtChegada, '%Y-%m-%d').date()
        else:
            self.dtChegada = dtChegada
        self.classTrabEstrang = classTrabEstrang
        self.casadoBr = casadoBr
        self.filhosBr = filhosBr
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, TTrabEstrang)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if TTrabEstrang.subclass:
            return TTrabEstrang.subclass(*args_, **kwargs_)
        return TTrabEstrang(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_dtChegada(self):
        return self.dtChegada
    def set_dtChegada(self, dtChegada):
        self.dtChegada = dtChegada
    def get_classTrabEstrang(self):
        return self.classTrabEstrang
    def set_classTrabEstrang(self, classTrabEstrang):
        self.classTrabEstrang = classTrabEstrang
    def get_casadoBr(self):
        return self.casadoBr
    def set_casadoBr(self, casadoBr):
        self.casadoBr = casadoBr
    def get_filhosBr(self):
        return self.filhosBr
    def set_filhosBr(self, filhosBr):
        self.filhosBr = filhosBr
    def hasContent_(self):
        """Return True when at least one child element is populated."""
        members = (self.dtChegada, self.classTrabEstrang, self.casadoBr, self.filhosBr)
        return any(member is not None for member in members)
    def export(self, outfile, level, namespace_='', name_='TTrabEstrang', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TTrabEstrang')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TTrabEstrang')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TTrabEstrang', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TTrabEstrang'):
        """No XML attributes are modeled; nothing to serialize."""
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='TTrabEstrang', fromsubclass_=False, pretty_print=True):
        """Write each populated child element, in schema order."""
        eol_ = '\n' if pretty_print else ''
        if self.dtChegada is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtChegada>%s</%sdtChegada>%s' % (namespace_, self.gds_format_date(self.dtChegada, input_name='dtChegada'), namespace_, eol_))
        if self.classTrabEstrang is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sclassTrabEstrang>%s</%sclassTrabEstrang>%s' % (namespace_, self.gds_format_integer(self.classTrabEstrang, input_name='classTrabEstrang'), namespace_, eol_))
        # The remaining children are plain strings and share one code path.
        for tag_ in ('casadoBr', 'filhosBr'):
            value_ = getattr(self, tag_)
            if value_ is None:
                continue
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=tag_))
            outfile.write('<%s%s>%s</%s%s>%s' % (namespace_, tag_, encoded_, namespace_, tag_, eol_))
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are modeled; nothing to parse."""
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse a recognized child element into the matching member."""
        if nodeName_ == 'dtChegada':
            self.dtChegada = self.gds_parse_date(child_.text)
        elif nodeName_ == 'classTrabEstrang':
            try:
                value_ = int(child_.text)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.classTrabEstrang = self.gds_validate_integer(value_, node, 'classTrabEstrang')
        elif nodeName_ in ('casadoBr', 'filhosBr'):
            setattr(self, nodeName_, self.gds_validate_string(child_.text, node, nodeName_))
# end class TTrabEstrang
class dtChegada(GeneratedsSuper):
    """Auto-generated placeholder for the ``dtChegada`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, dtChegada)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if dtChegada.subclass:
            return dtChegada.subclass(*args_, **kwargs_)
        return dtChegada(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='dtChegada', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtChegada')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtChegada')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtChegada', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtChegada'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='dtChegada', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class dtChegada
class classTrabEstrang(GeneratedsSuper):
    """Auto-generated placeholder for the ``classTrabEstrang`` element; it
    models no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, classTrabEstrang)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if classTrabEstrang.subclass:
            return classTrabEstrang.subclass(*args_, **kwargs_)
        return classTrabEstrang(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='classTrabEstrang', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('classTrabEstrang')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='classTrabEstrang')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='classTrabEstrang', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='classTrabEstrang'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='classTrabEstrang', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class classTrabEstrang
class casadoBr(GeneratedsSuper):
    """Auto-generated placeholder for the ``casadoBr`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, casadoBr)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if casadoBr.subclass:
            return casadoBr.subclass(*args_, **kwargs_)
        return casadoBr(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='casadoBr', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('casadoBr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='casadoBr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='casadoBr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='casadoBr'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='casadoBr', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class casadoBr
class filhosBr(GeneratedsSuper):
    """Auto-generated placeholder for the ``filhosBr`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, filhosBr)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if filhosBr.subclass:
            return filhosBr.subclass(*args_, **kwargs_)
        return filhosBr(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='filhosBr', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('filhosBr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='filhosBr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='filhosBr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='filhosBr'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='filhosBr', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class filhosBr
class TDependente(GeneratedsSuper):
    """Dependent-person record: type, name, birth date, CPF and the
    income-tax / family-allowance / disability flags."""
    subclass = None
    superclass = None
    def __init__(self, tpDep=None, nmDep=None, dtNascto=None, cpfDep=None, depIRRF=None, depSF=None, incTrab=None):
        self.original_tagname_ = None
        self.tpDep = tpDep
        self.nmDep = nmDep
        # Accept either a date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtNascto, BaseStrType_):
            self.dtNascto = datetime_.datetime.strptime(dtNascto, '%Y-%m-%d').date()
        else:
            self.dtNascto = dtNascto
        self.cpfDep = cpfDep
        self.depIRRF = depIRRF
        self.depSF = depSF
        self.incTrab = incTrab
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, TDependente)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if TDependente.subclass:
            return TDependente.subclass(*args_, **kwargs_)
        return TDependente(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpDep(self):
        return self.tpDep
    def set_tpDep(self, tpDep):
        self.tpDep = tpDep
    def get_nmDep(self):
        return self.nmDep
    def set_nmDep(self, nmDep):
        self.nmDep = nmDep
    def get_dtNascto(self):
        return self.dtNascto
    def set_dtNascto(self, dtNascto):
        self.dtNascto = dtNascto
    def get_cpfDep(self):
        return self.cpfDep
    def set_cpfDep(self, cpfDep):
        self.cpfDep = cpfDep
    def get_depIRRF(self):
        return self.depIRRF
    def set_depIRRF(self, depIRRF):
        self.depIRRF = depIRRF
    def get_depSF(self):
        return self.depSF
    def set_depSF(self, depSF):
        self.depSF = depSF
    def get_incTrab(self):
        return self.incTrab
    def set_incTrab(self, incTrab):
        self.incTrab = incTrab
    def hasContent_(self):
        """Return True when at least one child element is populated."""
        members = (
            self.tpDep, self.nmDep, self.dtNascto, self.cpfDep,
            self.depIRRF, self.depSF, self.incTrab,
        )
        return any(member is not None for member in members)
    def export(self, outfile, level, namespace_='', name_='TDependente', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TDependente')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TDependente')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TDependente', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TDependente'):
        """No XML attributes are modeled; nothing to serialize."""
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='TDependente', fromsubclass_=False, pretty_print=True):
        """Write each populated child element, preserving schema order:
        tpDep, nmDep, dtNascto, cpfDep, depIRRF, depSF, incTrab."""
        eol_ = '\n' if pretty_print else ''
        def write_string_(tag_):
            value_ = getattr(self, tag_)
            if value_ is None:
                return
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=tag_))
            outfile.write('<%s%s>%s</%s%s>%s' % (namespace_, tag_, encoded_, namespace_, tag_, eol_))
        write_string_('tpDep')
        write_string_('nmDep')
        if self.dtNascto is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtNascto>%s</%sdtNascto>%s' % (namespace_, self.gds_format_date(self.dtNascto, input_name='dtNascto'), namespace_, eol_))
        for tag_ in ('cpfDep', 'depIRRF', 'depSF', 'incTrab'):
            write_string_(tag_)
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are modeled; nothing to parse."""
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse a recognized child element into the matching member."""
        if nodeName_ == 'dtNascto':
            self.dtNascto = self.gds_parse_date(child_.text)
        elif nodeName_ in ('tpDep', 'nmDep', 'cpfDep', 'depIRRF', 'depSF', 'incTrab'):
            setattr(self, nodeName_, self.gds_validate_string(child_.text, node, nodeName_))
# end class TDependente
class tpDep(GeneratedsSuper):
    """Auto-generated placeholder for the ``tpDep`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, tpDep)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if tpDep.subclass:
            return tpDep.subclass(*args_, **kwargs_)
        return tpDep(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='tpDep', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpDep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpDep')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpDep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpDep'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='tpDep', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class tpDep
class nmDep(GeneratedsSuper):
    """Auto-generated placeholder for the ``nmDep`` element; it models
    no attributes and no child elements."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered replacement class when available.
        if CurrentSubclassModule_ is not None:
            replacement_ = getSubclassFromModule_(
                CurrentSubclassModule_, nmDep)
            if replacement_ is not None:
                return replacement_(*args_, **kwargs_)
        if nmDep.subclass:
            return nmDep.subclass(*args_, **kwargs_)
        return nmDep(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are modeled, so an instance never carries content.
        return False
    def export(self, outfile, level, namespace_='', name_='nmDep', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmDep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmDep')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmDep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmDep'):
        return None
    def exportChildren(self, outfile, level, namespace_='', name_='nmDep', fromsubclass_=False, pretty_print=True):
        return None
    def build(self, node):
        """Populate this instance from an XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        return None
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        return None
# end class nmDep
class cpfDep(GeneratedsSuper):
    """Auto-generated binding for the <cpfDep> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to cpfDep itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cpfDep)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = cpfDep.subclass or cpfDep
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='cpfDep', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfDep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfDep')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfDep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfDep'):
        """No XML attributes are defined for cpfDep."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cpfDep', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for cpfDep."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for cpfDep."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for cpfDep."""
        pass
# end class cpfDep
class depIRRF(GeneratedsSuper):
    """Auto-generated binding for the <depIRRF> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to depIRRF itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, depIRRF)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = depIRRF.subclass or depIRRF
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='depIRRF', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('depIRRF')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='depIRRF')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='depIRRF', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='depIRRF'):
        """No XML attributes are defined for depIRRF."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='depIRRF', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for depIRRF."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for depIRRF."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for depIRRF."""
        pass
# end class depIRRF
class depSF(GeneratedsSuper):
    """Auto-generated binding for the <depSF> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to depSF itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, depSF)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = depSF.subclass or depSF
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='depSF', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('depSF')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='depSF')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='depSF', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='depSF'):
        """No XML attributes are defined for depSF."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='depSF', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for depSF."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for depSF."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for depSF."""
        pass
# end class depSF
class incTrab(GeneratedsSuper):
    """Auto-generated binding for the <incTrab> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to incTrab itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, incTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = incTrab.subclass or incTrab
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='incTrab', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('incTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='incTrab')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='incTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='incTrab'):
        """No XML attributes are defined for incTrab."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='incTrab', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for incTrab."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for incTrab."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for incTrab."""
        pass
# end class incTrab
class TContato(GeneratedsSuper):
    """Contact information (Informações de Contato).

    Holds up to four optional string children: primary/alternate phone
    and primary/alternate e-mail. Any of them may be ``None`` when the
    corresponding element is absent.
    """
    subclass = None
    superclass = None
    def __init__(self, fonePrinc=None, foneAlternat=None, emailPrinc=None, emailAlternat=None):
        self.original_tagname_ = None
        self.fonePrinc = fonePrinc
        self.foneAlternat = foneAlternat
        self.emailPrinc = emailPrinc
        self.emailAlternat = emailAlternat
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to TContato itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TContato)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = TContato.subclass or TContato
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_fonePrinc(self): return self.fonePrinc
    def set_fonePrinc(self, fonePrinc): self.fonePrinc = fonePrinc
    def get_foneAlternat(self): return self.foneAlternat
    def set_foneAlternat(self, foneAlternat): self.foneAlternat = foneAlternat
    def get_emailPrinc(self): return self.emailPrinc
    def set_emailPrinc(self, emailPrinc): self.emailPrinc = emailPrinc
    def get_emailAlternat(self): return self.emailAlternat
    def set_emailAlternat(self, emailAlternat): self.emailAlternat = emailAlternat
    def hasContent_(self):
        # Content exists when any of the four optional children is set.
        return (
            self.fonePrinc is not None
            or self.foneAlternat is not None
            or self.emailPrinc is not None
            or self.emailAlternat is not None
        )
    def export(self, outfile, level, namespace_='', name_='TContato', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TContato')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TContato')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='TContato', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TContato'):
        """No XML attributes are defined for TContato."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TContato', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        # Emit each present string child in schema order.
        for tag_, value_ in (
            ('fonePrinc', self.fonePrinc),
            ('foneAlternat', self.foneAlternat),
            ('emailPrinc', self.emailPrinc),
            ('emailAlternat', self.emailAlternat),
        ):
            if value_ is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s%s>%s</%s%s>%s' % (
                    namespace_, tag_,
                    self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=tag_)),
                    namespace_, tag_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for TContato."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All four children are plain strings stored under the tag name.
        if nodeName_ in ('fonePrinc', 'foneAlternat', 'emailPrinc', 'emailAlternat'):
            value_ = self.gds_validate_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, value_)
# end class TContato
class fonePrinc(GeneratedsSuper):
    """Auto-generated binding for the <fonePrinc> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to fonePrinc itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, fonePrinc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = fonePrinc.subclass or fonePrinc
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='fonePrinc', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('fonePrinc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='fonePrinc')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='fonePrinc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='fonePrinc'):
        """No XML attributes are defined for fonePrinc."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='fonePrinc', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for fonePrinc."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for fonePrinc."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for fonePrinc."""
        pass
# end class fonePrinc
class foneAlternat(GeneratedsSuper):
    """Auto-generated binding for the <foneAlternat> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to foneAlternat itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, foneAlternat)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = foneAlternat.subclass or foneAlternat
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='foneAlternat', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('foneAlternat')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='foneAlternat')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='foneAlternat', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='foneAlternat'):
        """No XML attributes are defined for foneAlternat."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='foneAlternat', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for foneAlternat."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for foneAlternat."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for foneAlternat."""
        pass
# end class foneAlternat
class emailPrinc(GeneratedsSuper):
    """Auto-generated binding for the <emailPrinc> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to emailPrinc itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, emailPrinc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = emailPrinc.subclass or emailPrinc
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='emailPrinc', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('emailPrinc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='emailPrinc')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='emailPrinc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='emailPrinc'):
        """No XML attributes are defined for emailPrinc."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='emailPrinc', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for emailPrinc."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for emailPrinc."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for emailPrinc."""
        pass
# end class emailPrinc
class emailAlternat(GeneratedsSuper):
    """Auto-generated binding for the <emailAlternat> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to emailAlternat itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, emailAlternat)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = emailAlternat.subclass or emailAlternat
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='emailAlternat', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('emailAlternat')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='emailAlternat')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='emailAlternat', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='emailAlternat'):
        """No XML attributes are defined for emailAlternat."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='emailAlternat', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for emailAlternat."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for emailAlternat."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for emailAlternat."""
        pass
# end class emailAlternat
class TFgts(GeneratedsSuper):
    """FGTS information (Informações do FGTS).

    Children: ``opcFGTS`` (integer option flag) and ``dtOpcFGTS``
    (option date); either may be ``None`` when absent.
    """
    subclass = None
    superclass = None
    def __init__(self, opcFGTS=None, dtOpcFGTS=None):
        self.original_tagname_ = None
        self.opcFGTS = opcFGTS
        # Accept either a date object or an ISO 'YYYY-MM-DD' string.
        if isinstance(dtOpcFGTS, BaseStrType_):
            self.dtOpcFGTS = datetime_.datetime.strptime(dtOpcFGTS, '%Y-%m-%d').date()
        else:
            self.dtOpcFGTS = dtOpcFGTS
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to TFgts itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TFgts)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = TFgts.subclass or TFgts
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_opcFGTS(self): return self.opcFGTS
    def set_opcFGTS(self, opcFGTS): self.opcFGTS = opcFGTS
    def get_dtOpcFGTS(self): return self.dtOpcFGTS
    def set_dtOpcFGTS(self, dtOpcFGTS): self.dtOpcFGTS = dtOpcFGTS
    def hasContent_(self):
        # Content exists when either optional child is set.
        return self.opcFGTS is not None or self.dtOpcFGTS is not None
    def export(self, outfile, level, namespace_='', name_='TFgts', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TFgts')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TFgts')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='TFgts', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TFgts'):
        """No XML attributes are defined for TFgts."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TFgts', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.opcFGTS is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sopcFGTS>%s</%sopcFGTS>%s' % (
                namespace_,
                self.gds_format_integer(self.opcFGTS, input_name='opcFGTS'),
                namespace_, eol_))
        if self.dtOpcFGTS is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtOpcFGTS>%s</%sdtOpcFGTS>%s' % (
                namespace_,
                self.gds_format_date(self.dtOpcFGTS, input_name='dtOpcFGTS'),
                namespace_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for TFgts."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'opcFGTS':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                # Report non-integer text with element context.
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.opcFGTS = self.gds_validate_integer(ival_, node, 'opcFGTS')
        elif nodeName_ == 'dtOpcFGTS':
            self.dtOpcFGTS = self.gds_parse_date(child_.text)
# end class TFgts
class opcFGTS(GeneratedsSuper):
    """Auto-generated binding for the <opcFGTS> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to opcFGTS itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, opcFGTS)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = opcFGTS.subclass or opcFGTS
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='opcFGTS', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('opcFGTS')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='opcFGTS')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='opcFGTS', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='opcFGTS'):
        """No XML attributes are defined for opcFGTS."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='opcFGTS', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for opcFGTS."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for opcFGTS."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for opcFGTS."""
        pass
# end class opcFGTS
class dtOpcFGTS(GeneratedsSuper):
    """Auto-generated binding for the <dtOpcFGTS> schema element.

    The schema defines no attributes or children for this element; an
    instance only records the tag name it was parsed from.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module first, then the class-level
        # ``subclass`` hook, and finally fall back to dtOpcFGTS itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtOpcFGTS)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        impl_class = dtOpcFGTS.subclass or dtOpcFGTS
        return impl_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # No members are generated for this element; it is always empty.
        return False
    def export(self, outfile, level, namespace_='', name_='dtOpcFGTS', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtOpcFGTS')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtOpcFGTS')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtOpcFGTS', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtOpcFGTS'):
        """No XML attributes are defined for dtOpcFGTS."""
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtOpcFGTS', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for dtOpcFGTS."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for dtOpcFGTS."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are defined for dtOpcFGTS."""
        pass
# end class dtOpcFGTS
class TDadosContrato(GeneratedsSuper):
"""Informações do Contrato de Trabalho"""
subclass = None
superclass = None
def __init__(self, codCargo=None, codFuncao=None, codCateg=None, codCarreira=None, dtIngrCarr=None, remuneracao=None, duracao=None, localTrabalho=None, horContratual=None, filiacaoSindical=None, alvaraJudicial=None, observacoes=None):
self.original_tagname_ = None
self.codCargo = codCargo
self.codFuncao = codFuncao
self.codCateg = codCateg
self.codCarreira = codCarreira
if isinstance(dtIngrCarr, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(dtIngrCarr, '%Y-%m-%d').date()
else:
initvalue_ = dtIngrCarr
self.dtIngrCarr = initvalue_
self.remuneracao = remuneracao
self.duracao = duracao
self.localTrabalho = localTrabalho
self.horContratual = horContratual
if filiacaoSindical is None:
self.filiacaoSindical = []
else:
self.filiacaoSindical = filiacaoSindical
self.alvaraJudicial = alvaraJudicial
if observacoes is None:
self.observacoes = []
else:
self.observacoes = observacoes
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TDadosContrato)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TDadosContrato.subclass:
return TDadosContrato.subclass(*args_, **kwargs_)
else:
return TDadosContrato(*args_, **kwargs_)
factory = staticmethod(factory)
def get_codCargo(self): return self.codCargo
def set_codCargo(self, codCargo): self.codCargo = codCargo
def get_codFuncao(self): return self.codFuncao
def set_codFuncao(self, codFuncao): self.codFuncao = codFuncao
def get_codCateg(self): return self.codCateg
def set_codCateg(self, codCateg): self.codCateg = codCateg
def get_codCarreira(self): return self.codCarreira
def set_codCarreira(self, codCarreira): self.codCarreira = codCarreira
def get_dtIngrCarr(self): return self.dtIngrCarr
def set_dtIngrCarr(self, dtIngrCarr): self.dtIngrCarr = dtIngrCarr
def get_remuneracao(self): return self.remuneracao
def set_remuneracao(self, remuneracao): self.remuneracao = remuneracao
def get_duracao(self): return self.duracao
def set_duracao(self, duracao): self.duracao = duracao
def get_localTrabalho(self): return self.localTrabalho
def set_localTrabalho(self, localTrabalho): self.localTrabalho = localTrabalho
def get_horContratual(self): return self.horContratual
def set_horContratual(self, horContratual): self.horContratual = horContratual
def get_filiacaoSindical(self): return self.filiacaoSindical
def set_filiacaoSindical(self, filiacaoSindical): self.filiacaoSindical = filiacaoSindical
def add_filiacaoSindical(self, value): self.filiacaoSindical.append(value)
def insert_filiacaoSindical_at(self, index, value): self.filiacaoSindical.insert(index, value)
def replace_filiacaoSindical_at(self, index, value): self.filiacaoSindical[index] = value
def get_alvaraJudicial(self): return self.alvaraJudicial
def set_alvaraJudicial(self, alvaraJudicial): self.alvaraJudicial = alvaraJudicial
def get_observacoes(self): return self.observacoes
def set_observacoes(self, observacoes): self.observacoes = observacoes
def add_observacoes(self, value): self.observacoes.append(value)
def insert_observacoes_at(self, index, value): self.observacoes.insert(index, value)
def replace_observacoes_at(self, index, value): self.observacoes[index] = value
def hasContent_(self):
if (
self.codCargo is not None or
self.codFuncao is not None or
self.codCateg is not None or
self.codCarreira is not None or
self.dtIngrCarr is not None or
self.remuneracao is not None or
self.duracao is not None or
self.localTrabalho is not None or
self.horContratual is not None or
self.filiacaoSindical or
self.alvaraJudicial is not None or
self.observacoes
):
return True
else:
return False
    def export(self, outfile, level, namespace_='', name_='TDadosContrato', namespacedef_='', pretty_print=True):
        """Serialize this object to *outfile* as an XML element.

        The tag name defaults to *name_* but is overridden by the tag the
        object was originally parsed from, when known.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TDadosContrato')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name this object was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TDadosContrato')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TDadosContrato', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TDadosContrato'):
        # No XML attributes are defined for TDadosContrato.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TDadosContrato', fromsubclass_=False, pretty_print=True):
        """Write every populated child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Simple-typed children are formatted inline.
        if self.codCargo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodCargo>%s</%scodCargo>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codCargo), input_name='codCargo')), namespace_, eol_))
        if self.codFuncao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodFuncao>%s</%scodFuncao>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codFuncao), input_name='codFuncao')), namespace_, eol_))
        if self.codCateg is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodCateg>%s</%scodCateg>%s' % (namespace_, self.gds_format_integer(self.codCateg, input_name='codCateg'), namespace_, eol_))
        if self.codCarreira is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodCarreira>%s</%scodCarreira>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codCarreira), input_name='codCarreira')), namespace_, eol_))
        if self.dtIngrCarr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtIngrCarr>%s</%sdtIngrCarr>%s' % (namespace_, self.gds_format_date(self.dtIngrCarr, input_name='dtIngrCarr'), namespace_, eol_))
        # Complex-typed children delegate to their own export().
        if self.remuneracao is not None:
            self.remuneracao.export(outfile, level, namespace_, name_='remuneracao', pretty_print=pretty_print)
        if self.duracao is not None:
            self.duracao.export(outfile, level, namespace_, name_='duracao', pretty_print=pretty_print)
        if self.localTrabalho is not None:
            self.localTrabalho.export(outfile, level, namespace_, name_='localTrabalho', pretty_print=pretty_print)
        if self.horContratual is not None:
            self.horContratual.export(outfile, level, namespace_, name_='horContratual', pretty_print=pretty_print)
        # Repeating children: emit one element per list entry.
        for filiacaoSindical_ in self.filiacaoSindical:
            filiacaoSindical_.export(outfile, level, namespace_, name_='filiacaoSindical', pretty_print=pretty_print)
        if self.alvaraJudicial is not None:
            self.alvaraJudicial.export(outfile, level, namespace_, name_='alvaraJudicial', pretty_print=pretty_print)
        for observacoes_ in self.observacoes:
            observacoes_.export(outfile, level, namespace_, name_='observacoes', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for TDadosContrato.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign one parsed child element onto the matching attribute."""
        # Simple string children.
        if nodeName_ == 'codCargo':
            codCargo_ = child_.text
            codCargo_ = self.gds_validate_string(codCargo_, node, 'codCargo')
            self.codCargo = codCargo_
        elif nodeName_ == 'codFuncao':
            codFuncao_ = child_.text
            codFuncao_ = self.gds_validate_string(codFuncao_, node, 'codFuncao')
            self.codFuncao = codFuncao_
        # Integer child: parse errors surface as XML parse errors.
        elif nodeName_ == 'codCateg':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'codCateg')
            self.codCateg = ival_
        elif nodeName_ == 'codCarreira':
            codCarreira_ = child_.text
            codCarreira_ = self.gds_validate_string(codCarreira_, node, 'codCarreira')
            self.codCarreira = codCarreira_
        # Date child.
        elif nodeName_ == 'dtIngrCarr':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtIngrCarr = dval_
        # Complex-typed children: build a nested binding object.
        elif nodeName_ == 'remuneracao':
            obj_ = TRemun.factory()
            obj_.build(child_)
            self.remuneracao = obj_
            obj_.original_tagname_ = 'remuneracao'
        elif nodeName_ == 'duracao':
            obj_ = duracao.factory()
            obj_.build(child_)
            self.duracao = obj_
            obj_.original_tagname_ = 'duracao'
        elif nodeName_ == 'localTrabalho':
            obj_ = localTrabalho.factory()
            obj_.build(child_)
            self.localTrabalho = obj_
            obj_.original_tagname_ = 'localTrabalho'
        elif nodeName_ == 'horContratual':
            obj_ = horContratual.factory()
            obj_.build(child_)
            self.horContratual = obj_
            obj_.original_tagname_ = 'horContratual'
        # Repeating children are appended, not assigned.
        elif nodeName_ == 'filiacaoSindical':
            obj_ = filiacaoSindical.factory()
            obj_.build(child_)
            self.filiacaoSindical.append(obj_)
            obj_.original_tagname_ = 'filiacaoSindical'
        elif nodeName_ == 'alvaraJudicial':
            obj_ = alvaraJudicial.factory()
            obj_.build(child_)
            self.alvaraJudicial = obj_
            obj_.original_tagname_ = 'alvaraJudicial'
        elif nodeName_ == 'observacoes':
            obj_ = observacoes.factory()
            obj_.build(child_)
            self.observacoes.append(obj_)
            obj_.original_tagname_ = 'observacoes'
# end class TDadosContrato
class codCargo(GeneratedsSuper):
    """Generated binding for the empty ``codCargo`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, codCargo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = codCargo.subclass if codCargo.subclass else codCargo
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='codCargo', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codCargo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codCargo')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codCargo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codCargo'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codCargo', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class codCargo
class codFuncao(GeneratedsSuper):
    """Generated binding for the empty ``codFuncao`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, codFuncao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = codFuncao.subclass if codFuncao.subclass else codFuncao
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='codFuncao', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codFuncao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codFuncao')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codFuncao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codFuncao'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codFuncao', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class codFuncao
class codCateg(GeneratedsSuper):
    """Generated binding for the empty ``codCateg`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, codCateg)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = codCateg.subclass if codCateg.subclass else codCateg
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='codCateg', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codCateg')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codCateg')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codCateg', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codCateg'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codCateg', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class codCateg
class codCarreira(GeneratedsSuper):
    """Generated binding for the empty ``codCarreira`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, codCarreira)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = codCarreira.subclass if codCarreira.subclass else codCarreira
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='codCarreira', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codCarreira')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codCarreira')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codCarreira', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codCarreira'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codCarreira', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class codCarreira
class dtIngrCarr(GeneratedsSuper):
    """Generated binding for the empty ``dtIngrCarr`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, dtIngrCarr)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = dtIngrCarr.subclass if dtIngrCarr.subclass else dtIngrCarr
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='dtIngrCarr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtIngrCarr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtIngrCarr')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtIngrCarr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtIngrCarr'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtIngrCarr', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class dtIngrCarr
class duracao(GeneratedsSuper):
    """Duration of the employment contract.

    Children: ``tpContr`` (contract-type code, integer), ``dtTerm``
    (termination date) and ``clauAsseg`` (assurance-clause indicator,
    string). Original docstring: "Duração do Contrato de Trabalho".
    """
    subclass = None
    superclass = None
    def __init__(self, tpContr=None, dtTerm=None, clauAsseg=None):
        self.original_tagname_ = None
        self.tpContr = tpContr
        # Accept dtTerm either as an ISO 'YYYY-MM-DD' string or as an
        # already-built date object; strings are parsed up front.
        if isinstance(dtTerm, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtTerm, '%Y-%m-%d').date()
        else:
            initvalue_ = dtTerm
        self.dtTerm = initvalue_
        self.clauAsseg = clauAsseg
    def factory(*args_, **kwargs_):
        # Subclass registered via an external module takes precedence,
        # then a directly-assigned subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, duracao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if duracao.subclass:
            return duracao.subclass(*args_, **kwargs_)
        else:
            return duracao(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors: one get_/set_ pair per child element.
    def get_tpContr(self): return self.tpContr
    def set_tpContr(self, tpContr): self.tpContr = tpContr
    def get_dtTerm(self): return self.dtTerm
    def set_dtTerm(self, dtTerm): self.dtTerm = dtTerm
    def get_clauAsseg(self): return self.clauAsseg
    def set_clauAsseg(self, clauAsseg): self.clauAsseg = clauAsseg
    def hasContent_(self):
        # True when at least one child element has been set.
        if (
            self.tpContr is not None or
            self.dtTerm is not None or
            self.clauAsseg is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='duracao', namespacedef_='', pretty_print=True):
        """Serialize this object to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('duracao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name this object was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='duracao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='duracao', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children set: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='duracao'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='duracao', fromsubclass_=False, pretty_print=True):
        """Write each populated child element in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpContr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpContr>%s</%stpContr>%s' % (namespace_, self.gds_format_integer(self.tpContr, input_name='tpContr'), namespace_, eol_))
        if self.dtTerm is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtTerm>%s</%sdtTerm>%s' % (namespace_, self.gds_format_date(self.dtTerm, input_name='dtTerm'), namespace_, eol_))
        if self.clauAsseg is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sclauAsseg>%s</%sclauAsseg>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.clauAsseg), input_name='clauAsseg')), namespace_, eol_))
    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child's local tag name.
        if nodeName_ == 'tpContr':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpContr')
            self.tpContr = ival_
        elif nodeName_ == 'dtTerm':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtTerm = dval_
        elif nodeName_ == 'clauAsseg':
            clauAsseg_ = child_.text
            clauAsseg_ = self.gds_validate_string(clauAsseg_, node, 'clauAsseg')
            self.clauAsseg = clauAsseg_
# end class duracao
class tpContr(GeneratedsSuper):
    """Generated binding for the empty ``tpContr`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, tpContr)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = tpContr.subclass if tpContr.subclass else tpContr
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='tpContr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpContr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpContr')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpContr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpContr'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpContr', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class tpContr
class dtTerm(GeneratedsSuper):
    """Generated binding for the empty ``dtTerm`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, dtTerm)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = dtTerm.subclass if dtTerm.subclass else dtTerm
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='dtTerm', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtTerm')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtTerm')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtTerm', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtTerm'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtTerm', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class dtTerm
class clauAsseg(GeneratedsSuper):
    """Generated binding for the empty ``clauAsseg`` XSD element.

    The element declares no attributes and no child content; the class
    exists only so the element can round-trip through parse and export.
    """
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name this instance was parsed from, if any.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # A subclass registered through an external subclass module wins.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, clauAsseg)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Otherwise use a directly-assigned subclass, else this class itself.
        chosen_cls = clauAsseg.subclass if clauAsseg.subclass else clauAsseg
        return chosen_cls(*args_, **kwargs_)

    def hasContent_(self):
        # The schema defines neither children nor text for this element.
        return False

    def export(self, outfile, level, namespace_='', name_='clauAsseg', namespacedef_='', pretty_print=True):
        """Write this element to *outfile*; always self-closing (no content)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('clauAsseg')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='clauAsseg')
        if self.hasContent_():
            # Unreachable for this empty element; kept for generator symmetry.
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='clauAsseg', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='clauAsseg'):
        # No attributes to serialize.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='clauAsseg', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children expected on this element.
        pass
# end class clauAsseg
class localTrabalho(GeneratedsSuper):
    """Binding for the ``localTrabalho`` element (workplace information).

    Carries two optional alternative children: ``localTrabGeral`` (general
    workplace, a TLocalTrab) and ``localTrabDom`` (address, a
    TEnderecoBrasil). Original docstring: "Informações do local de
    trabalho".
    """
    subclass = None
    superclass = None

    def __init__(self, localTrabGeral=None, localTrabDom=None):
        self.original_tagname_ = None
        self.localTrabGeral = localTrabGeral
        self.localTrabDom = localTrabDom

    @staticmethod
    def factory(*args_, **kwargs_):
        # An externally-registered subclass module wins over everything else.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, localTrabalho)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        # Then a directly-assigned subclass, then this class itself.
        chosen_cls = localTrabalho.subclass if localTrabalho.subclass else localTrabalho
        return chosen_cls(*args_, **kwargs_)

    def get_localTrabGeral(self):
        return self.localTrabGeral

    def set_localTrabGeral(self, localTrabGeral):
        self.localTrabGeral = localTrabGeral

    def get_localTrabDom(self):
        return self.localTrabDom

    def set_localTrabDom(self, localTrabDom):
        self.localTrabDom = localTrabDom

    def hasContent_(self):
        # Content exists when either child has been assigned.
        return self.localTrabGeral is not None or self.localTrabDom is not None

    def export(self, outfile, level, namespace_='', name_='localTrabalho', namespacedef_='', pretty_print=True):
        """Serialize this element (and any children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('localTrabalho')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        # Prefer the tag name this object was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='localTrabalho')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='localTrabalho', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='localTrabalho'):
        # No XML attributes are defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='localTrabalho', fromsubclass_=False, pretty_print=True):
        # Delegate serialization to whichever child objects are present.
        if self.localTrabGeral is not None:
            self.localTrabGeral.export(outfile, level, namespace_, name_='localTrabGeral', pretty_print=pretty_print)
        if self.localTrabDom is not None:
            self.localTrabDom.export(outfile, level, namespace_, name_='localTrabDom', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes expected on this element.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Build the nested binding object for the matching child tag.
        if nodeName_ == 'localTrabGeral':
            obj_ = TLocalTrab.factory()
            obj_.build(child_)
            self.localTrabGeral = obj_
            obj_.original_tagname_ = 'localTrabGeral'
        elif nodeName_ == 'localTrabDom':
            obj_ = TEnderecoBrasil.factory()
            obj_.build(child_)
            self.localTrabDom = obj_
            obj_.original_tagname_ = 'localTrabDom'
# end class localTrabalho
class horContratual(GeneratedsSuper):
"""Informações do Horário Contratual do Trabalhador. O preenchimento é
obrigatório se {tpRegJor} = [1]."""
subclass = None
superclass = None
    def __init__(self, qtdHrsSem=None, tpJornada=None, dscTpJorn=None, tmpParc=None, horario=None):
        self.original_tagname_ = None
        self.qtdHrsSem = qtdHrsSem
        self.tpJornada = tpJornada
        self.dscTpJorn = dscTpJorn
        self.tmpParc = tmpParc
        # horario repeats; default to a fresh list per instance (never a
        # shared mutable default).
        if horario is None:
            self.horario = []
        else:
            self.horario = horario
    def factory(*args_, **kwargs_):
        # Subclass registered via an external module takes precedence,
        # then a directly-assigned subclass, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, horContratual)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if horContratual.subclass:
            return horContratual.subclass(*args_, **kwargs_)
        else:
            return horContratual(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors; horario gets list-style helpers as well.
    def get_qtdHrsSem(self): return self.qtdHrsSem
    def set_qtdHrsSem(self, qtdHrsSem): self.qtdHrsSem = qtdHrsSem
    def get_tpJornada(self): return self.tpJornada
    def set_tpJornada(self, tpJornada): self.tpJornada = tpJornada
    def get_dscTpJorn(self): return self.dscTpJorn
    def set_dscTpJorn(self, dscTpJorn): self.dscTpJorn = dscTpJorn
    def get_tmpParc(self): return self.tmpParc
    def set_tmpParc(self, tmpParc): self.tmpParc = tmpParc
    def get_horario(self): return self.horario
    def set_horario(self, horario): self.horario = horario
    def add_horario(self, value): self.horario.append(value)
    def insert_horario_at(self, index, value): self.horario.insert(index, value)
    def replace_horario_at(self, index, value): self.horario[index] = value
    def hasContent_(self):
        # True when any scalar child is set or at least one horario exists.
        if (
            self.qtdHrsSem is not None or
            self.tpJornada is not None or
            self.dscTpJorn is not None or
            self.tmpParc is not None or
            self.horario
        ):
            return True
        else:
            return False
def export(self, outfile, level, namespace_='', name_='horContratual', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('horContratual')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='horContratual')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='horContratual', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='horContratual'):
        # horContratual declares no XML attributes; nothing to write.
        pass
def exportChildren(self, outfile, level, namespace_='', name_='horContratual', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.qtdHrsSem is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sqtdHrsSem>%s</%sqtdHrsSem>%s' % (namespace_, self.gds_format_float(self.qtdHrsSem, input_name='qtdHrsSem'), namespace_, eol_))
if self.tpJornada is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%stpJornada>%s</%stpJornada>%s' % (namespace_, self.gds_format_integer(self.tpJornada, input_name='tpJornada'), namespace_, eol_))
if self.dscTpJorn is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdscTpJorn>%s</%sdscTpJorn>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscTpJorn), input_name='dscTpJorn')), namespace_, eol_))
if self.tmpParc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%stmpParc>%s</%stmpParc>%s' % (namespace_, self.gds_format_integer(self.tmpParc, input_name='tmpParc'), namespace_, eol_))
for horario_ in self.horario:
horario_.export(outfile, level, namespace_, name_='horario', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # horContratual declares no XML attributes; nothing to read.
        pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'qtdHrsSem':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'qtdHrsSem')
self.qtdHrsSem = fval_
elif nodeName_ == 'tpJornada':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'tpJornada')
self.tpJornada = ival_
elif nodeName_ == 'dscTpJorn':
dscTpJorn_ = child_.text
dscTpJorn_ = self.gds_validate_string(dscTpJorn_, node, 'dscTpJorn')
self.dscTpJorn = dscTpJorn_
elif nodeName_ == 'tmpParc':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'tmpParc')
self.tmpParc = ival_
elif nodeName_ == 'horario':
obj_ = THorario.factory()
obj_.build(child_)
self.horario.append(obj_)
obj_.original_tagname_ = 'horario'
# end class horContratual
class qtdHrsSem(GeneratedsSuper):
    """Generated binding for the qtdHrsSem element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate qtdHrsSem, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, qtdHrsSem)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if qtdHrsSem.subclass:
            return qtdHrsSem.subclass(*args_, **kwargs_)
        return qtdHrsSem(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='qtdHrsSem', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('qtdHrsSem')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qtdHrsSem')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='qtdHrsSem', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='qtdHrsSem'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='qtdHrsSem', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class qtdHrsSem
class tpJornada(GeneratedsSuper):
    """Generated binding for the tpJornada element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate tpJornada, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, tpJornada)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if tpJornada.subclass:
            return tpJornada.subclass(*args_, **kwargs_)
        return tpJornada(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='tpJornada', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpJornada')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpJornada')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpJornada', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpJornada'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpJornada', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpJornada
class dscTpJorn(GeneratedsSuper):
    """Generated binding for the dscTpJorn element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate dscTpJorn, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, dscTpJorn)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if dscTpJorn.subclass:
            return dscTpJorn.subclass(*args_, **kwargs_)
        return dscTpJorn(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='dscTpJorn', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscTpJorn')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscTpJorn')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscTpJorn', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscTpJorn'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dscTpJorn', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dscTpJorn
class tmpParc(GeneratedsSuper):
    """Generated binding for the tmpParc element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate tmpParc, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, tmpParc)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if tmpParc.subclass:
            return tmpParc.subclass(*args_, **kwargs_)
        return tmpParc(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='tmpParc', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tmpParc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tmpParc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tmpParc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tmpParc'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tmpParc', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tmpParc
class filiacaoSindical(GeneratedsSuper):
    """Worker's union membership (Filiacao Sindical do Trabalhador)."""
    subclass = None
    superclass = None

    def __init__(self, cnpjSindTrab=None):
        self.original_tagname_ = None
        # CNPJ of the worker's union, serialized as a string child element.
        self.cnpjSindTrab = cnpjSindTrab

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate filiacaoSindical, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, filiacaoSindical)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if filiacaoSindical.subclass:
            return filiacaoSindical.subclass(*args_, **kwargs_)
        return filiacaoSindical(*args_, **kwargs_)

    def get_cnpjSindTrab(self):
        return self.cnpjSindTrab

    def set_cnpjSindTrab(self, cnpjSindTrab):
        self.cnpjSindTrab = cnpjSindTrab

    def hasContent_(self):
        # The only child element is cnpjSindTrab.
        return self.cnpjSindTrab is not None

    def export(self, outfile, level, namespace_='', name_='filiacaoSindical', namespacedef_='', pretty_print=True):
        """Serialize this element and its child to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('filiacaoSindical')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='filiacaoSindical')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='filiacaoSindical', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='filiacaoSindical'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='filiacaoSindical', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.cnpjSindTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scnpjSindTrab>%s</%scnpjSindTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cnpjSindTrab), input_name='cnpjSindTrab')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'cnpjSindTrab':
            self.cnpjSindTrab = self.gds_validate_string(child_.text, node, 'cnpjSindTrab')
# end class filiacaoSindical
class cnpjSindTrab(GeneratedsSuper):
    """Generated binding for the cnpjSindTrab element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate cnpjSindTrab, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, cnpjSindTrab)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if cnpjSindTrab.subclass:
            return cnpjSindTrab.subclass(*args_, **kwargs_)
        return cnpjSindTrab(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='cnpjSindTrab', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cnpjSindTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cnpjSindTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cnpjSindTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cnpjSindTrab'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='cnpjSindTrab', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cnpjSindTrab
class alvaraJudicial(GeneratedsSuper):
    """Judicial authorization (alvara) details required when hiring minors
    under 14 in any category, or workers aged 14 to 16 in a category other
    than "Aprendiz" (apprentice)."""
    subclass = None
    superclass = None

    def __init__(self, nrProcJud=None):
        self.original_tagname_ = None
        # Judicial case number, serialized as a string child element.
        self.nrProcJud = nrProcJud

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate alvaraJudicial, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, alvaraJudicial)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if alvaraJudicial.subclass:
            return alvaraJudicial.subclass(*args_, **kwargs_)
        return alvaraJudicial(*args_, **kwargs_)

    def get_nrProcJud(self):
        return self.nrProcJud

    def set_nrProcJud(self, nrProcJud):
        self.nrProcJud = nrProcJud

    def hasContent_(self):
        # The only child element is nrProcJud.
        return self.nrProcJud is not None

    def export(self, outfile, level, namespace_='', name_='alvaraJudicial', namespacedef_='', pretty_print=True):
        """Serialize this element and its child to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('alvaraJudicial')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='alvaraJudicial')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='alvaraJudicial', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='alvaraJudicial'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='alvaraJudicial', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrProcJud is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrProcJud>%s</%snrProcJud>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrProcJud), input_name='nrProcJud')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'nrProcJud':
            self.nrProcJud = self.gds_validate_string(child_.text, node, 'nrProcJud')
# end class alvaraJudicial
class observacoes(GeneratedsSuper):
    """Observations on the employment contract."""
    subclass = None
    superclass = None

    def __init__(self, observacao=None):
        self.original_tagname_ = None
        # Free-text observation, serialized as a string child element.
        self.observacao = observacao

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate observacoes, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, observacoes)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if observacoes.subclass:
            return observacoes.subclass(*args_, **kwargs_)
        return observacoes(*args_, **kwargs_)

    def get_observacao(self):
        return self.observacao

    def set_observacao(self, observacao):
        self.observacao = observacao

    def hasContent_(self):
        # The only child element is observacao.
        return self.observacao is not None

    def export(self, outfile, level, namespace_='', name_='observacoes', namespacedef_='', pretty_print=True):
        """Serialize this element and its child to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('observacoes')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='observacoes')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='observacoes', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='observacoes'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='observacoes', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.observacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sobservacao>%s</%sobservacao>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.observacao), input_name='observacao')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'observacao':
            self.observacao = self.gds_validate_string(child_.text, node, 'observacao')
# end class observacoes
class TRemun(GeneratedsSuper):
    """Remuneration and payment periodicity."""
    subclass = None
    superclass = None

    def __init__(self, vrSalFx=None, undSalFixo=None, dscSalVar=None):
        self.original_tagname_ = None
        self.vrSalFx = vrSalFx        # fixed salary amount (exported as float)
        self.undSalFixo = undSalFixo  # unit code of the fixed salary (exported as int)
        self.dscSalVar = dscSalVar    # description of variable salary (exported as string)

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate TRemun, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, TRemun)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if TRemun.subclass:
            return TRemun.subclass(*args_, **kwargs_)
        return TRemun(*args_, **kwargs_)

    def get_vrSalFx(self):
        return self.vrSalFx

    def set_vrSalFx(self, vrSalFx):
        self.vrSalFx = vrSalFx

    def get_undSalFixo(self):
        return self.undSalFixo

    def set_undSalFixo(self, undSalFixo):
        self.undSalFixo = undSalFixo

    def get_dscSalVar(self):
        return self.dscSalVar

    def set_dscSalVar(self, dscSalVar):
        self.dscSalVar = dscSalVar

    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.vrSalFx is not None
            or self.undSalFixo is not None
            or self.dscSalVar is not None
        )

    def export(self, outfile, level, namespace_='', name_='TRemun', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRemun')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRemun')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='TRemun', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRemun'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TRemun', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.vrSalFx is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%svrSalFx>%s</%svrSalFx>%s' % (namespace_, self.gds_format_float(self.vrSalFx, input_name='vrSalFx'), namespace_, eol_))
        if self.undSalFixo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sundSalFixo>%s</%sundSalFixo>%s' % (namespace_, self.gds_format_integer(self.undSalFixo, input_name='undSalFixo'), namespace_, eol_))
        if self.dscSalVar is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdscSalVar>%s</%sdscSalVar>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscSalVar), input_name='dscSalVar')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Decode one parsed child element into the matching attribute."""
        if nodeName_ == 'vrSalFx':
            text_ = child_.text
            try:
                value_ = float(text_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            self.vrSalFx = self.gds_validate_float(value_, node, 'vrSalFx')
        elif nodeName_ == 'undSalFixo':
            text_ = child_.text
            try:
                value_ = int(text_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.undSalFixo = self.gds_validate_integer(value_, node, 'undSalFixo')
        elif nodeName_ == 'dscSalVar':
            self.dscSalVar = self.gds_validate_string(child_.text, node, 'dscSalVar')
# end class TRemun
class vrSalFx(GeneratedsSuper):
    """Generated binding for the vrSalFx element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate vrSalFx, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, vrSalFx)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if vrSalFx.subclass:
            return vrSalFx.subclass(*args_, **kwargs_)
        return vrSalFx(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='vrSalFx', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('vrSalFx')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='vrSalFx')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='vrSalFx', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='vrSalFx'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='vrSalFx', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class vrSalFx
class undSalFixo(GeneratedsSuper):
    """Generated binding for the undSalFixo element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate undSalFixo, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, undSalFixo)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if undSalFixo.subclass:
            return undSalFixo.subclass(*args_, **kwargs_)
        return undSalFixo(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='undSalFixo', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('undSalFixo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='undSalFixo')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='undSalFixo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='undSalFixo'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='undSalFixo', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class undSalFixo
class dscSalVar(GeneratedsSuper):
    """Generated binding for the dscSalVar element (no attributes, no children)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Set when the element was parsed under a substituted tag name.
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate dscSalVar, honouring any registered subclass."""
        if CurrentSubclassModule_ is not None:
            override_ = getSubclassFromModule_(CurrentSubclassModule_, dscSalVar)
            if override_ is not None:
                return override_(*args_, **kwargs_)
        if dscSalVar.subclass:
            return dscSalVar.subclass(*args_, **kwargs_)
        return dscSalVar(*args_, **kwargs_)

    def hasContent_(self):
        # Empty content model: the exported element is always self-closing.
        return False

    def export(self, outfile, level, namespace_='', name_='dscSalVar', namespacedef_='', pretty_print=True):
        """Serialize this element to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscSalVar')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscSalVar')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscSalVar', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscSalVar'):
        # No attributes defined.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dscSalVar', fromsubclass_=False, pretty_print=True):
        # No child elements defined.
        pass

    def build(self, node):
        """Populate this instance from an element-tree node; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dscSalVar
class TLocalTrab(GeneratedsSuper):
"""Informações do Local de Trabalho"""
subclass = None
superclass = None
    def __init__(self, tpInsc=None, nrInsc=None, descComp=None):
        # original_tagname_ records a substituted tag name set during parsing.
        self.original_tagname_ = None
        self.tpInsc = tpInsc      # registration type (exported as integer)
        self.nrInsc = nrInsc      # registration number (exported as string)
        self.descComp = descComp  # complementary description (exported as string)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TLocalTrab)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TLocalTrab.subclass:
return TLocalTrab.subclass(*args_, **kwargs_)
else:
return TLocalTrab(*args_, **kwargs_)
factory = staticmethod(factory)
def get_tpInsc(self): return self.tpInsc
def set_tpInsc(self, tpInsc): self.tpInsc = tpInsc
def get_nrInsc(self): return self.nrInsc
def set_nrInsc(self, nrInsc): self.nrInsc = nrInsc
def get_descComp(self): return self.descComp
def set_descComp(self, descComp): self.descComp = descComp
def hasContent_(self):
if (
self.tpInsc is not None or
self.nrInsc is not None or
self.descComp is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='TLocalTrab', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TLocalTrab')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TLocalTrab')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='TLocalTrab', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TLocalTrab'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='TLocalTrab', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.tpInsc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
if self.nrInsc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))
if self.descComp is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdescComp>%s</%sdescComp>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.descComp), input_name='descComp')), namespace_, eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'tpInsc':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'tpInsc')
self.tpInsc = ival_
elif nodeName_ == 'nrInsc':
nrInsc_ = child_.text
nrInsc_ = self.gds_validate_string(nrInsc_, node, 'nrInsc')
self.nrInsc = nrInsc_
elif nodeName_ == 'descComp':
descComp_ = child_.text
descComp_ = self.gds_validate_string(descComp_, node, 'descComp')
self.descComp = descComp_
# end class TLocalTrab
class descComp(GeneratedsSuper):
    """Generated element class for <descComp>; it declares no attributes
    or child members of its own."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, then the
        # class-level `subclass` hook, before using this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, descComp)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if descComp.subclass:
            return descComp.subclass(*args_, **kwargs_)
        return descComp(*args_, **kwargs_)

    def hasContent_(self):
        # No members are declared, so this element never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='descComp', namespacedef_='', pretty_print=True):
        """Write this element as XML to `outfile`."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('descComp')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name the element carried when parsed.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_suffix = (' ' + namespacedef_) if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_suffix))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='descComp')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='descComp', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='descComp'):
        # No attributes declared for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='descComp', fromsubclass_=False, pretty_print=True):
        # No child elements declared for this element.
        pass

    def build(self, node):
        """Populate this instance from a parsed element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_element in node:
            local_name = Tag_pattern_.match(child_element.tag).groups()[-1]
            self.buildChildren(child_element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to read.
        pass
# end class descComp
class THorario(GeneratedsSuper):
    """Informações de Horário Contratual -- contractual working-hours
    information: a day code and the contractual schedule code that
    applies to it."""
    subclass = None
    superclass = None
    def __init__(self, dia=None, codHorContrat=None):
        self.original_tagname_ = None
        # dia: day code (serialized as an integer element)
        self.dia = dia
        # codHorContrat: contractual schedule code (string element)
        self.codHorContrat = codHorContrat
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, then the
        # class-level `subclass` hook, before using this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, THorario)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if THorario.subclass:
            return THorario.subclass(*args_, **kwargs_)
        else:
            return THorario(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_dia(self): return self.dia
    def set_dia(self, dia): self.dia = dia
    def get_codHorContrat(self): return self.codHorContrat
    def set_codHorContrat(self, codHorContrat): self.codHorContrat = codHorContrat
    def hasContent_(self):
        # True when at least one member would be serialized as a child.
        if (
            self.dia is not None or
            self.codHorContrat is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='THorario', namespacedef_='', pretty_print=True):
        """Write this element (and its children) as XML to `outfile`."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('THorario')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name the element carried when parsed.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='THorario')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='THorario', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='THorario'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='THorario', fromsubclass_=False, pretty_print=True):
        """Write each non-None member as a child element, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.dia is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdia>%s</%sdia>%s' % (namespace_, self.gds_format_integer(self.dia, input_name='dia'), namespace_, eol_))
        if self.codHorContrat is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodHorContrat>%s</%scodHorContrat>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codHorContrat), input_name='codHorContrat')), namespace_, eol_))
    def build(self, node):
        """Populate this instance from a parsed element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign one parsed child element to the matching member."""
        if nodeName_ == 'dia':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                # raise_parse_error reports the offending node and raises.
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'dia')
            self.dia = ival_
        elif nodeName_ == 'codHorContrat':
            codHorContrat_ = child_.text
            codHorContrat_ = self.gds_validate_string(codHorContrat_, node, 'codHorContrat')
            self.codHorContrat = codHorContrat_
# end class THorario
class dia(GeneratedsSuper):
    """Generated element class for <dia>; it declares no attributes or
    child members of its own."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, then the
        # class-level `subclass` hook, before using this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dia)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dia.subclass:
            return dia.subclass(*args_, **kwargs_)
        return dia(*args_, **kwargs_)

    def hasContent_(self):
        # No members are declared, so this element never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='dia', namespacedef_='', pretty_print=True):
        """Write this element as XML to `outfile`."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dia')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name the element carried when parsed.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_suffix = (' ' + namespacedef_) if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_suffix))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dia')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dia', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dia'):
        # No attributes declared for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dia', fromsubclass_=False, pretty_print=True):
        # No child elements declared for this element.
        pass

    def build(self, node):
        """Populate this instance from a parsed element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_element in node:
            local_name = Tag_pattern_.match(child_element.tag).groups()[-1]
            self.buildChildren(child_element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to read.
        pass
# end class dia
class codHorContrat(GeneratedsSuper):
    """Generated element class for <codHorContrat>; it declares no
    attributes or child members of its own."""
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, then the
        # class-level `subclass` hook, before using this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, codHorContrat)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if codHorContrat.subclass:
            return codHorContrat.subclass(*args_, **kwargs_)
        return codHorContrat(*args_, **kwargs_)

    def hasContent_(self):
        # No members are declared, so this element never has content.
        return False

    def export(self, outfile, level, namespace_='', name_='codHorContrat', namespacedef_='', pretty_print=True):
        """Write this element as XML to `outfile`."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codHorContrat')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name the element carried when parsed.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_suffix = (' ' + namespacedef_) if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_suffix))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codHorContrat')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codHorContrat', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codHorContrat'):
        # No attributes declared for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codHorContrat', fromsubclass_=False, pretty_print=True):
        # No child elements declared for this element.
        pass

    def build(self, node):
        """Populate this instance from a parsed element node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_element in node:
            local_name = Tag_pattern_.match(child_element.tag).groups()[-1]
            self.buildChildren(child_element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to read.
        pass
# end class codHorContrat
# Map each XML element tag name to the generated class that parses it.
# Consulted by get_root_tag() to resolve a document's root class before
# falling back to a module-level lookup by tag name.
GDSClassesMapping = {
    'CNH': TCnh,
    'CTPS': TCtps,
    'FGTS': TFgts,
    'OC': TOc,
    'RG': TRg,
    'RIC': TRic,
    'RNE': TRne,
    'brasil': TEnderecoBrasil,
    'contato': TContato,
    'dependente': TDependente,
    'exterior': TEnderecoExterior,
    'horario': THorario,
    'ideEmpregador': TEmpregador,
    'ideEvento': TIdeEveTrab,
    'infoContrato': TDadosContrato,
    'localTrabDom': TEnderecoBrasil,
    'localTrabGeral': TLocalTrab,
    'remuneracao': TRemun,
    'trabEstrangeiro': TTrabEstrang,
}
# Help text shown when the script is invoked with the wrong arguments.
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""


def usage():
    """Print command-line usage and exit with a non-zero status."""
    print(USAGE_TEXT)
    sys.exit(1)
def get_root_tag(node):
    """Return ``(tag, cls)`` for a root element node.

    The tag is the element's local name (namespace prefix stripped); the
    class comes from GDSClassesMapping, falling back to a module-level
    name lookup.  ``cls`` is None when no matching class exists.
    """
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    # Mapping values are always classes (never None), so the fallback
    # default is only used when the tag is absent from the mapping.
    root_class = GDSClassesMapping.get(tag, globals().get(tag))
    return tag, root_class
def parse(inFileName, silence=False):
    """Parse an XML file into the generated object tree.

    Unless `silence` is true, the rebuilt tree is echoed to stdout as
    pretty-printed XML.  Returns the root object.
    """
    doc = parsexml_(inFileName, None)
    root_node = doc.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        # Unknown root element: assume the schema's eSocial envelope.
        root_tag = 'eSocial'
        root_class = eSocial
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so Python can reclaim its memory.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        root_obj.export(
            sys.stdout, 0, name_=root_tag,
            namespacedef_='',
            pretty_print=True)
    return root_obj
def parseEtree(inFileName, silence=False):
    """Parse an XML file and build both the object tree and an etree.

    Unless `silence` is true, the rebuilt etree is echoed to stdout.
    Returns ``(rootObj, rootElement, mapping, reverse_mapping)`` where
    `mapping` links object-tree nodes to etree elements.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: assume the schema's eSocial envelope.
        rootTag = 'eSocial'
        rootClass = eSocial
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        # Bug fix: with an explicit encoding, tostring() returns bytes,
        # and sys.stdout.write(bytes) raises TypeError on Python 3.
        # Decode only there so Python 2 output stays byte-identical.
        if sys.version_info.major > 2 and isinstance(content, bytes):
            content = content.decode("utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    """Parse an XML string (str or bytes) into the generated object tree.

    Unless `silence` is true, the rebuilt tree is echoed to stdout as
    XML.  Returns the root object.
    """
    if sys.version_info.major == 2:
        from StringIO import StringIO as IOBuffer
    else:
        from io import BytesIO as IOBuffer
        # Bug fix: BytesIO rejects str on Python 3, so callers passing a
        # text string would get a TypeError.  Encode str input; bytes
        # input (and all Python 2 behavior) is unchanged.
        if isinstance(inString, str):
            inString = inString.encode('utf-8')
    parser = None
    doc = parsexml_(IOBuffer(inString), parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: assume the schema's eSocial envelope.
        rootTag = 'eSocial'
        rootClass = eSocial
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='')
    return rootObj
def parseLiteral(inFileName, silence=False):
    """Parse an XML file and print it as a Python-literal reconstruction.

    Unless `silence` is true, writes importable Python source to stdout
    that rebuilds the parsed tree.  Returns the root object.
    """
    doc = parsexml_(inFileName, None)
    root_node = doc.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        # Unknown root element: assume the schema's eSocial envelope.
        root_tag = 'eSocial'
        root_class = eSocial
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so Python can reclaim its memory.
    doc = None
    if not silence:
        out = sys.stdout
        out.write('#from evtAdmissao import *\n\n')
        out.write('import evtAdmissao as model_\n\n')
        out.write('rootObj = model_.rootClass(\n')
        root_obj.exportLiteral(out, 0, name_=root_tag)
        out.write(')\n')
    return root_obj
def main():
    """Command-line entry point: parse the single XML file argument."""
    cli_args = sys.argv[1:]
    if len(cli_args) == 1:
        parse(cli_args[0])
    else:
        # Wrong argument count: print usage and exit(1).
        usage()
# Run as a script: parse the file named on the command line.
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Public API of this generated module: the shared complex-type classes
# plus the root element class.
__all__ = [
    "TCnh",
    "TContato",
    "TCtps",
    "TDadosContrato",
    "TDependente",
    "TEmpregador",
    "TEnderecoBrasil",
    "TEnderecoExterior",
    "TFgts",
    "THorario",
    "TIdeEveTrab",
    "TLocalTrab",
    "TOc",
    "TRemun",
    "TRg",
    "TRic",
    "TRne",
    "TTrabEstrang",
    "eSocial"
]
| 43.120826
| 291
| 0.621537
| 60,158
| 584,934
| 5.756824
| 0.015975
| 0.042435
| 0.027573
| 0.031945
| 0.81686
| 0.778753
| 0.760201
| 0.733055
| 0.70744
| 0.6743
| 0
| 0.001738
| 0.272032
| 584,934
| 13,564
| 292
| 43.124005
| 0.811575
| 0.015988
| 0
| 0.701029
| 1
| 0.000079
| 0.043354
| 0.007794
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156125
| false
| 0.044944
| 0.040387
| 0.01815
| 0.334329
| 0.088316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4f3b1038c403cb3e14fd0d23ee3586cbcd2eeeda
| 27
|
py
|
Python
|
pyefun/encoding/ebinary/__init__.py
|
nuo010/pyefun
|
c1c4dfcfd382a67df005a66958da95aa13c30686
|
[
"Apache-2.0"
] | 94
|
2021-05-19T04:09:29.000Z
|
2022-03-27T04:02:30.000Z
|
pyefun/encoding/ebinary/__init__.py
|
1431241631/pyefun
|
ac2290d4bcc8de16c195d2782f3eacd26e5e6ed4
|
[
"Apache-2.0"
] | 11
|
2021-05-22T06:44:19.000Z
|
2021-12-27T11:16:06.000Z
|
pyefun/encoding/ebinary/__init__.py
|
1431241631/pyefun
|
ac2290d4bcc8de16c195d2782f3eacd26e5e6ed4
|
[
"Apache-2.0"
] | 21
|
2021-05-22T21:08:09.000Z
|
2022-02-24T02:39:06.000Z
|
from .binary import *
| 3.857143
| 21
| 0.592593
| 3
| 27
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 27
| 6
| 22
| 4.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4f4565fcde0d2c2c71603c094bff0b39921c8a9b
| 353
|
py
|
Python
|
Utilities/MessageUtilities/SwarmAnalyticsUtility/MessageInterface/Enums/__init__.py
|
IshmaGurca/SwarmAnalytics
|
81688dac4d32d568cfac218224a45cbf048b2af9
|
[
"MIT"
] | null | null | null |
Utilities/MessageUtilities/SwarmAnalyticsUtility/MessageInterface/Enums/__init__.py
|
IshmaGurca/SwarmAnalytics
|
81688dac4d32d568cfac218224a45cbf048b2af9
|
[
"MIT"
] | null | null | null |
Utilities/MessageUtilities/SwarmAnalyticsUtility/MessageInterface/Enums/__init__.py
|
IshmaGurca/SwarmAnalytics
|
81688dac4d32d568cfac218224a45cbf048b2af9
|
[
"MIT"
] | null | null | null |
from SwarmAnalyticsUtility.MessageInterface.Enums.ChatNumber import *
from SwarmAnalyticsUtility.MessageInterface.Enums.ChatType import *
from SwarmAnalyticsUtility.MessageInterface.Enums.FeedbackReward import *
from SwarmAnalyticsUtility.MessageInterface.Enums.MessageText import *
from SwarmAnalyticsUtility.MessageInterface.Enums.MessageType import *
| 70.6
| 73
| 0.889518
| 30
| 353
| 10.466667
| 0.333333
| 0.398089
| 0.652866
| 0.732484
| 0.66242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053824
| 353
| 5
| 74
| 70.6
| 0.94012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4f76d6773ee27899ea577fe1ab800fe4087aff00
| 147
|
py
|
Python
|
tests/parser/choice.33.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/choice.33.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/choice.33.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
a | b :- c.
d :- not c.
c :- not d.
:- d, not c.
"""
output = """
a | b :- c.
d :- not c.
c :- not d.
:- d, not c.
"""
| 8.647059
| 13
| 0.319728
| 26
| 147
| 1.807692
| 0.269231
| 0.340426
| 0.425532
| 0.170213
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0
| 0
| 0.394558
| 147
| 16
| 14
| 9.1875
| 0.52809
| 0
| 0
| 0.833333
| 0
| 0
| 0.77037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
4fa39e6dd84a63244cd81999324e6a23e63809a8
| 3,362
|
py
|
Python
|
tests/test_stopwatch.py
|
TechnoJays/robot2018
|
9b5e355d79642e9b5998031872ec0ee2f1a6f08d
|
[
"MIT"
] | null | null | null |
tests/test_stopwatch.py
|
TechnoJays/robot2018
|
9b5e355d79642e9b5998031872ec0ee2f1a6f08d
|
[
"MIT"
] | 17
|
2018-02-15T21:55:35.000Z
|
2018-02-27T22:51:13.000Z
|
tests/test_stopwatch.py
|
TechnoJays/robot2018
|
9b5e355d79642e9b5998031872ec0ee2f1a6f08d
|
[
"MIT"
] | 1
|
2018-02-10T18:43:44.000Z
|
2018-02-10T18:43:44.000Z
|
import pytest
from stopwatch import Stopwatch
@pytest.fixture(scope="function")
def stopwatch_default(robot):
return Stopwatch()
def test_stopwatch_default(stopwatch_default):
assert stopwatch_default is not None
assert stopwatch_default._start is None
assert stopwatch_default._end is None
assert stopwatch_default._secs is None
assert stopwatch_default._msecs is None
assert stopwatch_default._running is False
def test_start(stopwatch_default):
stopwatch_default.start()
assert stopwatch_default._start is not None
assert stopwatch_default._running is True
assert stopwatch_default._end is None
assert stopwatch_default._secs is None
assert stopwatch_default._msecs is None
def test_reset(stopwatch_default):
stopwatch_default.start()
start_time = stopwatch_default._start
stopwatch_default.reset()
assert stopwatch_default._start is not None
assert stopwatch_default._start != start_time
assert stopwatch_default._running is True
assert stopwatch_default._end is None
assert stopwatch_default._secs is None
assert stopwatch_default._msecs is None
@pytest.mark.parametrize("started", [
True,
False
])
def test_stop(stopwatch_default, started):
if started:
stopwatch_default.start()
stopwatch_default.stop()
if started:
assert stopwatch_default._start is not None
assert stopwatch_default._end is not None
else:
assert stopwatch_default._start is None
assert stopwatch_default._end is None
assert stopwatch_default._secs is None
assert stopwatch_default._msecs is None
assert stopwatch_default._running is False
@pytest.mark.parametrize("started", [
True,
False
])
def test_elapsed_time_in_secs(stopwatch_default, started):
if started:
stopwatch_default.start()
time_in_sec = stopwatch_default.elapsed_time_in_secs()
if started:
assert stopwatch_default._start is not None
assert stopwatch_default._end is not None
assert stopwatch_default._running is True
assert stopwatch_default._secs is not None
assert time_in_sec is not None
else:
assert stopwatch_default._start is None
assert stopwatch_default._end is None
assert stopwatch_default._running is False
assert stopwatch_default._secs is None
assert time_in_sec is None
assert stopwatch_default._msecs is None
@pytest.mark.parametrize("started", [
True,
False
])
def test_elapsed_time_in_msecs(stopwatch_default, started):
if started:
stopwatch_default.start()
time_in_msec = stopwatch_default.elapsed_time_in_msecs()
if started:
assert stopwatch_default._start is not None
assert stopwatch_default._end is not None
assert stopwatch_default._running is True
assert stopwatch_default._secs is not None
assert stopwatch_default._msecs is not None
assert time_in_msec is not None
assert stopwatch_default._msecs == stopwatch_default._secs * 1000
else:
assert stopwatch_default._start is None
assert stopwatch_default._end is None
assert stopwatch_default._running is False
assert stopwatch_default._secs is None
assert stopwatch_default._msecs is None
assert time_in_msec is None
| 32.326923
| 73
| 0.74301
| 436
| 3,362
| 5.408257
| 0.082569
| 0.427481
| 0.410517
| 0.308736
| 0.891433
| 0.805344
| 0.782019
| 0.751908
| 0.729432
| 0.729432
| 0
| 0.001508
| 0.210886
| 3,362
| 103
| 74
| 32.640777
| 0.887297
| 0
| 0
| 0.733333
| 0
| 0
| 0.008626
| 0
| 0
| 0
| 0
| 0
| 0.533333
| 1
| 0.077778
| false
| 0
| 0.022222
| 0.011111
| 0.111111
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
96c76d269dbe0285753eac716827033bae09a466
| 13,955
|
py
|
Python
|
library_model.py
|
sandipsandal/Library_Management_System
|
e4d9d65f1f6cc7743c1e53977b7d9e5c74f227b8
|
[
"MIT"
] | null | null | null |
library_model.py
|
sandipsandal/Library_Management_System
|
e4d9d65f1f6cc7743c1e53977b7d9e5c74f227b8
|
[
"MIT"
] | null | null | null |
library_model.py
|
sandipsandal/Library_Management_System
|
e4d9d65f1f6cc7743c1e53977b7d9e5c74f227b8
|
[
"MIT"
] | null | null | null |
import mysql.connector
from mysql.connector import Error,MySQLConnection
# from mysql.connector import
# from configparser import ConfigParser
from datetime import datetime
from datetime import date, timedelta
from conpars import read_db_config
#1. user registration
def create_user(name, user_contact,password,user_name):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = "INSERT INTO library_user(u_name,u_contact,password,user_name) VALUES (%s, %s, %s, %s)"
val = (name,user_contact,password,user_name)
cursor.execute(sql, val)
connection.commit()
cursor.close()
return cursor.lastrowid
#check user registration
def create_user_check(user_name):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = 'SELECT * FROM library_user WHERE user_name = %s'
val = (user_name,)
cursor.execute(sql, val)
account = cursor.fetchone()
cursor.close()
return account
#3. user login
def user_login(user_name,password):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = 'SELECT u_id, u_name FROM library_user WHERE user_name = %s AND password = %s'
val = (user_name,password)
cursor.execute(sql, val)
account = cursor.fetchone()
cursor.close()
return account
#3A. user option choice for action perform
#3B. Notified user for 14 days to the user his issued book detail
def notify_day(user_id):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor(buffered=True)
dt = date.today() - timedelta(14)
sql = "SELECT b_id,b_name,b_author,b_rented_date FROM library_books WHERE b_rented_date < %s and b_rented_user =%s"
val = (dt,user_id)
cursor.execute(sql,val)
book_details = cursor.fetchall()
cursor.close()
return book_details
#3-31. update user detail
def update_user_details(update_user_name, update_user_contact,update_user_password, user_id):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = "UPDATE library_user SET u_name = %s, u_contact = %s, password = %s WHERE u_id = %s"
val = (update_user_name, update_user_contact, update_user_password,user_id)
cursor.execute(sql, val)
connection.commit()
id = cursor.rowcount
cursor.close()
# return cursor.lastrowid
return id
#3-32. user issue a book from library
def user_issue_book(user_id,book_id):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
today_date = date.today()
sql = "UPDATE library_books SET b_rented_user = %s, b_rented_date = %s WHERE (b_id = %s) AND (b_rented_user IS NULL)"
val = (user_id,today_date,book_id)
cursor.execute(sql, val)
connection.commit()
id = cursor.rowcount
cursor.close()
return id
#3-33. Get and Update user_fee detail
def get_day(book_id):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = "select b_rented_date, b_rented_user from library_books where b_id = %s"
val = (book_id,)
cursor.execute(sql,val)
rent_day_user = cursor.fetchone()
# cursor.fetchone()
# connection.commit()
cursor.close()
return rent_day_user
#3-33-B update_user_fee
def update_user_fee(fine,user_id): # fine = total_fee_value//user_id = details[1] = b_rented_user
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = "UPDATE library_user SET u_fee = %s WHERE u_id = %s"
val = (fine,user_id)
cursor.execute(sql,val)
cursor.rowcount
connection.commit()
cursor.close()
return cursor.lastrowid
#3-34. return book & update user fee (Return issued book by login user& update user fee of logged in user)
def get_user_id(book_id):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
# sql = "select b_rented_date, b_rented_user from books where b_id = %s"
sql = "select b_rented_user from library_books where b_id = %s"
val = (book_id,)
cursor.execute(sql,val)
user_id = cursor.fetchone()
cursor.close()
return user_id
#3-34 B
def paid_user_fee(user_id):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = "UPDATE library_user SET u_fee = NULL WHERE u_id = %s"
val = (user_id,)
cursor.execute(sql,val)
cursor.rowcount
connection.commit()
cursor.close()
return cursor.lastrowid
#3-34-C
def return_book(book_id):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = "UPDATE library_books SET b_rented_date = NULL, b_rented_user = NULL WHERE b_id = %s"
val = (book_id,)
cursor.execute(sql,val)
cursor.rowcount
connection.commit()
cursor.close()
return cursor.lastrowid
#2. librarian registration(for add new librarian data)
def create_librarian(librarian_name,librarian_contact,password,user_name):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = "INSERT INTO college_librarian(l_name,l_contact,password,user_name) VALUES (%s, %s, %s, %s)"
val = (librarian_name,librarian_contact,password,user_name)
cursor.execute(sql, val)
cursor.rowcount
connection.commit()
cursor.close()
return cursor.lastrowid
# check librarian registration
def create_librarian_check(user_name):
# connection = mysql.connector.connect(host='127.0.0.1',database='library_management_sys_db',user='root',password='Password@123')
# cursor = connection.cursor()
dbconfig = read_db_config()
connection =MySQLConnection(**dbconfig)
cursor = connection.cursor()
sql = 'SELECT * FROM college_librarian WHERE user_name = %s'
val = (user_name,)
cursor.execute(sql, val)
account = cursor.fetchone()
cursor.close()
return account
#4.librarian_login
def librarian_login(user_name, password):
    """Authenticate a librarian.

    Returns:
        The (l_id, l_name) row for matching credentials, or None.
    """
    # NOTE(review): credentials are compared in plain text in SQL —
    # confirm whether hashing is intended elsewhere in the project.
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            sql = 'SELECT l_id,l_name FROM college_librarian WHERE user_name = %s AND password = %s'
            cursor.execute(sql, (user_name, password))
            account = cursor.fetchone()
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return account
#4B. Notified librarian for 14 days to the user his issued book detail after librarian login
def notify_librarian_user_day():
    """Return rental details for books rented more than 14 days ago.

    Returns:
        List of (b_rented_user, b_name, b_author, b_rented_date) rows
        whose rental date is older than today minus 14 days.
    """
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        # buffered=True so the result set is fetched eagerly by the driver.
        cursor = connection.cursor(buffered=True)
        try:
            cutoff = date.today() - timedelta(14)
            sql = "SELECT b_rented_user,b_name,b_author,b_rented_date FROM library_books WHERE b_rented_date < %s "
            cursor.execute(sql, (cutoff,))
            book_details = cursor.fetchall()
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return book_details
#4-41. add books
def add_books(book_name, book_author, book_publication):
    """Insert a new book into library_books.

    Parameters:
        book_name: title (b_name).
        book_author: author (b_author).
        book_publication: publisher (b_publication).

    Returns:
        The auto-generated b_id of the inserted row.
    """
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            sql = "INSERT INTO library_books(b_name,b_author,b_publication) VALUES (%s, %s, %s)"
            cursor.execute(sql, (book_name, book_author, book_publication))
            connection.commit()
            # Capture before close; lastrowid on a closed cursor is unreliable.
            new_id = cursor.lastrowid
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return new_id
#4-42. delete book
def delete_book(book_id):
    """Delete the library_books row with the given b_id.

    Returns:
        cursor.lastrowid (0 for DELETE statements in MySQL; kept only
        for backward compatibility with existing callers).
    """
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            sql = "DELETE FROM library_books WHERE b_id = %s"
            cursor.execute(sql, (book_id,))
            connection.commit()
            last_id = cursor.lastrowid  # capture before close
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return last_id
def del_book_available(book_id):
    """Return the library_books row for book_id, or None if absent.

    Used before deletion to verify the book actually exists.
    """
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            sql = "SELECT * FROM library_books WHERE b_id = %s"
            cursor.execute(sql, (book_id,))
            avl_book = cursor.fetchone()
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return avl_book
#4-43. update book details by Librarian
def update_book_details(update_book_name, update_book_author, update_book_publication, book_id):
    """Update name/author/publication of the book identified by book_id.

    Returns:
        cursor.lastrowid (0 for UPDATE statements in MySQL; kept only
        for backward compatibility with existing callers).
    """
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            sql = "UPDATE library_books SET b_name = %s, b_author = %s, b_publication = %s WHERE b_id = %s"
            cursor.execute(sql, (update_book_name, update_book_author, update_book_publication, book_id))
            connection.commit()
            last_id = cursor.lastrowid  # capture before close
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return last_id
#4-44. Delete Student(user) detail by librarianr
def delete_user(user_id):
    """Delete the library_user row with the given u_id.

    Returns:
        cursor.lastrowid (0 for DELETE statements in MySQL; kept only
        for backward compatibility with existing callers).
    """
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            sql = "DELETE FROM library_user WHERE u_id = %s"
            cursor.execute(sql, (user_id,))
            connection.commit()
            last_id = cursor.lastrowid  # capture before close
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return last_id
def delete_user_error(user_id):
    """Return the (u_name,) row for user_id, or None if no such user.

    Used to produce a helpful message when a delete targets a missing user.
    """
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            sql = "SELECT u_name FROM library_user WHERE u_id = %s"
            cursor.execute(sql, (user_id,))
            account = cursor.fetchone()
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return account
# 5. Display all books in library
def display_all_books():
    """Return every book as (b_id, b_name, b_author, b_publication) rows."""
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            cursor.execute("SELECT b_id, b_name, b_author, b_publication FROM library_books")
            rows = cursor.fetchall()
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return rows
# 6. Diplay available books
def display_available_books():
    """Return books not currently rented (b_rented_user IS NULL)."""
    dbconfig = read_db_config()
    connection = MySQLConnection(**dbconfig)
    try:
        cursor = connection.cursor()
        try:
            cursor.execute('SELECT b_id,b_name, b_author, b_publication FROM library_books WHERE b_rented_user IS NULL')
            rows = cursor.fetchall()
        finally:
            cursor.close()
    finally:
        connection.close()  # bug fix: the connection was never closed
    return rows
| 36.341146
| 133
| 0.709566
| 1,843
| 13,955
| 5.176886
| 0.072708
| 0.077141
| 0.106069
| 0.07473
| 0.85599
| 0.827901
| 0.810816
| 0.788387
| 0.775286
| 0.775286
| 0
| 0.022199
| 0.16718
| 13,955
| 383
| 134
| 36.436031
| 0.798744
| 0.334289
| 0
| 0.720165
| 0
| 0.020576
| 0.176241
| 0.024324
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09465
| false
| 0.061728
| 0.020576
| 0
| 0.209877
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
96ebfd951c59ac859ef0448a1233912b7946bb43
| 4,466
|
py
|
Python
|
tests/test_apply_external_resources.py
|
sobolevn/paasta
|
8b87e0b13816c09b3d063b6d3271e6c7627fd264
|
[
"Apache-2.0"
] | 1,711
|
2015-11-10T18:04:56.000Z
|
2022-03-23T08:53:16.000Z
|
tests/test_apply_external_resources.py
|
sobolevn/paasta
|
8b87e0b13816c09b3d063b6d3271e6c7627fd264
|
[
"Apache-2.0"
] | 1,689
|
2015-11-10T17:59:04.000Z
|
2022-03-31T20:46:46.000Z
|
tests/test_apply_external_resources.py
|
sobolevn/paasta
|
8b87e0b13816c09b3d063b6d3271e6c7627fd264
|
[
"Apache-2.0"
] | 267
|
2015-11-10T19:17:16.000Z
|
2022-02-08T20:59:52.000Z
|
import os
from subprocess import CalledProcessError
import mock
import pytest
from paasta_tools.apply_external_resources import main
@pytest.fixture
def mock_run():
    """Yield an autospec'd patch of apply_external_resources.run."""
    patcher = mock.patch(
        "paasta_tools.apply_external_resources.run", autospec=True
    )
    with patcher as patched_run:
        yield patched_run
@pytest.fixture(autouse=True)
def setup_external_files(fs):
    """Seed the fake filesystem with three resources, each present both
    under /external_resources and under the mirrored .applied tree."""
    seed = {
        "00-common/10-foo/10-deployment.yaml": "foo: bar",
        "00-common/10-foo/20-service.yaml": "fizz: buzz",
        "20-common/10-foo/20-deployment.yaml": "baz: biz",
    }
    for rel_path, body in seed.items():
        fs.create_file(f"/external_resources/{rel_path}", contents=body)
    for rel_path, body in seed.items():
        fs.create_file(f"/external_resources/.applied/{rel_path}", contents=body)
def test_no_changes(mock_run):
    """When the live tree matches .applied, nothing is run."""
    assert main("/external_resources") == 0
    mock_run.assert_not_called()
def test_resources_added_in_order(mock_run, fs):
    """New resource files are applied in lexicographic path order and
    then mirrored into the .applied tree."""
    new_paths = [
        "/external_resources/00-common/10-foo/30-hpa.yaml",
        "/external_resources/00-common/10-foo/40-service.yaml",
        "/external_resources/00-common/30-foo/10-deployment.yaml",
    ]
    for path in new_paths:
        fs.create_file(path, contents="blah: blah")

    assert main("/external_resources") == 0

    expected_calls = [
        mock.call(["kubectl", "apply", "-f", path], check=True)
        for path in new_paths
    ]
    assert mock_run.call_args_list == expected_calls

    for path in new_paths:
        applied_copy = path.replace(
            "/external_resources/", "/external_resources/.applied/"
        )
        assert os.path.exists(applied_copy)
def test_resources_deleted_in_reverse_order(mock_run, fs):
    """Files present only under .applied are kubectl-deleted in reverse
    lexicographic order and removed from the .applied tree."""
    stale_paths = [
        "/external_resources/.applied/00-common/10-foo/30-hpa.yaml",
        "/external_resources/.applied/00-common/10-foo/40-service.yaml",
    ]
    for path in stale_paths:
        fs.create_file(path, contents="blah: blah")

    assert main("/external_resources") == 0

    expected_calls = [
        mock.call(
            ["kubectl", "delete", "--ignore-not-found=true", "-f", path],
            check=True,
        )
        for path in reversed(stale_paths)
    ]
    assert mock_run.call_args_list == expected_calls

    for path in stale_paths:
        assert not os.path.exists(path)
def test_kubectl_fails(mock_run, fs):
    """A kubectl failure makes main() return 1; the failed file is not
    recorded as applied while the subsequent successful one is."""
    mock_run.side_effect = [
        CalledProcessError(cmd="kubectl", returncode=1),
        None,
    ]
    for name in ("30-hpa.yaml", "40-service.yaml"):
        fs.create_file(
            f"/external_resources/00-common/10-foo/{name}",
            contents="blah: blah",
        )

    assert main("/external_resources") == 1

    failed = "/external_resources/.applied/00-common/10-foo/30-hpa.yaml"
    succeeded = "/external_resources/.applied/00-common/10-foo/40-service.yaml"
    assert not os.path.exists(failed)
    assert os.path.exists(succeeded)
| 29.189542
| 87
| 0.576131
| 520
| 4,466
| 4.794231
| 0.15
| 0.211392
| 0.097072
| 0.104292
| 0.844765
| 0.818291
| 0.815483
| 0.80706
| 0.80706
| 0.757722
| 0
| 0.048689
| 0.282579
| 4,466
| 152
| 88
| 29.381579
| 0.729401
| 0
| 0
| 0.572464
| 0
| 0
| 0.398343
| 0.336319
| 0
| 0
| 0
| 0
| 0.101449
| 1
| 0.043478
| false
| 0
| 0.036232
| 0
| 0.07971
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
96fecf154e03c91dd2551df23348d34a87a73422
| 41,359
|
py
|
Python
|
sdk/python/pulumi_artifactory/pull_replication.py
|
pulumi/terraform-provider-artifactory
|
4f217f2e6bc2f7e5395a148cd3b3b7b5aaa66372
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2021-11-17T15:06:59.000Z
|
2022-03-21T02:36:15.000Z
|
sdk/python/pulumi_artifactory/pull_replication.py
|
pulumi/terraform-provider-artifactory
|
4f217f2e6bc2f7e5395a148cd3b3b7b5aaa66372
|
[
"ECL-2.0",
"Apache-2.0"
] | 113
|
2021-11-09T14:14:50.000Z
|
2022-03-31T23:18:29.000Z
|
sdk/python/pulumi_artifactory/pull_replication.py
|
pulumi/terraform-provider-artifactory
|
4f217f2e6bc2f7e5395a148cd3b3b7b5aaa66372
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-11-22T11:19:48.000Z
|
2021-12-17T01:39:20.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['PullReplicationArgs', 'PullReplication']
@pulumi.input_type
class PullReplicationArgs:
    """Constructor arguments for the PullReplication resource.

    Generated by the Pulumi Terraform Bridge (tfgen) — do not edit by
    hand. Each property below mirrors one constructor keyword argument.
    """

    def __init__(__self__, *,
                 cron_exp: pulumi.Input[str],
                 repo_key: pulumi.Input[str],
                 check_binary_existence_in_filestore: Optional[pulumi.Input[bool]] = None,
                 enable_event_replication: Optional[pulumi.Input[bool]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 path_prefix: Optional[pulumi.Input[str]] = None,
                 proxy: Optional[pulumi.Input[str]] = None,
                 socket_timeout_millis: Optional[pulumi.Input[int]] = None,
                 sync_deletes: Optional[pulumi.Input[bool]] = None,
                 sync_properties: Optional[pulumi.Input[bool]] = None,
                 sync_statistics: Optional[pulumi.Input[bool]] = None,
                 url: Optional[pulumi.Input[str]] = None,
                 username: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a PullReplication resource.
        :param pulumi.Input[bool] check_binary_existence_in_filestore: When true, enables distributed checksum storage. For more information, see
               [Optimizing Repository Replication with Checksum-Based Storage](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-OptimizingRepositoryReplicationUsingStorageLevelSynchronizationOptions).
        :param pulumi.Input[bool] enable_event_replication: When set, each event will trigger replication of the artifacts changed in this event. This can be any type of event on artifact, e.g. added, deleted or property change.
        :param pulumi.Input[bool] enabled: When set, this replication will be enabled when saved.
        :param pulumi.Input[str] password: Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] path_prefix: Only artifacts that located in path that matches the subpath within the remote repository will be replicated.
        :param pulumi.Input[str] proxy: Proxy key from Artifactory Proxies setting
        :param pulumi.Input[bool] sync_deletes: When set, items that were deleted locally should also be deleted remotely (also applies to properties metadata).
        :param pulumi.Input[bool] sync_properties: When set, the task also synchronizes the properties of replicated artifacts.
        :param pulumi.Input[bool] sync_statistics: When set, artifact download statistics will also be replicated. Set to avoid inadvertent cleanup at the target instance when setting up replication for disaster recovery.
        :param pulumi.Input[str] url: The URL of the target local repository on a remote Artifactory server. For some package types, you need to prefix the repository key in the URL with api/<pkg>.
               For a list of package types where this is required, see the [note](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-anchorPREFIX).
               Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] username: Required for local repository, but not needed for remote repository.
        """
        # cron_exp and repo_key are required; everything else is set only
        # when a non-None value was supplied.
        pulumi.set(__self__, "cron_exp", cron_exp)
        pulumi.set(__self__, "repo_key", repo_key)
        if check_binary_existence_in_filestore is not None:
            pulumi.set(__self__, "check_binary_existence_in_filestore", check_binary_existence_in_filestore)
        if enable_event_replication is not None:
            pulumi.set(__self__, "enable_event_replication", enable_event_replication)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if password is not None:
            pulumi.set(__self__, "password", password)
        if path_prefix is not None:
            pulumi.set(__self__, "path_prefix", path_prefix)
        if proxy is not None:
            pulumi.set(__self__, "proxy", proxy)
        if socket_timeout_millis is not None:
            pulumi.set(__self__, "socket_timeout_millis", socket_timeout_millis)
        if sync_deletes is not None:
            pulumi.set(__self__, "sync_deletes", sync_deletes)
        if sync_properties is not None:
            pulumi.set(__self__, "sync_properties", sync_properties)
        if sync_statistics is not None:
            pulumi.set(__self__, "sync_statistics", sync_statistics)
        if url is not None:
            pulumi.set(__self__, "url", url)
        if username is not None:
            pulumi.set(__self__, "username", username)

    # Property getters/setters below delegate storage to pulumi.get/set.

    @property
    @pulumi.getter(name="cronExp")
    def cron_exp(self) -> pulumi.Input[str]:
        return pulumi.get(self, "cron_exp")

    @cron_exp.setter
    def cron_exp(self, value: pulumi.Input[str]):
        pulumi.set(self, "cron_exp", value)

    @property
    @pulumi.getter(name="repoKey")
    def repo_key(self) -> pulumi.Input[str]:
        return pulumi.get(self, "repo_key")

    @repo_key.setter
    def repo_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "repo_key", value)

    @property
    @pulumi.getter(name="checkBinaryExistenceInFilestore")
    def check_binary_existence_in_filestore(self) -> Optional[pulumi.Input[bool]]:
        """
        When true, enables distributed checksum storage. For more information, see
        [Optimizing Repository Replication with Checksum-Based Storage](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-OptimizingRepositoryReplicationUsingStorageLevelSynchronizationOptions).
        """
        return pulumi.get(self, "check_binary_existence_in_filestore")

    @check_binary_existence_in_filestore.setter
    def check_binary_existence_in_filestore(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "check_binary_existence_in_filestore", value)

    @property
    @pulumi.getter(name="enableEventReplication")
    def enable_event_replication(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, each event will trigger replication of the artifacts changed in this event. This can be any type of event on artifact, e.g. added, deleted or property change.
        """
        return pulumi.get(self, "enable_event_replication")

    @enable_event_replication.setter
    def enable_event_replication(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_event_replication", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, this replication will be enabled when saved.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter
    def password(self) -> Optional[pulumi.Input[str]]:
        """
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "password")

    @password.setter
    def password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "password", value)

    @property
    @pulumi.getter(name="pathPrefix")
    def path_prefix(self) -> Optional[pulumi.Input[str]]:
        """
        Only artifacts that located in path that matches the subpath within the remote repository will be replicated.
        """
        return pulumi.get(self, "path_prefix")

    @path_prefix.setter
    def path_prefix(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path_prefix", value)

    @property
    @pulumi.getter
    def proxy(self) -> Optional[pulumi.Input[str]]:
        """
        Proxy key from Artifactory Proxies setting
        """
        return pulumi.get(self, "proxy")

    @proxy.setter
    def proxy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "proxy", value)

    @property
    @pulumi.getter(name="socketTimeoutMillis")
    def socket_timeout_millis(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "socket_timeout_millis")

    @socket_timeout_millis.setter
    def socket_timeout_millis(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "socket_timeout_millis", value)

    @property
    @pulumi.getter(name="syncDeletes")
    def sync_deletes(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, items that were deleted locally should also be deleted remotely (also applies to properties metadata).
        """
        return pulumi.get(self, "sync_deletes")

    @sync_deletes.setter
    def sync_deletes(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_deletes", value)

    @property
    @pulumi.getter(name="syncProperties")
    def sync_properties(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, the task also synchronizes the properties of replicated artifacts.
        """
        return pulumi.get(self, "sync_properties")

    @sync_properties.setter
    def sync_properties(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_properties", value)

    @property
    @pulumi.getter(name="syncStatistics")
    def sync_statistics(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, artifact download statistics will also be replicated. Set to avoid inadvertent cleanup at the target instance when setting up replication for disaster recovery.
        """
        return pulumi.get(self, "sync_statistics")

    @sync_statistics.setter
    def sync_statistics(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_statistics", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        The URL of the target local repository on a remote Artifactory server. For some package types, you need to prefix the repository key in the URL with api/<pkg>.
        For a list of package types where this is required, see the [note](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-anchorPREFIX).
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)

    @property
    @pulumi.getter
    def username(self) -> Optional[pulumi.Input[str]]:
        """
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "username")

    @username.setter
    def username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username", value)
@pulumi.input_type
class _PullReplicationState:
    """State-lookup arguments for PullReplication (all fields optional).

    Generated by the Pulumi Terraform Bridge (tfgen) — do not edit by
    hand. Unlike PullReplicationArgs, cron_exp and repo_key are also
    optional here because this type filters existing resources.
    """

    def __init__(__self__, *,
                 check_binary_existence_in_filestore: Optional[pulumi.Input[bool]] = None,
                 cron_exp: Optional[pulumi.Input[str]] = None,
                 enable_event_replication: Optional[pulumi.Input[bool]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 path_prefix: Optional[pulumi.Input[str]] = None,
                 proxy: Optional[pulumi.Input[str]] = None,
                 repo_key: Optional[pulumi.Input[str]] = None,
                 socket_timeout_millis: Optional[pulumi.Input[int]] = None,
                 sync_deletes: Optional[pulumi.Input[bool]] = None,
                 sync_properties: Optional[pulumi.Input[bool]] = None,
                 sync_statistics: Optional[pulumi.Input[bool]] = None,
                 url: Optional[pulumi.Input[str]] = None,
                 username: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering PullReplication resources.
        :param pulumi.Input[bool] check_binary_existence_in_filestore: When true, enables distributed checksum storage. For more information, see
               [Optimizing Repository Replication with Checksum-Based Storage](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-OptimizingRepositoryReplicationUsingStorageLevelSynchronizationOptions).
        :param pulumi.Input[bool] enable_event_replication: When set, each event will trigger replication of the artifacts changed in this event. This can be any type of event on artifact, e.g. added, deleted or property change.
        :param pulumi.Input[bool] enabled: When set, this replication will be enabled when saved.
        :param pulumi.Input[str] password: Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] path_prefix: Only artifacts that located in path that matches the subpath within the remote repository will be replicated.
        :param pulumi.Input[str] proxy: Proxy key from Artifactory Proxies setting
        :param pulumi.Input[bool] sync_deletes: When set, items that were deleted locally should also be deleted remotely (also applies to properties metadata).
        :param pulumi.Input[bool] sync_properties: When set, the task also synchronizes the properties of replicated artifacts.
        :param pulumi.Input[bool] sync_statistics: When set, artifact download statistics will also be replicated. Set to avoid inadvertent cleanup at the target instance when setting up replication for disaster recovery.
        :param pulumi.Input[str] url: The URL of the target local repository on a remote Artifactory server. For some package types, you need to prefix the repository key in the URL with api/<pkg>.
               For a list of package types where this is required, see the [note](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-anchorPREFIX).
               Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] username: Required for local repository, but not needed for remote repository.
        """
        # Every field is set only when a non-None value was supplied.
        if check_binary_existence_in_filestore is not None:
            pulumi.set(__self__, "check_binary_existence_in_filestore", check_binary_existence_in_filestore)
        if cron_exp is not None:
            pulumi.set(__self__, "cron_exp", cron_exp)
        if enable_event_replication is not None:
            pulumi.set(__self__, "enable_event_replication", enable_event_replication)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if password is not None:
            pulumi.set(__self__, "password", password)
        if path_prefix is not None:
            pulumi.set(__self__, "path_prefix", path_prefix)
        if proxy is not None:
            pulumi.set(__self__, "proxy", proxy)
        if repo_key is not None:
            pulumi.set(__self__, "repo_key", repo_key)
        if socket_timeout_millis is not None:
            pulumi.set(__self__, "socket_timeout_millis", socket_timeout_millis)
        if sync_deletes is not None:
            pulumi.set(__self__, "sync_deletes", sync_deletes)
        if sync_properties is not None:
            pulumi.set(__self__, "sync_properties", sync_properties)
        if sync_statistics is not None:
            pulumi.set(__self__, "sync_statistics", sync_statistics)
        if url is not None:
            pulumi.set(__self__, "url", url)
        if username is not None:
            pulumi.set(__self__, "username", username)

    # Property getters/setters below delegate storage to pulumi.get/set.

    @property
    @pulumi.getter(name="checkBinaryExistenceInFilestore")
    def check_binary_existence_in_filestore(self) -> Optional[pulumi.Input[bool]]:
        """
        When true, enables distributed checksum storage. For more information, see
        [Optimizing Repository Replication with Checksum-Based Storage](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-OptimizingRepositoryReplicationUsingStorageLevelSynchronizationOptions).
        """
        return pulumi.get(self, "check_binary_existence_in_filestore")

    @check_binary_existence_in_filestore.setter
    def check_binary_existence_in_filestore(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "check_binary_existence_in_filestore", value)

    @property
    @pulumi.getter(name="cronExp")
    def cron_exp(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cron_exp")

    @cron_exp.setter
    def cron_exp(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cron_exp", value)

    @property
    @pulumi.getter(name="enableEventReplication")
    def enable_event_replication(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, each event will trigger replication of the artifacts changed in this event. This can be any type of event on artifact, e.g. added, deleted or property change.
        """
        return pulumi.get(self, "enable_event_replication")

    @enable_event_replication.setter
    def enable_event_replication(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_event_replication", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, this replication will be enabled when saved.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter
    def password(self) -> Optional[pulumi.Input[str]]:
        """
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "password")

    @password.setter
    def password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "password", value)

    @property
    @pulumi.getter(name="pathPrefix")
    def path_prefix(self) -> Optional[pulumi.Input[str]]:
        """
        Only artifacts that located in path that matches the subpath within the remote repository will be replicated.
        """
        return pulumi.get(self, "path_prefix")

    @path_prefix.setter
    def path_prefix(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path_prefix", value)

    @property
    @pulumi.getter
    def proxy(self) -> Optional[pulumi.Input[str]]:
        """
        Proxy key from Artifactory Proxies setting
        """
        return pulumi.get(self, "proxy")

    @proxy.setter
    def proxy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "proxy", value)

    @property
    @pulumi.getter(name="repoKey")
    def repo_key(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "repo_key")

    @repo_key.setter
    def repo_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "repo_key", value)

    @property
    @pulumi.getter(name="socketTimeoutMillis")
    def socket_timeout_millis(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "socket_timeout_millis")

    @socket_timeout_millis.setter
    def socket_timeout_millis(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "socket_timeout_millis", value)

    @property
    @pulumi.getter(name="syncDeletes")
    def sync_deletes(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, items that were deleted locally should also be deleted remotely (also applies to properties metadata).
        """
        return pulumi.get(self, "sync_deletes")

    @sync_deletes.setter
    def sync_deletes(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_deletes", value)

    @property
    @pulumi.getter(name="syncProperties")
    def sync_properties(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, the task also synchronizes the properties of replicated artifacts.
        """
        return pulumi.get(self, "sync_properties")

    @sync_properties.setter
    def sync_properties(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_properties", value)

    @property
    @pulumi.getter(name="syncStatistics")
    def sync_statistics(self) -> Optional[pulumi.Input[bool]]:
        """
        When set, artifact download statistics will also be replicated. Set to avoid inadvertent cleanup at the target instance when setting up replication for disaster recovery.
        """
        return pulumi.get(self, "sync_statistics")

    @sync_statistics.setter
    def sync_statistics(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_statistics", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        The URL of the target local repository on a remote Artifactory server. For some package types, you need to prefix the repository key in the URL with api/<pkg>.
        For a list of package types where this is required, see the [note](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-anchorPREFIX).
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)

    @property
    @pulumi.getter
    def username(self) -> Optional[pulumi.Input[str]]:
        """
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "username")

    @username.setter
    def username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username", value)
class PullReplication(pulumi.CustomResource):
    # NOTE(review): machine-generated Pulumi resource wrapper. The two
    # @overload stubs below only describe the supported calling conventions;
    # the concrete `__init__` dispatches to `_internal_init` based on whether
    # a PullReplicationArgs object or keyword arguments were supplied.

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 check_binary_existence_in_filestore: Optional[pulumi.Input[bool]] = None,
                 cron_exp: Optional[pulumi.Input[str]] = None,
                 enable_event_replication: Optional[pulumi.Input[bool]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 path_prefix: Optional[pulumi.Input[str]] = None,
                 proxy: Optional[pulumi.Input[str]] = None,
                 repo_key: Optional[pulumi.Input[str]] = None,
                 socket_timeout_millis: Optional[pulumi.Input[int]] = None,
                 sync_deletes: Optional[pulumi.Input[bool]] = None,
                 sync_properties: Optional[pulumi.Input[bool]] = None,
                 sync_statistics: Optional[pulumi.Input[bool]] = None,
                 url: Optional[pulumi.Input[str]] = None,
                 username: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides an Artifactory pull replication resource. This can be used to create and manage pull replication in Artifactory
        for a local or remote repo. Pull replication provides a convenient way to proactively populate a remote cache, and is very useful
        when waiting for new artifacts to arrive on demand (when first requested) is not desirable due to network latency.
        See the [Official Documentation](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-PullReplication).

        ## Example Usage

        ```python
        import pulumi
        import pulumi_artifactory as artifactory

        # Create a replication between two artifactory local repositories
        provider_test_source = artifactory.LocalMavenRepository("providerTestSource", key="provider_test_source")
        provider_test_dest = artifactory.RemoteMavenRepository("providerTestDest",
            key="provider_test_dest",
            password="bar",
            url=f"https://example.com/artifactory/{artifactory_local_maven_repository['artifactory_local_maven_repository']['key']}",
            username="foo")
        remote_rep = artifactory.PullReplication("remote-rep",
            cron_exp="0 0 * * * ?",
            enable_event_replication=True,
            repo_key=provider_test_dest.key)
        ```

        ## Import

        Pull replication config can be imported using its repo key, e.g.

        ```sh
        $ pulumi import artifactory:index/pullReplication:PullReplication foo-rep repository-key
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] check_binary_existence_in_filestore: When true, enables distributed checksum storage. For more information, see
               [Optimizing Repository Replication with Checksum-Based Storage](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-OptimizingRepositoryReplicationUsingStorageLevelSynchronizationOptions).
        :param pulumi.Input[bool] enable_event_replication: When set, each event will trigger replication of the artifacts changed in this event. This can be any type of event on artifact, e.g. added, deleted or property change.
        :param pulumi.Input[bool] enabled: When set, this replication will be enabled when saved.
        :param pulumi.Input[str] password: Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] path_prefix: Only artifacts that located in path that matches the subpath within the remote repository will be replicated.
        :param pulumi.Input[str] proxy: Proxy key from Artifactory Proxies setting
        :param pulumi.Input[bool] sync_deletes: When set, items that were deleted locally should also be deleted remotely (also applies to properties metadata).
        :param pulumi.Input[bool] sync_properties: When set, the task also synchronizes the properties of replicated artifacts.
        :param pulumi.Input[bool] sync_statistics: When set, artifact download statistics will also be replicated. Set to avoid inadvertent cleanup at the target instance when setting up replication for disaster recovery.
        :param pulumi.Input[str] url: The URL of the target local repository on a remote Artifactory server. For some package types, you need to prefix the repository key in the URL with api/<pkg>.
               For a list of package types where this is required, see the [note](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-anchorPREFIX).
               Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] username: Required for local repository, but not needed for remote repository.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: PullReplicationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides an Artifactory pull replication resource. This can be used to create and manage pull replication in Artifactory
        for a local or remote repo. Pull replication provides a convenient way to proactively populate a remote cache, and is very useful
        when waiting for new artifacts to arrive on demand (when first requested) is not desirable due to network latency.
        See the [Official Documentation](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-PullReplication).

        ## Example Usage

        ```python
        import pulumi
        import pulumi_artifactory as artifactory

        # Create a replication between two artifactory local repositories
        provider_test_source = artifactory.LocalMavenRepository("providerTestSource", key="provider_test_source")
        provider_test_dest = artifactory.RemoteMavenRepository("providerTestDest",
            key="provider_test_dest",
            password="bar",
            url=f"https://example.com/artifactory/{artifactory_local_maven_repository['artifactory_local_maven_repository']['key']}",
            username="foo")
        remote_rep = artifactory.PullReplication("remote-rep",
            cron_exp="0 0 * * * ?",
            enable_event_replication=True,
            repo_key=provider_test_dest.key)
        ```

        ## Import

        Pull replication config can be imported using its repo key, e.g.

        ```sh
        $ pulumi import artifactory:index/pullReplication:PullReplication foo-rep repository-key
        ```

        :param str resource_name: The name of the resource.
        :param PullReplicationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: if an args object was
        # passed, expand it into keyword arguments for _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(PullReplicationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       check_binary_existence_in_filestore: Optional[pulumi.Input[bool]] = None,
                       cron_exp: Optional[pulumi.Input[str]] = None,
                       enable_event_replication: Optional[pulumi.Input[bool]] = None,
                       enabled: Optional[pulumi.Input[bool]] = None,
                       password: Optional[pulumi.Input[str]] = None,
                       path_prefix: Optional[pulumi.Input[str]] = None,
                       proxy: Optional[pulumi.Input[str]] = None,
                       repo_key: Optional[pulumi.Input[str]] = None,
                       socket_timeout_millis: Optional[pulumi.Input[int]] = None,
                       sync_deletes: Optional[pulumi.Input[bool]] = None,
                       sync_properties: Optional[pulumi.Input[bool]] = None,
                       sync_statistics: Optional[pulumi.Input[bool]] = None,
                       url: Optional[pulumi.Input[str]] = None,
                       username: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may only be supplied when
            # adopting an existing resource via opts.id (see `get` below).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = PullReplicationArgs.__new__(PullReplicationArgs)
            __props__.__dict__["check_binary_existence_in_filestore"] = check_binary_existence_in_filestore
            # cron_exp and repo_key are required unless looking up an
            # existing resource by URN.
            if cron_exp is None and not opts.urn:
                raise TypeError("Missing required property 'cron_exp'")
            __props__.__dict__["cron_exp"] = cron_exp
            __props__.__dict__["enable_event_replication"] = enable_event_replication
            __props__.__dict__["enabled"] = enabled
            __props__.__dict__["password"] = password
            __props__.__dict__["path_prefix"] = path_prefix
            __props__.__dict__["proxy"] = proxy
            if repo_key is None and not opts.urn:
                raise TypeError("Missing required property 'repo_key'")
            __props__.__dict__["repo_key"] = repo_key
            __props__.__dict__["socket_timeout_millis"] = socket_timeout_millis
            __props__.__dict__["sync_deletes"] = sync_deletes
            __props__.__dict__["sync_properties"] = sync_properties
            __props__.__dict__["sync_statistics"] = sync_statistics
            __props__.__dict__["url"] = url
            __props__.__dict__["username"] = username
        super(PullReplication, __self__).__init__(
            'artifactory:index/pullReplication:PullReplication',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            check_binary_existence_in_filestore: Optional[pulumi.Input[bool]] = None,
            cron_exp: Optional[pulumi.Input[str]] = None,
            enable_event_replication: Optional[pulumi.Input[bool]] = None,
            enabled: Optional[pulumi.Input[bool]] = None,
            password: Optional[pulumi.Input[str]] = None,
            path_prefix: Optional[pulumi.Input[str]] = None,
            proxy: Optional[pulumi.Input[str]] = None,
            repo_key: Optional[pulumi.Input[str]] = None,
            socket_timeout_millis: Optional[pulumi.Input[int]] = None,
            sync_deletes: Optional[pulumi.Input[bool]] = None,
            sync_properties: Optional[pulumi.Input[bool]] = None,
            sync_statistics: Optional[pulumi.Input[bool]] = None,
            url: Optional[pulumi.Input[str]] = None,
            username: Optional[pulumi.Input[str]] = None) -> 'PullReplication':
        """
        Get an existing PullReplication resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] check_binary_existence_in_filestore: When true, enables distributed checksum storage. For more information, see
               [Optimizing Repository Replication with Checksum-Based Storage](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-OptimizingRepositoryReplicationUsingStorageLevelSynchronizationOptions).
        :param pulumi.Input[bool] enable_event_replication: When set, each event will trigger replication of the artifacts changed in this event. This can be any type of event on artifact, e.g. added, deleted or property change.
        :param pulumi.Input[bool] enabled: When set, this replication will be enabled when saved.
        :param pulumi.Input[str] password: Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] path_prefix: Only artifacts that located in path that matches the subpath within the remote repository will be replicated.
        :param pulumi.Input[str] proxy: Proxy key from Artifactory Proxies setting
        :param pulumi.Input[bool] sync_deletes: When set, items that were deleted locally should also be deleted remotely (also applies to properties metadata).
        :param pulumi.Input[bool] sync_properties: When set, the task also synchronizes the properties of replicated artifacts.
        :param pulumi.Input[bool] sync_statistics: When set, artifact download statistics will also be replicated. Set to avoid inadvertent cleanup at the target instance when setting up replication for disaster recovery.
        :param pulumi.Input[str] url: The URL of the target local repository on a remote Artifactory server. For some package types, you need to prefix the repository key in the URL with api/<pkg>.
               For a list of package types where this is required, see the [note](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-anchorPREFIX).
               Required for local repository, but not needed for remote repository.
        :param pulumi.Input[str] username: Required for local repository, but not needed for remote repository.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Populate a state object directly; the engine reconciles it against
        # the provider's view of the resource identified by `id`.
        __props__ = _PullReplicationState.__new__(_PullReplicationState)
        __props__.__dict__["check_binary_existence_in_filestore"] = check_binary_existence_in_filestore
        __props__.__dict__["cron_exp"] = cron_exp
        __props__.__dict__["enable_event_replication"] = enable_event_replication
        __props__.__dict__["enabled"] = enabled
        __props__.__dict__["password"] = password
        __props__.__dict__["path_prefix"] = path_prefix
        __props__.__dict__["proxy"] = proxy
        __props__.__dict__["repo_key"] = repo_key
        __props__.__dict__["socket_timeout_millis"] = socket_timeout_millis
        __props__.__dict__["sync_deletes"] = sync_deletes
        __props__.__dict__["sync_properties"] = sync_properties
        __props__.__dict__["sync_statistics"] = sync_statistics
        __props__.__dict__["url"] = url
        __props__.__dict__["username"] = username
        return PullReplication(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="checkBinaryExistenceInFilestore")
    def check_binary_existence_in_filestore(self) -> pulumi.Output[Optional[bool]]:
        """
        When true, enables distributed checksum storage. For more information, see
        [Optimizing Repository Replication with Checksum-Based Storage](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-OptimizingRepositoryReplicationUsingStorageLevelSynchronizationOptions).
        """
        return pulumi.get(self, "check_binary_existence_in_filestore")
    @property
    @pulumi.getter(name="cronExp")
    def cron_exp(self) -> pulumi.Output[str]:
        # Replication schedule as a cron expression (required input).
        return pulumi.get(self, "cron_exp")
    @property
    @pulumi.getter(name="enableEventReplication")
    def enable_event_replication(self) -> pulumi.Output[bool]:
        """
        When set, each event will trigger replication of the artifacts changed in this event. This can be any type of event on artifact, e.g. added, deleted or property change.
        """
        return pulumi.get(self, "enable_event_replication")
    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Output[bool]:
        """
        When set, this replication will be enabled when saved.
        """
        return pulumi.get(self, "enabled")
    @property
    @pulumi.getter
    def password(self) -> pulumi.Output[Optional[str]]:
        """
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "password")
    @property
    @pulumi.getter(name="pathPrefix")
    def path_prefix(self) -> pulumi.Output[Optional[str]]:
        """
        Only artifacts that located in path that matches the subpath within the remote repository will be replicated.
        """
        return pulumi.get(self, "path_prefix")
    @property
    @pulumi.getter
    def proxy(self) -> pulumi.Output[Optional[str]]:
        """
        Proxy key from Artifactory Proxies setting
        """
        return pulumi.get(self, "proxy")
    @property
    @pulumi.getter(name="repoKey")
    def repo_key(self) -> pulumi.Output[str]:
        # Key of the repository this replication is attached to (required input).
        return pulumi.get(self, "repo_key")
    @property
    @pulumi.getter(name="socketTimeoutMillis")
    def socket_timeout_millis(self) -> pulumi.Output[int]:
        # Network socket timeout for the replication, in milliseconds.
        return pulumi.get(self, "socket_timeout_millis")
    @property
    @pulumi.getter(name="syncDeletes")
    def sync_deletes(self) -> pulumi.Output[bool]:
        """
        When set, items that were deleted locally should also be deleted remotely (also applies to properties metadata).
        """
        return pulumi.get(self, "sync_deletes")
    @property
    @pulumi.getter(name="syncProperties")
    def sync_properties(self) -> pulumi.Output[bool]:
        """
        When set, the task also synchronizes the properties of replicated artifacts.
        """
        return pulumi.get(self, "sync_properties")
    @property
    @pulumi.getter(name="syncStatistics")
    def sync_statistics(self) -> pulumi.Output[bool]:
        """
        When set, artifact download statistics will also be replicated. Set to avoid inadvertent cleanup at the target instance when setting up replication for disaster recovery.
        """
        return pulumi.get(self, "sync_statistics")
    @property
    @pulumi.getter
    def url(self) -> pulumi.Output[Optional[str]]:
        """
        The URL of the target local repository on a remote Artifactory server. For some package types, you need to prefix the repository key in the URL with api/<pkg>.
        For a list of package types where this is required, see the [note](https://www.jfrog.com/confluence/display/JFROG/Repository+Replication#RepositoryReplication-anchorPREFIX).
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "url")
    @property
    @pulumi.getter
    def username(self) -> pulumi.Output[Optional[str]]:
        """
        Required for local repository, but not needed for remote repository.
        """
        return pulumi.get(self, "username")
| 51.505604
| 243
| 0.68285
| 4,884
| 41,359
| 5.589066
| 0.05733
| 0.070118
| 0.083526
| 0.045939
| 0.935634
| 0.929333
| 0.920028
| 0.914643
| 0.907243
| 0.897168
| 0
| 0.000156
| 0.224256
| 41,359
| 802
| 244
| 51.569825
| 0.850642
| 0.403612
| 0
| 0.849462
| 1
| 0
| 0.103104
| 0.04068
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165591
| false
| 0.051613
| 0.010753
| 0.019355
| 0.275269
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
8c0470f67ea4fbf14b6ec5ce5943a98a3c18cba4
| 188
|
py
|
Python
|
scoss/metrics/__init__.py
|
ngocjr7/scoss
|
a70f56a076ea5d64158625d5f221987a0210cc2d
|
[
"MIT"
] | 4
|
2021-06-15T21:32:40.000Z
|
2022-02-23T08:16:10.000Z
|
scoss/metrics/__init__.py
|
ngocjr7/scoss
|
a70f56a076ea5d64158625d5f221987a0210cc2d
|
[
"MIT"
] | 1
|
2021-04-18T18:16:59.000Z
|
2021-04-18T18:16:59.000Z
|
scoss/metrics/__init__.py
|
ngocjr7/scoss
|
a70f56a076ea5d64158625d5f221987a0210cc2d
|
[
"MIT"
] | 1
|
2021-05-09T19:33:43.000Z
|
2021-05-09T19:33:43.000Z
|
from __future__ import absolute_import
from .metric import Metric
from .operator_based_metric import *
from .metric_list import MetricList, all_metrics
from .token_based_metric import *
| 23.5
| 48
| 0.840426
| 26
| 188
| 5.653846
| 0.461538
| 0.244898
| 0.217687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12234
| 188
| 7
| 49
| 26.857143
| 0.890909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8c06881577471655556071cdd4a7ca138f889a1f
| 19,629
|
py
|
Python
|
io_scene_vrm/editor/vrm1/operator.py
|
iCyP/VRM_IMPORTER_for_Blender2.8
|
fdabb11f125eea9363061ba240dc5b4376f4143d
|
[
"MIT"
] | 26
|
2020-05-25T07:24:57.000Z
|
2020-08-27T06:43:48.000Z
|
io_scene_vrm/editor/vrm1/operator.py
|
iCyP/VRM_IMPORTER_for_Blender2.8
|
fdabb11f125eea9363061ba240dc5b4376f4143d
|
[
"MIT"
] | 3
|
2020-06-05T15:09:32.000Z
|
2020-08-13T09:46:13.000Z
|
io_scene_vrm/editor/vrm1/operator.py
|
iCyP/VRM_IMPORTER_for_Blender2.8
|
fdabb11f125eea9363061ba240dc5b4376f4143d
|
[
"MIT"
] | 1
|
2020-05-25T07:25:47.000Z
|
2020-05-25T07:25:47.000Z
|
from typing import Set
import bpy
class VRM_OT_add_vrm1_meta_author(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append a blank author entry to an armature's VRM 1.0 meta."""

    bl_idname = "vrm.add_vrm1_meta_author"
    bl_label = "Add Author"
    bl_description = "Add VRM 1.0 Meta Author"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        target = bpy.data.armatures.get(self.armature_data_name)
        if isinstance(target, bpy.types.Armature):
            target.vrm_addon_extension.vrm1.meta.authors.add()
            return {"FINISHED"}
        return {"CANCELLED"}
class VRM_OT_remove_vrm1_meta_author(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete the author entry at ``author_index`` from VRM 1.0 meta."""

    bl_idname = "vrm.remove_vrm1_meta_author"
    bl_label = "Remove Author"
    bl_description = "Remove VRM 1.0 Meta Author"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    author_index: bpy.props.IntProperty()  # type: ignore[valid-type]

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        data = bpy.data.armatures.get(self.armature_data_name)
        if isinstance(data, bpy.types.Armature):
            author_collection = data.vrm_addon_extension.vrm1.meta.authors
            index = self.author_index
            # Reject out-of-range indices instead of raising.
            if index < len(author_collection):
                author_collection.remove(index)
                return {"FINISHED"}
        return {"CANCELLED"}
class VRM_OT_add_vrm1_meta_reference(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append a blank reference entry to an armature's VRM 1.0 meta."""

    bl_idname = "vrm.add_vrm1_meta_reference"
    bl_label = "Add Reference"
    bl_description = "Add VRM 1.0 Meta Reference"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        target = bpy.data.armatures.get(self.armature_data_name)
        if isinstance(target, bpy.types.Armature):
            target.vrm_addon_extension.vrm1.meta.references.add()
            return {"FINISHED"}
        return {"CANCELLED"}
class VRM_OT_remove_vrm1_meta_reference(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete the reference entry at ``reference_index`` from VRM 1.0 meta."""

    bl_idname = "vrm.remove_vrm1_meta_reference"
    bl_label = "Remove Reference"
    bl_description = "Remove VRM 1.0 Meta Reference"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    reference_index: bpy.props.IntProperty()  # type: ignore[valid-type]

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        data = bpy.data.armatures.get(self.armature_data_name)
        if isinstance(data, bpy.types.Armature):
            reference_collection = data.vrm_addon_extension.vrm1.meta.references
            index = self.reference_index
            # Reject out-of-range indices instead of raising.
            if index < len(reference_collection):
                reference_collection.remove(index)
                return {"FINISHED"}
        return {"CANCELLED"}
class VRM_OT_add_vrm1_expressions_custom_expression(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Create a custom expression named ``custom_expression_name``."""

    bl_idname = "vrm.add_vrm1_expressions_custom_expression"
    bl_label = "Add Custom Expression"
    bl_description = "Add VRM 1.0 Custom Expression"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    custom_expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        target = bpy.data.armatures.get(self.armature_data_name)
        if not isinstance(target, bpy.types.Armature):
            return {"CANCELLED"}
        new_expression = target.vrm_addon_extension.vrm1.expressions.custom.add()
        new_expression.custom_name = self.custom_expression_name
        return {"FINISHED"}
class VRM_OT_remove_vrm1_expressions_custom_expression(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete the first custom expression matching ``custom_expression_name``."""

    bl_idname = "vrm.remove_vrm1_expressions_custom_expression"
    bl_label = "Remove Custom Expression"
    bl_description = "Remove VRM 1.0 Custom Expression"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    custom_expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        data = bpy.data.armatures.get(self.armature_data_name)
        if not isinstance(data, bpy.types.Armature):
            return {"CANCELLED"}
        custom = data.vrm_addon_extension.vrm1.expressions.custom
        wanted = self.custom_expression_name
        # Snapshot the collection, since removal invalidates iteration.
        for index, expression in enumerate(list(custom.values())):
            if expression.custom_name != wanted:
                continue
            custom.remove(index)
            return {"FINISHED"}
        return {"CANCELLED"}
class VRM_OT_add_vrm1_first_person_mesh_annotation(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append a blank mesh annotation to the VRM 1.0 first-person settings."""

    bl_idname = "vrm.add_vrm1_first_person_mesh_annotation"
    bl_label = "Add Mesh Annotation"
    bl_description = "Add VRM 1.0 First Person Mesh Annotation"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        armature = bpy.data.armatures.get(self.armature_data_name)
        # Consistency fix: the sibling operators in this module validate the
        # lookup with isinstance rather than a bare `is None` check; this is
        # behaviorally equivalent for `bpy.data.armatures.get` results but
        # stricter, and keeps the module uniform.
        if not isinstance(armature, bpy.types.Armature):
            return {"CANCELLED"}
        armature.vrm_addon_extension.vrm1.first_person.mesh_annotations.add()
        return {"FINISHED"}
class VRM_OT_remove_vrm1_first_person_mesh_annotation(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete the mesh annotation at ``mesh_annotation_index``."""

    bl_idname = "vrm.remove_vrm1_first_person_mesh_annotation"
    bl_label = "Remove Mesh Annotation"
    bl_description = "Remove VRM 1.0 First Person Mesh Annotation"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    mesh_annotation_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        armature = bpy.data.armatures.get(self.armature_data_name)
        # Consistency fix: match the isinstance validation used by the other
        # armature-datablock operators in this module (equivalent to the
        # previous `is None` check for `bpy.data.armatures.get` results).
        if not isinstance(armature, bpy.types.Armature):
            return {"CANCELLED"}
        mesh_annotations = (
            armature.vrm_addon_extension.vrm1.first_person.mesh_annotations
        )
        # Reject out-of-range indices instead of raising.
        if len(mesh_annotations) <= self.mesh_annotation_index:
            return {"CANCELLED"}
        mesh_annotations.remove(self.mesh_annotation_index)
        return {"FINISHED"}
class VRM_OT_add_vrm1_material_value_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append a material value bind to the selected blend shape group."""

    bl_idname = "vrm.add_vrm1_material_value_bind"
    bl_label = "Add material value bind"
    bl_description = "Add VRM 0.x BlendShape Material Value Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    blend_shape_group_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        # This operator addresses the armature *object*, not the datablock.
        obj = bpy.data.objects.get(self.armature_name)
        if obj is None or obj.type != "ARMATURE":
            return {"CANCELLED"}
        group_index = self.blend_shape_group_index
        groups = (
            obj.data.vrm_addon_extension.vrm1.blend_shape_master.blend_shape_groups
        )
        if group_index >= len(groups):
            return {"CANCELLED"}
        groups[group_index].material_values.add()
        return {"FINISHED"}
class VRM_OT_remove_vrm1_material_value_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete a material value bind from the selected blend shape group."""

    bl_idname = "vrm.remove_vrm1_material_value_bind"
    bl_label = "Remove material value bind"
    bl_description = "Remove VRM 0.x BlendShape Material Value Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    blend_shape_group_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )
    material_value_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        obj = bpy.data.objects.get(self.armature_name)
        if obj is None or obj.type != "ARMATURE":
            return {"CANCELLED"}
        group_index = self.blend_shape_group_index
        groups = (
            obj.data.vrm_addon_extension.vrm1.blend_shape_master.blend_shape_groups
        )
        if group_index >= len(groups):
            return {"CANCELLED"}
        value_index = self.material_value_index
        values = groups[group_index].material_values
        if value_index >= len(values):
            return {"CANCELLED"}
        values.remove(value_index)
        return {"FINISHED"}
class VRM_OT_add_vrm1_material_value_bind_target_value(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append a target value to a material value bind."""

    bl_idname = "vrm.add_vrm1_material_value_bind_target_value"
    bl_label = "Add value"
    bl_description = "Add VRM 0.x BlendShape Material Value Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    blend_shape_group_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )
    material_value_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        obj = bpy.data.objects.get(self.armature_name)
        if obj is None or obj.type != "ARMATURE":
            return {"CANCELLED"}
        group_index = self.blend_shape_group_index
        groups = (
            obj.data.vrm_addon_extension.vrm1.blend_shape_master.blend_shape_groups
        )
        if group_index >= len(groups):
            return {"CANCELLED"}
        value_index = self.material_value_index
        values = groups[group_index].material_values
        if value_index >= len(values):
            return {"CANCELLED"}
        values[value_index].target_value.add()
        return {"FINISHED"}
class VRM_OT_remove_vrm1_material_value_bind_target_value(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete a target value from a material value bind."""

    bl_idname = "vrm.remove_vrm1_material_value_bind_target_value"
    bl_label = "Remove value"
    bl_description = "Remove VRM 0.x BlendShape Material Value Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    blend_shape_group_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )
    material_value_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )
    target_value_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        obj = bpy.data.objects.get(self.armature_name)
        if obj is None or obj.type != "ARMATURE":
            return {"CANCELLED"}
        group_index = self.blend_shape_group_index
        groups = (
            obj.data.vrm_addon_extension.vrm1.blend_shape_master.blend_shape_groups
        )
        if group_index >= len(groups):
            return {"CANCELLED"}
        value_index = self.material_value_index
        values = groups[group_index].material_values
        if value_index >= len(values):
            return {"CANCELLED"}
        target_index = self.target_value_index
        targets = values[value_index].target_value
        if target_index >= len(targets):
            return {"CANCELLED"}
        targets.remove(target_index)
        return {"FINISHED"}
class VRM_OT_add_vrm1_expression_morph_target_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append a morph target bind to the named expression."""

    bl_idname = "vrm.add_vrm1_expression_morph_target_bind"
    bl_label = "Add Morph Target Bind"
    bl_description = "Add VRM 1.0 Expression Morph Target Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        data = bpy.data.armatures.get(self.armature_data_name)
        if data is None:
            return {"CANCELLED"}
        # Resolve the expression (preset or custom) by its name.
        expressions = data.vrm_addon_extension.vrm1.expressions
        found = expressions.all_name_to_expression_dict().get(self.expression_name)
        if found is None:
            return {"CANCELLED"}
        found.morph_target_binds.add()
        return {"FINISHED"}
class VRM_OT_remove_vrm1_expression_morph_target_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete the morph target bind at ``bind_index`` from the named expression."""

    bl_idname = "vrm.remove_vrm1_expression_morph_target_bind"
    bl_label = "Remove Morph Target Bind"
    bl_description = "Remove VRM 1.0 Expression Morph Target Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]
    bind_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        data = bpy.data.armatures.get(self.armature_data_name)
        if data is None:
            return {"CANCELLED"}
        expressions = data.vrm_addon_extension.vrm1.expressions
        found = expressions.all_name_to_expression_dict().get(self.expression_name)
        if found is None:
            return {"CANCELLED"}
        index = self.bind_index
        # Reject out-of-range indices instead of raising.
        if index >= len(found.morph_target_binds):
            return {"CANCELLED"}
        found.morph_target_binds.remove(index)
        return {"FINISHED"}
class VRM_OT_add_vrm1_expression_material_color_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append a material color bind to the named expression."""

    bl_idname = "vrm.add_vrm1_expression_material_color_bind"
    bl_label = "Add Material Color Bind"
    bl_description = "Add VRM 1.0 Expression Material Value Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        data = bpy.data.armatures.get(self.armature_data_name)
        if data is None:
            return {"CANCELLED"}
        # Resolve the expression (preset or custom) by its name.
        found = data.vrm_addon_extension.vrm1.expressions.all_name_to_expression_dict().get(
            self.expression_name
        )
        if found is None:
            return {"CANCELLED"}
        found.material_color_binds.add()
        return {"FINISHED"}
class VRM_OT_remove_vrm1_expression_material_color_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Delete the material color bind at ``bind_index`` from the named expression."""

    bl_idname = "vrm.remove_vrm1_expression_material_color_bind"
    bl_label = "Remove Material Color Bind"
    bl_description = "Remove VRM 1.0 Expression Material Color Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]
    bind_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        data = bpy.data.armatures.get(self.armature_data_name)
        if data is None:
            return {"CANCELLED"}
        found = data.vrm_addon_extension.vrm1.expressions.all_name_to_expression_dict().get(
            self.expression_name
        )
        if found is None:
            return {"CANCELLED"}
        index = self.bind_index
        # Reject out-of-range indices instead of raising.
        if index >= len(found.material_color_binds):
            return {"CANCELLED"}
        found.material_color_binds.remove(index)
        return {"FINISHED"}
class VRM_OT_add_vrm1_expression_texture_transform_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Append an empty texture transform bind to a VRM 1.0 expression."""

    bl_idname = "vrm.add_vrm1_expression_texture_transform_bind"
    bl_label = "Add Texture Transform Bind"
    bl_description = "Add VRM 1.0 Expression Texture Transform Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        """Add one texture transform bind; cancel if the armature or expression is missing."""
        armature_data = bpy.data.armatures.get(self.armature_data_name)
        if armature_data is None:
            return {"CANCELLED"}
        name_to_expression = (
            armature_data.vrm_addon_extension.vrm1.expressions.all_name_to_expression_dict()
        )
        target = name_to_expression.get(self.expression_name)
        if target is None:
            return {"CANCELLED"}
        target.texture_transform_binds.add()
        return {"FINISHED"}
class VRM_OT_remove_vrm1_expression_texture_transform_bind(bpy.types.Operator):  # type: ignore[misc] # noqa: N801
    """Remove a texture transform bind, addressed by index, from a VRM 1.0 expression."""

    bl_idname = "vrm.remove_vrm1_expression_texture_transform_bind"
    bl_label = "Remove Texture Transform Bind"
    bl_description = "Remove VRM 1.0 Expression Texture Transform Bind"
    bl_options = {"REGISTER", "UNDO"}

    armature_data_name: bpy.props.StringProperty(  # type: ignore[valid-type]
        options={"HIDDEN"}  # noqa: F821
    )
    expression_name: bpy.props.StringProperty()  # type: ignore[valid-type]
    bind_index: bpy.props.IntProperty(  # type: ignore[valid-type]
        min=0, options={"HIDDEN"}  # noqa: F821
    )

    def execute(self, _context: bpy.types.Context) -> Set[str]:
        """Delete the bind at ``bind_index``; cancel on any failed lookup or bad index."""
        armature_data = bpy.data.armatures.get(self.armature_data_name)
        if armature_data is None:
            return {"CANCELLED"}
        name_to_expression = (
            armature_data.vrm_addon_extension.vrm1.expressions.all_name_to_expression_dict()
        )
        target = name_to_expression.get(self.expression_name)
        if target is None or self.bind_index >= len(target.texture_transform_binds):
            return {"CANCELLED"}
        target.texture_transform_binds.remove(self.bind_index)
        return {"FINISHED"}
| 42.212903
| 114
| 0.678231
| 2,404
| 19,629
| 5.286606
| 0.039517
| 0.045637
| 0.047211
| 0.0598
| 0.950744
| 0.927768
| 0.904556
| 0.84751
| 0.809348
| 0.792195
| 0
| 0.015723
| 0.212645
| 19,629
| 464
| 115
| 42.303879
| 0.8066
| 0.096133
| 0
| 0.616368
| 0
| 0
| 0.15163
| 0.040204
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046036
| false
| 0
| 0.005115
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
8c4d67ed38982987ff97a7578c50ebf53bf6e25f
| 25,640
|
py
|
Python
|
ANSPatterns/uppercase_alphabets/ualp.py
|
SaikumarS2611/ANSPatterns
|
a1800d1ac10b30c85e286ac3543cd8a19d1094ed
|
[
"MIT"
] | null | null | null |
ANSPatterns/uppercase_alphabets/ualp.py
|
SaikumarS2611/ANSPatterns
|
a1800d1ac10b30c85e286ac3543cd8a19d1094ed
|
[
"MIT"
] | null | null | null |
ANSPatterns/uppercase_alphabets/ualp.py
|
SaikumarS2611/ANSPatterns
|
a1800d1ac10b30c85e286ac3543cd8a19d1094ed
|
[
"MIT"
] | null | null | null |
def for_A():
    """ Upper case Alphabet letter 'A' pattern using Python for loop"""
    # Star cells: top edge minus corners, both vertical sides, crossbar on row 3.
    for r in range(6):
        line = ''
        for c in range(5):
            line += '* ' if (r == 0 and c % 4 != 0) or (c % 4 == 0 and r > 0) or r == 3 else '  '
        print(line)


def while_A():
    """ Upper case Alphabet letter 'A' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if (r == 0 and c % 4 != 0) or (c % 4 == 0 and r > 0) or r == 3 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_B():
    """ Upper case Alphabet letter 'B' pattern using Python for loop"""
    # Star cells: spine, three horizontal bars, right edge between the bars.
    for r in range(7):
        line = ''
        for c in range(5):
            line += '* ' if c == 0 or (r % 3 == 0 and c < 4) or (c == 4 and r % 3 != 0) else '  '
        print(line)


def while_B():
    """ Upper case Alphabet letter 'B' pattern using Python while loop"""
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if c == 0 or (r % 3 == 0 and c < 4) or (c == 4 and r % 3 != 0) else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_C():
    """ Upper case Alphabet letter 'C' pattern using Python for loop"""
    # Star cells: open top/bottom edges, left spine, short right hooks.
    for r in range(6):
        line = ''
        for c in range(5):
            star = (r in (0, 5) and 0 < c < 4) or (c == 0 and 0 < r < 5) or (c == 4 and r in (1, 4))
            line += '* ' if star else '  '
        print(line)


def while_C():
    """ Upper case Alphabet letter 'C' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            star = (r in (0, 5) and 0 < c < 4) or (c == 0 and 0 < r < 5) or (c == 4 and r in (1, 4))
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
# using for loop
def for_D():
    """ Upper case Alphabet letter 'D' pattern using Python for loop"""
    # Star cells: spine, shortened top/bottom edges, right edge between them.
    for r in range(6):
        line = ''
        for c in range(5):
            line += '* ' if c == 0 or (r in (0, 5) and c < 4) or (c == 4 and 0 < r < 5) else '  '
        print(line)


# using while loop
def while_D():
    """ Upper case Alphabet letter 'D' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if c == 0 or (r in (0, 5) and c < 4) or (c == 4 and 0 < r < 5) else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_E():
    """ Upper case Alphabet letter 'E' pattern using Python for loop"""
    # Star cells: spine plus bars on rows 0, 3 and 6.
    for r in range(7):
        line = ''
        for c in range(4):
            line += '* ' if c == 0 or r % 3 == 0 else '  '
        print(line)


def while_E():
    """ Upper case Alphabet letter 'E' pattern using Python while loop"""
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 4:
            line += '* ' if c == 0 or r % 3 == 0 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_F():
    """ Upper case Alphabet letter 'F' pattern using Python for loop"""
    # Like E, but without the bottom bar (row 6 excluded).
    for r in range(7):
        line = ''
        for c in range(4):
            line += '* ' if c == 0 or (r % 3 == 0 and r != 6) else '  '
        print(line)


def while_F():
    """ Upper case Alphabet letter 'F' pattern using Python while loop"""
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 4:
            line += '* ' if c == 0 or (r % 3 == 0 and r != 6) else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_G():
    """ Upper case Alphabet letter 'G' pattern using Python for loop"""
    # Star cells: left spine, top/bottom edges, inner bar, right-side hooks.
    for r in range(7):
        line = ''
        for c in range(5):
            star = ((c == 0 and 0 < r < 6)
                    or (r % 3 == 0 and 0 < c < 4 and r != 3)
                    or (r == 3 and c > 1)
                    or (c == 4 and (2 < r < 6 or r == 1)))
            line += '* ' if star else '  '
        print(line)


def while_G():
    """ Upper case Alphabet letter 'G' pattern using Python while loop"""
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 5:
            star = ((c == 0 and 0 < r < 6)
                    or (r % 3 == 0 and 0 < c < 4 and r != 3)
                    or (r == 3 and c > 1)
                    or (c == 4 and (2 < r < 6 or r == 1)))
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_H():
    """ Upper case Alphabet letter 'H' pattern using Python for loop"""
    # Star cells: both vertical sides plus the crossbar on row 3.
    for r in range(7):
        line = ''
        for c in range(5):
            line += '* ' if c % 4 == 0 or r == 3 else '  '
        print(line)


def while_H():
    """ Upper case Alphabet letter 'H' pattern using Python while loop"""
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if c % 4 == 0 or r == 3 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_I():
    """ Upper case Alphabet letter 'I' pattern using Python for loop"""
    # Star cells: full top/bottom bars plus the centre column.
    for r in range(6):
        line = ''
        for c in range(5):
            line += '* ' if r in (0, 5) or c == 2 else '  '
        print(line)


def while_I():
    """ Upper case Alphabet letter 'I' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if r in (0, 5) or c == 2 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_J():
    """ Upper case Alphabet letter 'J' pattern using Python for loop"""
    # Star cells: top bar, centre stem, bottom-left curve.
    for r in range(6):
        line = ''
        for c in range(5):
            star = r == 0 or (c == 2 and r < 5) or (r + c == 6 and c < 3) or (c == 0 and r == 4)
            line += '* ' if star else '  '
        print(line)


def while_J():
    """ Upper case Alphabet letter 'J' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            star = r == 0 or (c == 2 and r < 5) or (r + c == 6 and c < 3) or (c == 0 and r == 4)
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_K():
    """ Upper case Alphabet letter 'K' pattern using Python for loop"""
    # Star cells: spine plus the two diagonals meeting at the spine.
    for r in range(6):
        line = ''
        for c in range(4):
            line += '* ' if c == 0 or r + c == 3 or r - c == 2 else '  '
        print(line)


def while_K():
    """ Upper case Alphabet letter 'K' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 4:
            line += '* ' if c == 0 or r + c == 3 or r - c == 2 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_L():
    """ Upper case Alphabet letter 'L' pattern using Python for loop"""
    # Star cells: spine plus the bottom bar.
    for r in range(6):
        line = ''
        for c in range(4):
            line += '* ' if c == 0 or r == 5 else '  '
        print(line)


def while_L():
    """ Upper case Alphabet letter 'L' pattern using Python while loop"""
    # BUG FIX: this function was a verbatim copy of for_L and used for loops,
    # contradicting its name and docstring; it now uses while loops (same output).
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 4:
            line += '* ' if c == 0 or r == 5 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_M():
    """ Upper case Alphabet letter 'M' pattern using Python for loop"""
    # Star cells: outer sides plus the upper V in the middle.
    for r in range(6):
        line = ''
        for c in range(5):
            line += '* ' if c in (0, 4) or ((r - c == 0 or r + c == 4) and r < 3) else '  '
        print(line)


def while_M():
    """ Upper case Alphabet letter 'M' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if c in (0, 4) or ((r - c == 0 or r + c == 4) and r < 3) else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_N():
    """ Upper case Alphabet letter 'N' pattern using Python for loop"""
    # Star cells: outer sides plus the main diagonal.
    for r in range(5):
        line = ''
        for c in range(5):
            line += '* ' if c in (0, 4) or r - c == 0 else '  '
        print(line)


def while_N():
    """ Upper case Alphabet letter 'N' pattern using Python while loop"""
    r = 0
    while r < 5:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if c in (0, 4) or r - c == 0 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_O():
    """ Upper case Alphabet letter 'O' pattern using Python for loop"""
    # Star cells: a rounded rectangle (edges minus the four corners).
    for r in range(6):
        line = ''
        for c in range(5):
            line += '* ' if (r in (0, 5) and 0 < c < 4) or (c in (0, 4) and 0 < r < 5) else '  '
        print(line)


def while_O():
    """ Upper case Alphabet letter 'O' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if (r in (0, 5) and 0 < c < 4) or (c in (0, 4) and 0 < r < 5) else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_P():
    """ Upper case Alphabet letter 'P' pattern using Python for loop"""
    # Star cells: spine, top/middle bars, right edge of the upper bowl.
    for r in range(6):
        line = ''
        for c in range(4):
            star = c == 0 or (r % 3 == 0 and c < 3) or (c == 3 and r % 3 != 0 and r < 3)
            line += '* ' if star else '  '
        print(line)


def while_P():
    """ Upper case Alphabet letter 'P' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 4:
            star = c == 0 or (r % 3 == 0 and c < 3) or (c == 3 and r % 3 != 0 and r < 3)
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_Q():
    """ Upper case Alphabet letter 'Q' pattern using Python for loop"""
    # Star cells: rounded ring plus a diagonal tail in the lower right.
    for r in range(5):
        line = ''
        for c in range(5):
            star = ((c in (0, 4) and 0 < r < 4)
                    or (r in (0, 4) and 0 < c < 4)
                    or (c - r == 0 and r > 2))
            line += '* ' if star else '  '
        print(line)


def while_Q():
    """ Upper case Alphabet letter 'Q' pattern using Python while loop"""
    r = 0
    while r < 5:
        c = 0
        line = ''
        while c < 5:
            star = ((c in (0, 4) and 0 < r < 4)
                    or (r in (0, 4) and 0 < c < 4)
                    or (c - r == 0 and r > 2))
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_R():
    """ Upper case Alphabet letter 'R' pattern using Python for loop"""
    # Star cells: P shape plus a diagonal leg (r - c == 2).
    for r in range(6):
        line = ''
        for c in range(5):
            star = (c == 0 or (r % 3 == 0 and c < 3)
                    or (c == 3 and r % 3 != 0 and r < 3) or r - c == 2)
            line += '* ' if star else '  '
        print(line)


def while_R():
    """ Upper case Alphabet letter 'R' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            star = (c == 0 or (r % 3 == 0 and c < 3)
                    or (c == 3 and r % 3 != 0 and r < 3) or r - c == 2)
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_S():
    """ Upper case Alphabet letter 'S' pattern using Python for loop"""
    # Star cells: three bars, left edge in the upper half, right edge in the lower half.
    for r in range(7):
        line = ''
        for c in range(5):
            star = ((r % 3 == 0 and 0 < c < 4)
                    or (c == 0 and r % 3 != 0 and r < 3)
                    or (c == 4 and r % 3 != 0 and r > 3))
            line += '* ' if star else '  '
        print(line)


def while_S():
    """ Upper case Alphabet letter 'S' pattern using Python while loop"""
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 5:
            star = ((r % 3 == 0 and 0 < c < 4)
                    or (c == 0 and r % 3 != 0 and r < 3)
                    or (c == 4 and r % 3 != 0 and r > 3))
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_T():
    """ Upper case Alphabet letter 'T' pattern using Python for loop"""
    # Star cells: top bar plus the centre column.
    for r in range(5):
        line = ''
        for c in range(3):
            line += '* ' if r == 0 or c == 1 else '  '
        print(line)


def while_T():
    """ Upper case Alphabet letter 'T' pattern using Python while loop"""
    r = 0
    while r < 5:
        c = 0
        line = ''
        while c < 3:
            line += '* ' if r == 0 or c == 1 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_U():
    """ Upper case Alphabet letter 'U' pattern using Python for loop"""
    # Star cells: both sides down to row 4, curved bottom on row 5.
    for r in range(6):
        line = ''
        for c in range(5):
            line += '* ' if (c % 4 == 0 and r < 5) or (r == 5 and 0 < c < 4) else '  '
        print(line)


def while_U():
    """ Upper case Alphabet letter 'U' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 5:
            line += '* ' if (c % 4 == 0 and r < 5) or (r == 5 and 0 < c < 4) else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_V():
    """ Upper case Alphabet letter 'V' pattern using Python for loop"""
    # Two diagonals meeting at the bottom centre; cells are single characters
    # (this letter intentionally prints without the extra column spacing).
    for r in range(7):
        line = ''
        for c in range(13):
            line += '*' if r == c or r + c == 12 else ' '
        print(line)


def while_V():
    """ Upper case Alphabet letter 'V' pattern using Python while loop"""
    # BUG FIX: the star branch used end=' ' while for_V used end='', which pushed
    # every star one column right and distorted the V; both variants now emit
    # identical, correctly aligned output.
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 13:
            line += '*' if r == c or r + c == 12 else ' '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_W():
    """ Upper case Alphabet letter 'W' pattern using Python for loop"""
    # Four diagonal strokes; cells are single characters (no column spacing).
    for r in range(5):
        line = ''
        for c in range(27):
            star = (r == c or (r > 1 and r + c == 8) or r + c == 13
                    or (r == 3 and r + c == 11))
            line += '*' if star else ' '
        print(line)


def while_W():
    """ Upper case Alphabet letter 'W' pattern using Python while loop"""
    r = 0
    while r < 5:
        c = 0
        line = ''
        while c < 27:
            star = (r == c or (r > 1 and r + c == 8) or r + c == 13
                    or (r == 3 and r + c == 11))
            line += '*' if star else ' '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_X():
    """ Upper case Alphabet letter 'X' pattern using Python for loop"""
    # Star cells: the two crossing diagonals.
    for r in range(5):
        line = ''
        for c in range(6):
            line += '* ' if r - c == 0 or r + c == 4 else '  '
        print(line)


def while_X():
    """ Upper case Alphabet letter 'X' pattern using Python while loop"""
    r = 0
    while r < 5:
        c = 0
        line = ''
        while c < 6:
            line += '* ' if r - c == 0 or r + c == 4 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_Y():
    """ Upper case Alphabet letter 'Y' pattern using Python for loop"""
    # Star cells: upper V (rows 0-2) joining a centre stem (rows 3+).
    for r in range(7):
        line = ''
        for c in range(7):
            star = (c == 3 and r > 2) or ((r - c == 0 or r + c == 6) and r < 3)
            line += '* ' if star else '  '
        print(line)


def while_Y():
    """ Upper case Alphabet letter 'Y' pattern using Python while loop"""
    r = 0
    while r < 7:
        c = 0
        line = ''
        while c < 7:
            star = (c == 3 and r > 2) or ((r - c == 0 or r + c == 6) and r < 3)
            line += '* ' if star else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
def for_Z():
    """ Upper case Alphabet letter 'Z' pattern using Python for loop"""
    # Star cells: top/bottom bars joined by the anti-diagonal.
    for r in range(6):
        line = ''
        for c in range(6):
            line += '* ' if r in (0, 5) or r + c == 5 else '  '
        print(line)


def while_Z():
    """ Upper case Alphabet letter 'Z' pattern using Python while loop"""
    r = 0
    while r < 6:
        c = 0
        line = ''
        while c < 6:
            line += '* ' if r in (0, 5) or r + c == 5 else '  '
            c += 1
        print(line)
        r += 1
# ----------------------------------------------------------------------
| 36.628571
| 160
| 0.306084
| 2,478
| 25,640
| 3.146086
| 0.024213
| 0.106721
| 0.113391
| 0.153412
| 0.982299
| 0.980631
| 0.978194
| 0.978194
| 0.970626
| 0.82196
| 0
| 0.041646
| 0.520515
| 25,640
| 699
| 161
| 36.680973
| 0.592484
| 0.201716
| 0
| 0.877907
| 0
| 0
| 0.010346
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100775
| false
| 0
| 0
| 0
| 0.100775
| 0.302326
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4fbeccec8e3ecab3bc6941b100648e2d46b447b6
| 7,473
|
py
|
Python
|
tests/tests_linked_list_helpers.py
|
quervernetzt/linked-lists
|
4dda6402cdbdaa6150995428e99f100ae9214ce5
|
[
"MIT"
] | null | null | null |
tests/tests_linked_list_helpers.py
|
quervernetzt/linked-lists
|
4dda6402cdbdaa6150995428e99f100ae9214ce5
|
[
"MIT"
] | null | null | null |
tests/tests_linked_list_helpers.py
|
quervernetzt/linked-lists
|
4dda6402cdbdaa6150995428e99f100ae9214ce5
|
[
"MIT"
] | null | null | null |
import unittest
from solution.linked_list import LinkedList
from solution.linked_list_helpers import LinkedListHelpers
class TestCasesLinkedListHelpers(unittest.TestCase):
    """Tests for LinkedListHelpers.union / LinkedListHelpers.intersection.

    BUG FIX: the methods are prefixed ``execute_tests_`` and were therefore
    never collected by standard unittest discovery (which requires a ``test``
    prefix), so this suite silently ran zero tests. ``test_*`` aliases are
    added at the bottom of the class so the suite actually runs, while the
    original names remain callable for any existing external driver.
    """

    @staticmethod
    def _filled_list(elements):
        # Build a LinkedList from a plain Python iterable.
        linked_list = LinkedList()
        for element in elements:
            linked_list.append(element)
        return linked_list

    def execute_tests_both_linked_lists_none(self) -> None:
        """union/intersection of two None inputs return None."""
        helpers = LinkedListHelpers()
        self.assertIsNone(helpers.union(None, None))
        self.assertIsNone(helpers.intersection(None, None))

    def execute_tests_both_linked_lists_first_is_none(self) -> None:
        """union/intersection return None when the first list is None."""
        helpers = LinkedListHelpers()
        second = self._filled_list([6, 32, 4, 9, 6, 1, 11, 21, 1])
        self.assertIsNone(helpers.union(None, second))
        self.assertIsNone(helpers.intersection(None, second))

    def execute_tests_both_linked_lists_second_is_none(self) -> None:
        """union/intersection return None when the second list is None."""
        helpers = LinkedListHelpers()
        first = self._filled_list([6, 32, 4, 9, 6, 1, 11, 21, 1])
        self.assertIsNone(helpers.union(first, None))
        self.assertIsNone(helpers.intersection(first, None))

    def execute_tests_both_linked_lists_both_empty_lists(self) -> None:
        """union/intersection of two empty lists are empty lists."""
        helpers = LinkedListHelpers()
        first = LinkedList()
        second = LinkedList()
        self.assertEqual(helpers.union(first, second).size(), 0)
        self.assertEqual(helpers.intersection(first, second).size(), 0)

    def execute_tests_both_linked_lists_first_empty_list(self) -> None:
        """Union deduplicates the non-empty side; intersection is empty."""
        helpers = LinkedListHelpers()
        first = LinkedList()
        second = self._filled_list([6, 32, 4, 9, 6, 1, 11, 21, 1])
        result_union = helpers.union(first, second)
        result_intersection = helpers.intersection(first, second)
        self.assertEqual(result_union.size(), 7)
        self.assertListEqual(sorted(result_union.to_list()), [1, 4, 6, 9, 11, 21, 32])
        self.assertEqual(result_intersection.size(), 0)
        self.assertListEqual(sorted(result_intersection.to_list()), [])

    def execute_tests_both_linked_lists_second_empty_list(self) -> None:
        """Union deduplicates the non-empty side; intersection is empty."""
        helpers = LinkedListHelpers()
        first = self._filled_list([6, 32, 4, 9, 6, 1, 11, 21, 1])
        second = LinkedList()
        result_union = helpers.union(first, second)
        result_intersection = helpers.intersection(first, second)
        self.assertEqual(result_union.size(), 7)
        self.assertListEqual(sorted(result_union.to_list()), [1, 4, 6, 9, 11, 21, 32])
        self.assertEqual(result_intersection.size(), 0)
        self.assertListEqual(sorted(result_intersection.to_list()), [])

    def execute_tests_both_lists_with_elements_and_overlap(self) -> None:
        """Overlapping inputs: union merges distinct values, intersection keeps common ones."""
        helpers = LinkedListHelpers()
        first = self._filled_list([3, 2, 4, 35, 6, 65, 6, 4, 3, 21])
        second = self._filled_list([6, 32, 4, 9, 6, 1, 11, 21, 1])
        result_union = helpers.union(first, second)
        result_intersection = helpers.intersection(first, second)
        self.assertEqual(result_union.size(), 11)
        self.assertListEqual(
            sorted(result_union.to_list()), [1, 2, 3, 4, 6, 9, 11, 21, 32, 35, 65])
        self.assertEqual(result_intersection.size(), 3)
        self.assertListEqual(sorted(result_intersection.to_list()), [4, 6, 21])

    def execute_tests_both_lists_with_elements_and_no_overlap(self) -> None:
        """Disjoint inputs: union merges distinct values, intersection is empty."""
        helpers = LinkedListHelpers()
        first = self._filled_list([3, 2, 4, 35, 6, 65, 6, 4, 3, 21])
        second = self._filled_list([32, 9, 1, 11, 1])
        result_union = helpers.union(first, second)
        result_intersection = helpers.intersection(first, second)
        self.assertEqual(result_union.size(), 11)
        self.assertListEqual(
            sorted(result_union.to_list()), [1, 2, 3, 4, 6, 9, 11, 21, 32, 35, 65])
        self.assertEqual(result_intersection.size(), 0)
        self.assertListEqual(sorted(result_intersection.to_list()), [])

    # unittest discovery aliases: method names must start with "test" to run.
    test_both_linked_lists_none = execute_tests_both_linked_lists_none
    test_both_linked_lists_first_is_none = execute_tests_both_linked_lists_first_is_none
    test_both_linked_lists_second_is_none = execute_tests_both_linked_lists_second_is_none
    test_both_linked_lists_both_empty_lists = execute_tests_both_linked_lists_both_empty_lists
    test_both_linked_lists_first_empty_list = execute_tests_both_linked_lists_first_empty_list
    test_both_linked_lists_second_empty_list = execute_tests_both_linked_lists_second_empty_list
    test_both_lists_with_elements_and_overlap = execute_tests_both_lists_with_elements_and_overlap
    test_both_lists_with_elements_and_no_overlap = execute_tests_both_lists_with_elements_and_no_overlap
| 39.331579
| 94
| 0.670146
| 903
| 7,473
| 5.17165
| 0.065338
| 0.175589
| 0.065953
| 0.092505
| 0.96167
| 0.956103
| 0.930193
| 0.916702
| 0.904069
| 0.904069
| 0
| 0.041444
| 0.247692
| 7,473
| 189
| 95
| 39.539683
| 0.789221
| 0.020206
| 0
| 0.869231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184615
| 1
| 0.061538
| false
| 0
| 0.023077
| 0
| 0.092308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b163f890e217643fcaefe824a3c54f46a903688
| 953
|
py
|
Python
|
01_make-aws-lambda-handler-testable/python/code_final/test/test_main.py
|
jaymecd/package-aws-lambda
|
f435cb6daa68954c704d01b862c5ca466b9db9ef
|
[
"MIT"
] | null | null | null |
01_make-aws-lambda-handler-testable/python/code_final/test/test_main.py
|
jaymecd/package-aws-lambda
|
f435cb6daa68954c704d01b862c5ca466b9db9ef
|
[
"MIT"
] | null | null | null |
01_make-aws-lambda-handler-testable/python/code_final/test/test_main.py
|
jaymecd/package-aws-lambda
|
f435cb6daa68954c704d01b862c5ca466b9db9ef
|
[
"MIT"
] | null | null | null |
import sys
from unittest import mock
@mock.patch("src.services.bootstrap", autospec=True)
@mock.patch.dict("os.environ", {"COST_CENTER": "a1b2c3"})
def test__handler__success(bootstrap):
    """With COST_CENTER set, importing src.main wires ``handler`` via bootstrap."""
    try:
        # Purge any cached import so the module-level bootstrap call re-runs
        # under the patched environment; must happen BEFORE the import below.
        del sys.modules["src.main"]
    except KeyError:
        pass
    # Mocked bootstrap hands back a ready-made handler callable.
    bootstrap.return_value = lambda event, context: event
    # Import after mocks/env are in place — src.main resolves handler at import time.
    from src.main import handler
    assert handler == bootstrap.return_value
    bootstrap.assert_called_once_with(cost_center="a1b2c3")
@mock.patch("src.services.bootstrap", autospec=True)
@mock.patch.dict("os.environ", clear=True)
def test__handler__missing_envvar(bootstrap):
    """With an empty environment, src.main passes cost_center=None to bootstrap."""
    try:
        # Purge any cached import so the module-level bootstrap call re-runs
        # under the cleared environment; must happen BEFORE the import below.
        del sys.modules["src.main"]
    except KeyError:
        pass
    # Mocked bootstrap hands back a ready-made handler callable.
    bootstrap.return_value = lambda event, context: event
    # Import after mocks/env are in place — src.main resolves handler at import time.
    from src.main import handler
    assert handler == bootstrap.return_value
    bootstrap.assert_called_once_with(cost_center=None)
| 24.435897
| 59
| 0.711438
| 122
| 953
| 5.377049
| 0.360656
| 0.054878
| 0.121951
| 0.060976
| 0.82622
| 0.82622
| 0.82622
| 0.82622
| 0.82622
| 0.82622
| 0
| 0.007752
| 0.187828
| 953
| 38
| 60
| 25.078947
| 0.839793
| 0.047219
| 0
| 0.666667
| 0
| 0
| 0.113812
| 0.048619
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.083333
| false
| 0.083333
| 0.166667
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8ce67109d051433a35825633acfb54387e329887
| 147
|
py
|
Python
|
AutoDiff/__init__.py
|
autodiff-cs207/AutoDiff
|
56cb0f9be0b88f4e9c94350d53dfff689be8424f
|
[
"MIT"
] | 1
|
2018-10-18T16:10:02.000Z
|
2018-10-18T16:10:02.000Z
|
AutoDiff/__init__.py
|
autodiff-cs207/cs207-FinalProject
|
56cb0f9be0b88f4e9c94350d53dfff689be8424f
|
[
"MIT"
] | 1
|
2018-11-28T17:20:11.000Z
|
2018-12-03T15:50:33.000Z
|
AutoDiff/__init__.py
|
autodiff-cs207/cs207-FinalProject
|
56cb0f9be0b88f4e9c94350d53dfff689be8424f
|
[
"MIT"
] | null | null | null |
from AutoDiff.ad import Variable
from AutoDiff.ad import DiffObj
from AutoDiff.ad import MathOps
from AutoDiff.ad import VectorFunction
import math
| 29.4
| 38
| 0.857143
| 22
| 147
| 5.727273
| 0.409091
| 0.380952
| 0.444444
| 0.634921
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115646
| 147
| 5
| 39
| 29.4
| 0.969231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0fda1a1672176986447501beeda230a1277a34a1
| 184
|
py
|
Python
|
stibium/_fbclient.py
|
szymonszl/stibium
|
bf9a7756edd8dcaeb65f1afdacbf8f4780827d00
|
[
"BSD-3-Clause"
] | null | null | null |
stibium/_fbclient.py
|
szymonszl/stibium
|
bf9a7756edd8dcaeb65f1afdacbf8f4780827d00
|
[
"BSD-3-Clause"
] | null | null | null |
stibium/_fbclient.py
|
szymonszl/stibium
|
bf9a7756edd8dcaeb65f1afdacbf8f4780827d00
|
[
"BSD-3-Clause"
] | null | null | null |
"""This class provides the hooks to the standard fbchat Client class"""
import fbchat
from .dataclasses import Thread, Message, Reaction
class Client(fbchat.Client):
    """Stibium's hook point into fbchat.

    Currently a bare subclass of ``fbchat.Client`` with no overrides —
    presumably the event hooks that would translate fbchat events into the
    Thread/Message/Reaction dataclasses imported above are still to be
    implemented (TODO: confirm intended design).
    """
    pass  # FIXME: override the fbchat event hooks
| 26.285714
| 71
| 0.766304
| 25
| 184
| 5.64
| 0.68
| 0.170213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157609
| 184
| 7
| 72
| 26.285714
| 0.909677
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ba1a673ee2b6bf7996127e4cbd04e6196314403d
| 12,423
|
py
|
Python
|
asana/resources/gen/custom_fields.py
|
FiyaFly/python-asana
|
ef9e6ff3e82e9f1ca18d526401f524698c7215c7
|
[
"MIT"
] | 266
|
2015-02-13T18:14:08.000Z
|
2022-03-29T22:03:33.000Z
|
asana/resources/gen/custom_fields.py
|
FiyaFly/python-asana
|
ef9e6ff3e82e9f1ca18d526401f524698c7215c7
|
[
"MIT"
] | 77
|
2015-02-13T00:22:11.000Z
|
2022-02-20T07:56:14.000Z
|
asana/resources/gen/custom_fields.py
|
FiyaFly/python-asana
|
ef9e6ff3e82e9f1ca18d526401f524698c7215c7
|
[
"MIT"
] | 95
|
2015-03-18T23:28:57.000Z
|
2022-02-20T23:28:58.000Z
|
# coding=utf-8
class _CustomFields:
def __init__(self, client=None):
    """Store the API client used to issue HTTP requests.

    :param client: client object exposing request helpers (e.g. ``post``);
        defaults to None, in which case request methods will fail until set.
    """
    self.client = client
def create_custom_field(self, params=None, **options):
"""Create a custom field
:param Object params: Parameters for the request
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/custom_fields"
return self.client.post(path, params, **options)
def create_enum_option_for_custom_field(self, custom_field_gid, params=None, **options):
"""Create an enum option
:param str custom_field_gid: (required) Globally unique identifier for the custom field.
:param Object params: Parameters for the request
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/custom_fields/{custom_field_gid}/enum_options".replace("{custom_field_gid}", custom_field_gid)
return self.client.post(path, params, **options)
def delete_custom_field(self, custom_field_gid, params=None, **options):
"""Delete a custom field
:param str custom_field_gid: (required) Globally unique identifier for the custom field.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/custom_fields/{custom_field_gid}".replace("{custom_field_gid}", custom_field_gid)
return self.client.delete(path, params, **options)
def get_custom_field(self, custom_field_gid, params=None, **options):
"""Get a custom field
:param str custom_field_gid: (required) Globally unique identifier for the custom field.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/custom_fields/{custom_field_gid}".replace("{custom_field_gid}", custom_field_gid)
return self.client.get(path, params, **options)
def get_custom_fields_for_workspace(self, workspace_gid, params=None, **options):
"""Get a workspace's custom fields
:param str workspace_gid: (required) Globally unique identifier for the workspace or organization.
:param Object params: Parameters for the request
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/workspaces/{workspace_gid}/custom_fields".replace("{workspace_gid}", workspace_gid)
return self.client.get_collection(path, params, **options)
def insert_enum_option_for_custom_field(self, custom_field_gid, params=None, **options):
"""Reorder a custom field's enum
:param str custom_field_gid: (required) Globally unique identifier for the custom field.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/custom_fields/{custom_field_gid}/enum_options/insert".replace("{custom_field_gid}", custom_field_gid)
return self.client.post(path, params, **options)
def update_custom_field(self, custom_field_gid, params=None, **options):
"""Update a custom field
:param str custom_field_gid: (required) Globally unique identifier for the custom field.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/custom_fields/{custom_field_gid}".replace("{custom_field_gid}", custom_field_gid)
return self.client.put(path, params, **options)
def update_enum_option(self, enum_option_gid, params=None, **options):
"""Update an enum option
:param str enum_option_gid: (required) Globally unique identifier for the enum option.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/enum_options/{enum_option_gid}".replace("{enum_option_gid}", enum_option_gid)
return self.client.put(path, params, **options)
| 101
| 517
| 0.724785
| 1,837
| 12,423
| 4.835602
| 0.083832
| 0.050771
| 0.039401
| 0.024316
| 0.948103
| 0.936395
| 0.927502
| 0.918271
| 0.907126
| 0.891591
| 0
| 0.001337
| 0.217339
| 12,423
| 122
| 518
| 101.827869
| 0.91227
| 0.761008
| 0
| 0.55814
| 0
| 0
| 0.166941
| 0.11102
| 0
| 0
| 0
| 0
| 0
| 1
| 0.209302
| false
| 0
| 0
| 0
| 0.418605
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba209501e2063a347b9e1c506ede41d98dc2fc2f
| 3,238
|
py
|
Python
|
sunpy/timeseries/sources/tests/test_goes.py
|
Octaves0911/sunpy
|
d3dff03fe6cc404e40f22da90200ffbb3d38c1a7
|
[
"BSD-2-Clause"
] | 1
|
2019-03-11T12:28:25.000Z
|
2019-03-11T12:28:25.000Z
|
sunpy/timeseries/sources/tests/test_goes.py
|
Octaves0911/sunpy
|
d3dff03fe6cc404e40f22da90200ffbb3d38c1a7
|
[
"BSD-2-Clause"
] | 10
|
2017-08-10T07:55:42.000Z
|
2020-04-19T10:56:43.000Z
|
sunpy/timeseries/sources/tests/test_goes.py
|
Octaves0911/sunpy
|
d3dff03fe6cc404e40f22da90200ffbb3d38c1a7
|
[
"BSD-2-Clause"
] | 1
|
2019-02-06T11:57:56.000Z
|
2019-02-06T11:57:56.000Z
|
import pytest
import sunpy.timeseries
from sunpy.data.test import get_test_filepath
# Paths to the bundled GOES test files: a legacy FITS file (plain and
# gzip-compressed) and truncated netCDF files for the newer GOES-15 and
# GOES-17 data formats.
goes_filepath = get_test_filepath('go1520110607.fits')
goes_filepath_com = get_test_filepath('go1520120601.fits.gz')
new_goes15_filepath = get_test_filepath('goes_truncated_test_goes15.nc')
new_goes17_filepath = get_test_filepath('goes_truncated_test_goes17.nc')
def test_implicit_goes():
    """The TimeSeries factory auto-detects the GOES XRS source for FITS."""
    series = sunpy.timeseries.TimeSeries(goes_filepath)
    assert isinstance(series, sunpy.timeseries.sources.goes.XRSTimeSeries)
def test_implicit_goes_com():
    """Auto-detection also works for the gzip-compressed FITS file."""
    series = sunpy.timeseries.TimeSeries(goes_filepath_com)
    assert isinstance(series, sunpy.timeseries.sources.goes.XRSTimeSeries)
def test_implicit_new_goes15():
    """Auto-detection works for the newer GOES-15 netCDF format."""
    series = sunpy.timeseries.TimeSeries(new_goes15_filepath)
    assert isinstance(series, sunpy.timeseries.sources.goes.XRSTimeSeries)
def test_implicit_new_goes17():
    """Auto-detection works for the newer GOES-17 netCDF format."""
    series = sunpy.timeseries.TimeSeries(new_goes17_filepath)
    assert isinstance(series, sunpy.timeseries.sources.goes.XRSTimeSeries)
def test_implicit_goes_satno():
    """Auto-detected FITS TimeSeries reports its satellite designation."""
    series = sunpy.timeseries.TimeSeries(goes_filepath)
    assert series.observatory == 'GOES-15'
def test_implicit_new_goes15_satno():
    """GOES-15 netCDF TimeSeries reports its satellite designation."""
    series = sunpy.timeseries.TimeSeries(new_goes15_filepath)
    assert series.observatory == 'GOES-15'
def test_implicit_new_goes17_satno():
    """GOES-17 netCDF TimeSeries reports its satellite designation."""
    series = sunpy.timeseries.TimeSeries(new_goes17_filepath)
    assert series.observatory == 'GOES-17'
def test_implicit_goes_satno_missing():
    """Without an 'id' entry in the metadata, the observatory is unknown."""
    series = sunpy.timeseries.TimeSeries(new_goes17_filepath)
    del series.meta.metas[0]['id']
    assert series.observatory is None
def test_goes():
    """An explicit source='XRS' yields a GOES XRSTimeSeries from FITS."""
    series = sunpy.timeseries.TimeSeries(goes_filepath, source='XRS')
    assert isinstance(series, sunpy.timeseries.sources.goes.XRSTimeSeries)
def test_goes_com():
    """An explicit source='XRS' works for the compressed FITS file too."""
    series = sunpy.timeseries.TimeSeries(goes_filepath_com, source='XRS')
    assert isinstance(series, sunpy.timeseries.sources.goes.XRSTimeSeries)
def test_new_goes15():
    """An explicit source='XRS' works for the GOES-15 netCDF file."""
    series = sunpy.timeseries.TimeSeries(new_goes15_filepath, source='XRS')
    assert isinstance(series, sunpy.timeseries.sources.goes.XRSTimeSeries)
def test_new_goes16():
    # Test a GOES TimeSeries
    # NOTE(review): this test is named "goes16" but loads the GOES-17
    # fixture (new_goes17_filepath) -- confirm whether a GOES-16 test file
    # was intended here, as this currently duplicates test_implicit_new_goes17
    # with an explicit source.
    ts_goes = sunpy.timeseries.TimeSeries(new_goes17_filepath, source='XRS')
    assert isinstance(ts_goes, sunpy.timeseries.sources.goes.XRSTimeSeries)
@pytest.mark.remote_data
def test_goes_remote():
    """Load both the older and newer GOES FITS formats over the network."""
    older = sunpy.timeseries.TimeSeries(
        'https://umbra.nascom.nasa.gov/goes/fits/1986/go06860129.fits')
    assert isinstance(older, sunpy.timeseries.sources.goes.XRSTimeSeries)
    newer = sunpy.timeseries.TimeSeries(
        'https://umbra.nascom.nasa.gov/goes/fits/2018/go1520180626.fits')
    assert isinstance(newer, sunpy.timeseries.sources.goes.XRSTimeSeries)
| 34.084211
| 76
| 0.773626
| 431
| 3,238
| 5.568445
| 0.141531
| 0.15625
| 0.19
| 0.175
| 0.84
| 0.813333
| 0.804583
| 0.77125
| 0.769583
| 0.712083
| 0
| 0.031094
| 0.135886
| 3,238
| 94
| 77
| 34.446809
| 0.826662
| 0.123533
| 0
| 0.403846
| 0
| 0
| 0.089267
| 0.020546
| 0
| 0
| 0
| 0
| 0.269231
| 1
| 0.25
| false
| 0
| 0.057692
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e8857c64fb513859bf4f9dc01e1bd86835e98be7
| 61,972
|
py
|
Python
|
demoproject/tests.py
|
everjs0713/MTK_EA_REPORT
|
699a69e5149a139fbc3c79be2f27ecea58c0a6f0
|
[
"BSD-2-Clause"
] | 215
|
2016-07-31T14:31:31.000Z
|
2022-03-18T21:50:06.000Z
|
demoproject/tests.py
|
everjs0713/MTK_EA_REPORT
|
699a69e5149a139fbc3c79be2f27ecea58c0a6f0
|
[
"BSD-2-Clause"
] | 43
|
2016-07-28T10:10:31.000Z
|
2020-12-07T11:03:01.000Z
|
demoproject/tests.py
|
everjs0713/MTK_EA_REPORT
|
699a69e5149a139fbc3c79be2f27ecea58c0a6f0
|
[
"BSD-2-Clause"
] | 63
|
2016-07-28T09:57:05.000Z
|
2021-05-24T11:03:15.000Z
|
import sys
from django.test import TestCase, override_settings
from django.db.models import Avg, Sum
from chartit import PivotDataPool, DataPool, Chart, PivotChart
from chartit.exceptions import APIInputError
from chartit.templatetags import chartit
from chartit.validation import clean_pdps, clean_dps, clean_pcso, clean_cso
from demoproject.models import SalesHistory, MonthlyWeatherByCity, \
MonthlyWeatherSeattle
from utils import assertOptionDictsEqual
TestCase.assertOptionDictsEqual = assertOptionDictsEqual
class GoodPivotSeriesDictInputTests(TestCase):
    """Valid dict-style pivot series inputs are normalized by clean_pdps.

    Each test feeds a well-formed ``series_input`` dict to ``clean_pdps``
    and compares the result with ``series_cleaned``: string shorthands
    become lists and missing optional keys receive their defaults
    (``legend_by`` -> ``()``, ``top_n_per_cat`` -> ``0``, ``field_aliases``
    filled from the last component of each field path).
    """
    def test_all_terms(self):
        """A fully specified input passes through essentially unchanged."""
        series_input = \
            {'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'}}}
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
    def test_categories_is_a_str(self):
        """A single category given as a string is wrapped in a list."""
        series_input = \
            {'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': 'bookstore__city__state',
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'book__genre__name': 'name'}}}
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': ['bookstore__city__state'],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
    def test_legend_by_is_a_str(self):
        """A legend_by given as a string is wrapped in a list."""
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city',
                ],
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city',
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
    def test_no_legend_by(self):
        """A missing legend_by defaults to an empty tuple."""
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city',
                ],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city'
                }
            }
        }
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city',
                ],
                'legend_by': (),
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
    def test_no_top_n_per_cat(self):
        """A missing top_n_per_cat defaults to 0 (no truncation)."""
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 0,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
    def test_no_field_aliases(self):
        """Missing field_aliases are generated from the field paths."""
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5
            }
        }
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
    def test_custom_field_aliases(self):
        """User-supplied aliases are preserved verbatim."""
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'St',
                    'bookstore__city__city': 'Cty',
                    'book__genre__name': 'Genre'
                }
            }
        }
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'St',
                    'bookstore__city__city': 'Cty',
                    'book__genre__name': 'Genre'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
    def test_partial_field_aliases(self):
        """Aliases not supplied by the user are filled with defaults."""
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'St'
                }
            }
        }
        series_cleaned = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city'
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'St',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertOptionDictsEqual(clean_pdps(series_input),
                                    series_cleaned)
class BadPivotSeriesDictInputTests(TestCase):
    """clean_pdps must raise APIInputError for malformed pivot series input.

    Each test corrupts exactly one key of an otherwise valid series dict so
    that the raised APIInputError can only come from the key under test.
    (Fix: the legend_by/top_n_per_cat tests previously also set
    ``'func': 'foobar'``, so the error could be raised for the invalid
    ``func`` instead of the attribute being tested.)
    """
    def test_series_not_dict_or_list(self):
        # A bare string is not a valid series specification.
        series_input = 'foobar'
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_func_dict_wrong_type(self):
        # The per-series options must be a dict, not a string.
        series_input = {'avg_price': 'foobar'}
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_source_missing(self):
        series_input = \
            {'avg_price': {
                'func': Avg('price'),
                'categories': ['bookstore__city__state', 'bookstore__city__city'],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'}}}
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_source_wrong_type(self):
        series_input = \
            {'avg_price': {
                'source': 'foobar',
                'func': Avg('price'),
                'categories': ['bookstore__city__state', 'bookstore__city__city'],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'}}}
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_func_missing(self):
        series_input = \
            {'avg_price': {
                'source': SalesHistory.objects.all(),
                'categories': ['bookstore__city__state', 'bookstore__city__city'],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'}}}
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_func_wrong_type(self):
        series_input = \
            {'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': 'foobar',
                'categories': ['bookstore__city__state', 'bookstore__city__city'],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'}}}
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_categories_missing(self):
        series_input = \
            {'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'}}}
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_categories_wrong_type(self):
        series_input = \
            {'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': 0,
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'}}}
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_categories_not_a_valid_field(self):
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': ['foobar'],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_categories_empty_list(self):
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_legend_by_wrong_type(self):
        # 'func' kept valid so the failure is attributable to legend_by.
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city',
                ],
                'legend_by': 10,
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_legend_by_not_a_valid_field(self):
        # 'func' kept valid so the failure is attributable to legend_by.
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city',
                ],
                'legend_by': ['foobar'],
                'top_n_per_cat': 5,
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertRaises(APIInputError, clean_pdps, series_input)
    def test_top_n_per_cat_wrong_type(self):
        # 'func' kept valid so the failure is attributable to top_n_per_cat.
        series_input = {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'func': Avg('price'),
                'categories': [
                    'bookstore__city__state',
                    'bookstore__city__city',
                ],
                'legend_by': ['book__genre__name'],
                'top_n_per_cat': 'foobar',
                'field_aliases': {
                    'bookstore__city__state': 'state',
                    'bookstore__city__city': 'city',
                    'book__genre__name': 'name'
                }
            }
        }
        self.assertRaises(APIInputError, clean_pdps, series_input)
class GoodPivotSeriesListInputTests(TestCase):
def test_all_terms(self):
series_input = [{
'options': {
'source': SalesHistory.objects.all(),
'categories': 'bookstore__city__state',
'legend_by': 'book__genre__name',
'top_n_per_cat': 2
},
'terms': {
'avg_price': Avg('price'),
'avg_price_all': {
'func': Avg('price'),
'legend_by': None
}
}
}]
series_cleaned = {
'avg_price': {
'source': SalesHistory.objects.all(),
'func': Avg('price'),
'categories': ['bookstore__city__state'],
'legend_by': ['book__genre__name'],
'top_n_per_cat': 2,
'field_aliases': {
'bookstore__city__state': 'state',
'book__genre__name': 'name'
}
},
'avg_price_all': {
'func': Avg('price'),
'source': SalesHistory.objects.all(),
'categories': ['bookstore__city__state'],
'legend_by': (),
'top_n_per_cat': 2,
'field_aliases': {
'bookstore__city__state': 'state'
}
}
}
self.assertOptionDictsEqual(clean_pdps(series_input),
series_cleaned)
def test_source_a_manager(self):
series_input = [{
'options': {
'source': SalesHistory.objects,
'categories': 'bookstore__city__state',
'legend_by': 'book__genre__name',
'top_n_per_cat': 2
},
'terms': {
'avg_price': Avg('price'),
'avg_price_all': {
'func': Avg('price'),
'legend_by': None
}
}
}]
series_cleaned = {
'avg_price': {
'source': SalesHistory.objects.all(),
'func': Avg('price'),
'categories': ['bookstore__city__state'],
'legend_by': ['book__genre__name'],
'top_n_per_cat': 2,
'field_aliases': {
'bookstore__city__state': 'state',
'book__genre__name': 'name'
}
},
'avg_price_all': {
'func': Avg('price'),
'source': SalesHistory.objects.all(),
'categories': ['bookstore__city__state'],
'legend_by': (),
'top_n_per_cat': 2,
'field_aliases': {
'bookstore__city__state': 'state'
}
}
}
self.assertOptionDictsEqual(clean_pdps(series_input),
series_cleaned)
def test_source_a_model(self):
series_input = [{
'options': {
'source': SalesHistory,
'categories': 'bookstore__city__state',
'legend_by': 'book__genre__name',
'top_n_per_cat': 2
},
'terms': {
'avg_price': Avg('price'),
'avg_price_all': {
'func': Avg('price'),
'legend_by': None
}
}
}]
series_cleaned = {
'avg_price': {
'source': SalesHistory.objects.all(),
'func': Avg('price'),
'categories': ['bookstore__city__state'],
'legend_by': ['book__genre__name'],
'top_n_per_cat': 2,
'field_aliases': {
'bookstore__city__state': 'state',
'book__genre__name': 'name'
}
},
'avg_price_all': {
'func': Avg('price'),
'source': SalesHistory.objects.all(),
'categories': ['bookstore__city__state'],
'legend_by': (),
'top_n_per_cat': 2,
'field_aliases': {
'bookstore__city__state': 'state'
}
}
}
self.assertOptionDictsEqual(clean_pdps(series_input),
series_cleaned)
def test_term_opts_an_aggr(self):
series_input = [{
'options': {
'source': SalesHistory.objects.all(),
'categories': ['bookstore__city__state'],
'legend_by': ['book__genre__name'],
'top_n_per_cat': 2
},
'terms': {
'avg_price': Avg('price')
}
}]
series_cleaned = {
'avg_price': {
'source': SalesHistory.objects.all(),
'func': Avg('price'),
'categories': ['bookstore__city__state'],
'legend_by': ['book__genre__name'],
'top_n_per_cat': 2,
'field_aliases': {
'bookstore__city__state': 'state',
'book__genre__name': 'name'
}
}
}
self.assertOptionDictsEqual(clean_pdps(series_input),
series_cleaned)
def test_term_opts_a_dict(self):
series_input = [{
'options': {
'source': SalesHistory.objects.all(),
'categories': 'bookstore__city__state',
'legend_by': 'book__genre__name',
'top_n_per_cat': 2
},
'terms': {
'avg_price': {
'func': Avg('price'),
'top_n_per_cat': 3
}
}
}]
series_cleaned = {
'avg_price': {
'source': SalesHistory.objects.all(),
'func': Avg('price'),
'categories': ['bookstore__city__state'],
'legend_by': ['book__genre__name'],
'top_n_per_cat': 3,
'field_aliases': {
'bookstore__city__state': 'state',
'book__genre__name': 'name'
}
}
}
self.assertOptionDictsEqual(clean_pdps(series_input),
series_cleaned)
def test_opts_empty(self):
series_input = [{
'options': {},
'terms': {
'avg_price': {
'source': SalesHistory.objects.all(),
'categories': ['bookstore__city__state'],
'func': Avg('price'),
'top_n_per_cat':3
}
}
}]
series_cleaned = {
'avg_price': {
'source': SalesHistory.objects.all(),
'func': Avg('price'),
'categories': ['bookstore__city__state'],
'legend_by': (),
'top_n_per_cat': 3,
'field_aliases': {
'bookstore__city__state': 'state'
}
}
}
self.assertOptionDictsEqual(clean_pdps(series_input),
series_cleaned)
def test_categories_a_str(self):
    """'categories' given as a plain string is normalized to a one-element list."""
    series_input = [{
        'options': {},
        'terms': {
            'avg_price': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'func': Avg('price'),
                'top_n_per_cat': 3
            }
        }
    }]
    series_cleaned = {
        'avg_price': {
            'source': SalesHistory.objects.all(),
            'func': Avg('price'),
            'categories': ['bookstore__city__state'],
            'legend_by': (),
            'top_n_per_cat': 3,
            'field_aliases': {
                'bookstore__city__state': 'state'
            }
        }
    }
    self.assertOptionDictsEqual(clean_pdps(series_input),
                                series_cleaned)
def test_legend_by_a_str(self):
    """'legend_by' given as a plain string is normalized to a one-element list."""
    series_input = [{
        'options': {
            'source': SalesHistory.objects.all(),
            'categories': ['bookstore__city__state'],
            'legend_by': 'book__genre__name',
            'top_n_per_cat': 2
        },
        'terms': {
            'avg_price': Avg('price')
        }
    }]
    series_cleaned = {
        'avg_price': {
            'source': SalesHistory.objects.all(),
            'func': Avg('price'),
            'categories': ['bookstore__city__state'],
            'legend_by': ['book__genre__name'],
            'top_n_per_cat': 2,
            'field_aliases': {
                'bookstore__city__state': 'state',
                'book__genre__name': 'name'
            }
        }
    }
    self.assertOptionDictsEqual(clean_pdps(series_input),
                                series_cleaned)
def test_multiple_dicts(self):
    """Two series dicts with different sources are merged into one cleaned dict."""
    series_input = [
        {
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price')
            }
        }, {
            'options': {
                'source': SalesHistory.objects.filter(price__gte=10),
                'categories': 'bookstore__city__city',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price_high': {
                    # legend_by None at term level results in () below
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }
    ]
    series_cleaned = {
        'avg_price': {
            'source': SalesHistory.objects.all(),
            'func': Avg('price'),
            'categories': ['bookstore__city__state'],
            'legend_by': ['book__genre__name'],
            'top_n_per_cat': 2,
            'field_aliases': {
                'bookstore__city__state': 'state',
                'book__genre__name': 'name'
            }
        },
        'avg_price_high': {
            'func': Avg('price'),
            'source': SalesHistory.objects.filter(price__gte=10),
            'categories': ['bookstore__city__city'],
            'legend_by': (),
            'top_n_per_cat': 2,
            'field_aliases': {
                'bookstore__city__city': 'city'
            }
        }
    }
    self.assertOptionDictsEqual(clean_pdps(series_input),
                                series_cleaned)
class BadPivotSeriesListInputTests(TestCase):
    """Invalid pivot-series input must make ``clean_pdps`` raise APIInputError."""

    def test_terms_empty(self):
        """An empty 'terms' dict is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {}
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_terms_missing(self):
        """A series dict without a 'terms' key is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_terms_a_list_not_a_dict(self):
        """'terms' must be a dict for pivot series, not a list."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': [{
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }]
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_source_missing(self):
        """Missing 'source' in options (and in every term) is rejected."""
        series_input = [{
            'options': {
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_options_missing(self):
        """A series dict without an 'options' key is rejected."""
        series_input = [{
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_options_empty(self):
        """Empty options are rejected when the terms supply no source either."""
        series_input = [{
            'options': {},
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_source_wrong_type(self):
        """'source' must be a QuerySet/Model, not an arbitrary string."""
        series_input = [{
            'options': {
                'source': 'foobar',
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_categories_wrong_type(self):
        """'categories' must be a string or list of field lookups, not an int."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 10,
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_categories_not_a_field(self):
        """'categories' naming a non-existent model field is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'foobar',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_legend_by_wrong_type(self):
        """'legend_by' must be a string or list of field lookups, not an int."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 10,
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_legend_by_not_a_field(self):
        """'legend_by' naming a non-existent model field is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'foobar',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_term_func_wrong_type(self):
        """A term value that is neither an aggregate nor a dict is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': 'foobar',
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_term_dict_func_wrong_type(self):
        """A term dict whose 'func' is not an aggregate is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': 'foobar',
                    'legend_by': None
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)

    def test_term_dict_legend_by_wrong_type(self):
        """A term dict whose 'legend_by' is an int is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': 10
                }
            }
        }]
        self.assertRaises(APIInputError, clean_pdps, series_input)
class GoodDataSeriesListInputTests(TestCase):
    """Valid (non-pivot) data-series input is normalized by ``clean_dps``."""

    def test_all_terms(self):
        """String and dict terms mix; term-level source/alias override options."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': [
                'price',
                {
                    'genre': {
                        'field': 'book__genre__name',
                        'source': SalesHistory.objects.filter(price__gte=10),
                        'field_alias': 'gnr'
                    }
                }
            ]
        }]
        series_cleaned = {
            'price': {
                'source': SalesHistory.objects.all(),
                'field': 'price',
                'field_alias': 'price'
            },
            'genre': {
                'source': SalesHistory.objects.filter(price__gte=10),
                'field': 'book__genre__name',
                'field_alias': 'gnr'
            }
        }
        self.assertOptionDictsEqual(clean_dps(series_input),
                                    series_cleaned)

    def test_terms_list_all_str(self):
        """Plain string terms default field to the term name and alias to its last part."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': ['price', 'book__genre__name']
        }]
        series_cleaned = {
            'price': {
                'source': SalesHistory.objects.all(),
                'field': 'price',
                'field_alias': 'price'
            },
            'book__genre__name': {
                'source': SalesHistory.objects.all(),
                'field': 'book__genre__name',
                'field_alias': 'name'
            }
        }
        self.assertOptionDictsEqual(clean_dps(series_input),
                                    series_cleaned)

    def test_terms_is_a_dict(self):
        """'terms' may be a dict mapping names to (possibly empty) option dicts."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': {'price': {}}
        }]
        series_cleaned = {
            'price': {
                'source': SalesHistory.objects.all(),
                'field': 'price',
                'field_alias': 'price'
            }
        }
        self.assertOptionDictsEqual(clean_dps(series_input),
                                    series_cleaned)

    def test_terms_is_a_list_of_tuples_w_lambda(self):
        """A (name, callable) tuple attaches the callable as the term's 'fn'."""
        _l = lambda x: -x  # noqa
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': [('price', _l)]
        }]
        series_cleaned = {
            'price': {
                'source': SalesHistory.objects.all(),
                'field': 'price',
                'field_alias': 'price',
                'fn': _l
            }
        }
        self.assertOptionDictsEqual(clean_dps(series_input),
                                    series_cleaned)

    def test_terms_is_a_list_of_tuples_containing_dict_and_lambda(self):
        """A ({alias: field}, callable) tuple renames the term and sets 'fn'."""
        _l = lambda x: -x  # noqa
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': [({'price-x': 'price'}, _l)]
        }]
        series_cleaned = {
            'price-x': {
                'source': SalesHistory.objects.all(),
                'field': 'price',
                'field_alias': 'price-x',
                'fn': _l
            }
        }
        self.assertOptionDictsEqual(clean_dps(series_input),
                                    series_cleaned)

    def test_multiple_dicts(self):
        """Two series dicts with different sources merge into one cleaned dict."""
        series_input = [
            {
                'options': {
                    'source': SalesHistory.objects.all()
                },
                'terms': ['price']
            }, {
                'options': {
                    'source': SalesHistory.objects.filter(price__gte=10)
                },
                'terms': {
                    'genre': {
                        'field': 'book__genre__name',
                        'field_alias': 'gnr'
                    }
                }
            }
        ]
        series_cleaned = {
            'price': {
                'source': SalesHistory.objects.all(),
                'field': 'price',
                'field_alias': 'price'
            },
            'genre': {
                'source': SalesHistory.objects.filter(price__gte=10),
                'field': 'book__genre__name',
                'field_alias': 'gnr'
            }
        }
        self.assertOptionDictsEqual(clean_dps(series_input),
                                    series_cleaned)
class BadDataSeriesListInputTests(TestCase):
    """Invalid data-series input must make ``clean_dps`` raise APIInputError."""

    def test_source_missing(self):
        """Empty options with a term lacking its own source is rejected."""
        series_input = [{
            'options': {},
            'terms': [
                'price', {
                    'genre': {
                        'field': 'book__genre__name',
                        'source': SalesHistory.objects.filter(price__gte=10),
                        'field_alias': 'gnr'
                    }
                }
            ]
        }]
        self.assertRaises(APIInputError, clean_dps, series_input)

    def test_source_wrong_type(self):
        """'source' must be a QuerySet/Model, not an arbitrary string."""
        series_input = [{
            'options': {
                'source': 'foobar'
            },
            'terms': [
                'price', {
                    'genre': {
                        'field': 'book__genre__name',
                        'source': SalesHistory.objects.filter(price__gte=10),
                        'field_alias': 'gnr'
                    }
                }
            ]
        }]
        self.assertRaises(APIInputError, clean_dps, series_input)

    def test_series_terms_empty(self):
        """An empty 'terms' list is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': []
        }]
        self.assertRaises(APIInputError, clean_dps, series_input)

    def test_series_terms_wrong_type(self):
        """'terms' must be a list or dict, not a plain string."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': 'foobar'
        }]
        self.assertRaises(APIInputError, clean_dps, series_input)

    def test_terms_element_wrong_type(self):
        """A terms list element must be a str/dict/tuple, not an int."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': [10]
        }]
        self.assertRaises(APIInputError, clean_dps, series_input)

    def test_terms_element_not_a_field(self):
        """A string term naming a non-existent model field is rejected."""
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all()
            },
            'terms': [
                'foobar', {
                    'genre': {
                        'field': 'book__genre__name',
                        'source': SalesHistory.objects.filter(price__gte=10),
                        'field_alias': 'gnr'
                    }
                }
            ]
        }]
        self.assertRaises(APIInputError, clean_dps, series_input)
class GoodPivotChartOptionsTests(TestCase):
    """Valid pivot-chart series options are normalized by ``clean_pcso``."""

    def setUp(self):
        # Build a PivotDataPool whose terms ('avg_price', 'avg_price_all')
        # the chart options below must refer to.
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.ds = PivotDataPool(series_input)

    def test_all_terms(self):
        """String terms get the shared options; dict terms override them."""
        pcso_input = [{
            'options': {
                'type': 'column'
            },
            'terms': [
                'avg_price',
                {
                    'avg_price_all': {
                        'type': 'area'
                    }
                }
            ]
        }]
        series_cleaned = {
            'avg_price': {
                'type': 'column'
            },
            'avg_price_all': {
                'type': 'area'
            }
        }
        self.assertOptionDictsEqual(clean_pcso(pcso_input, self.ds),
                                    series_cleaned)
class BadPivotChartOptionsTests(TestCase):
    """Invalid pivot-chart options must make ``clean_pcso`` raise APIInputError."""

    def setUp(self):
        # PivotDataPool with known terms for the negative tests below.
        series_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__state',
                'legend_by': 'book__genre__name',
                'top_n_per_cat': 2
            },
            'terms': {
                'avg_price': Avg('price'),
                'avg_price_all': {
                    'func': Avg('price'),
                    'legend_by': None
                }
            }
        }]
        self.ds = PivotDataPool(series_input)

    def test_term_not_in_pdps(self):
        """A chart term absent from the PivotDataPool is rejected."""
        pcso_input = [{
            'options': {
                'type': 'column'
            },
            'terms': [
                'foobar',
                {
                    'avg_price_all': {
                        'type': 'area'
                    }
                }
            ]
        }]
        self.assertRaises(APIInputError, clean_pcso, pcso_input, self.ds)

    def test_opts_missing(self):
        """A chart dict without an 'options' key is rejected."""
        pcso_input = \
            [{'terms': [
                'avg_price',
                {'avg_price_all': {
                    'type': 'area'}}]}
             ]
        self.assertRaises(APIInputError, clean_pcso, pcso_input, self.ds)

    def test_opts_wrong_type(self):
        """'options' must be a dict, not an int."""
        pcso_input = \
            [{'options': 0,
              'terms': [
                  'avg_price',
                  {'avg_price_all': {
                      'type': 'area'}}]}
             ]
        self.assertRaises(APIInputError, clean_pcso, pcso_input, self.ds)

    def test_terms_missing(self):
        """A chart dict without a 'terms' key is rejected ('opts' is not 'options')."""
        pcso_input = \
            [{'opts': {
                'type': 'column'}}]
        self.assertRaises(APIInputError, clean_pcso, pcso_input, self.ds)

    def test_terms_a_dict_not_a_list(self):
        """Pivot-chart 'terms' must be a list, not a dict."""
        pcso_input = \
            [{'options': {
                'type': 'column'},
              'terms':
                  {'avg_price_all': {
                      'type': 'area'}}}]
        self.assertRaises(APIInputError, clean_pcso, pcso_input, self.ds)

    def test_terms_a_str(self):
        """Pivot-chart 'terms' must be a list, not a plain string."""
        pcso_input = \
            [{'options': {
                'type': 'column'},
              'terms':
                  'foobar'}]
        self.assertRaises(APIInputError, clean_pcso, pcso_input, self.ds)
class GoodChartOptionsTests(TestCase):
    """Valid chart series options are normalized by ``clean_cso``."""

    def setUp(self):
        # DataPool with two sources: city weather (x-term 'month') and
        # Seattle weather (x-term aliased to 'month_seattle').
        series_input = \
            [{'options': {
                'source': MonthlyWeatherByCity.objects.all()},
              'terms': [
                  'month',
                  'boston_temp',
                  'houston_temp',
                  'new_york_temp']},
             {'options': {
                 'source': MonthlyWeatherSeattle.objects.all()},
              'terms': [
                  {'month_seattle': 'month'},
                  'seattle_temp']
              }]
        self.ds = DataPool(series_input)

    def test_all_terms(self):
        """Each y-term is keyed to its x-term; dict terms override options."""
        so_input = [{
            'options': {
                'type': 'column'
            },
            'terms': {
                'month': [
                    'boston_temp', {
                        'new_york_temp': {
                            'type': 'area',
                            'xAxis': 1
                        }
                    }
                ],
                'month_seattle': ['seattle_temp']
            }
        }]
        # '_x_axis_term' records which x-term each y-term plots against.
        so_cleaned = {
            'boston_temp': {
                '_x_axis_term': 'month',
                'type': 'column'
            },
            'new_york_temp': {
                '_x_axis_term': 'month',
                'type': 'area',
                'xAxis': 1
            },
            'seattle_temp': {
                '_x_axis_term': 'month_seattle',
                'type': 'column'
            }
        }
        self.assertOptionDictsEqual(clean_cso(so_input, self.ds),
                                    so_cleaned)

    def test_all_terms_str(self):
        """Plain string y-terms all inherit the shared chart options."""
        so_input = [{
            'options': {
                'type': 'column'
            },
            'terms': {
                'month': ['boston_temp', 'new_york_temp']
            }
        }]
        so_cleaned = {
            'boston_temp': {
                '_x_axis_term': 'month',
                'type': 'column'
            },
            'new_york_temp': {
                '_x_axis_term': 'month',
                'type': 'column'
            }
        }
        self.assertOptionDictsEqual(clean_cso(so_input, self.ds),
                                    so_cleaned)

    def test_all_terms_dict(self):
        """Dict y-terms merge their own options over the shared ones."""
        so_input = [
            {
                'options': {'type': 'column'},
                'terms': {
                    'month': [{
                        'boston_temp': {
                            'type': 'area',
                            'xAxis': 1
                        }}, {
                        'new_york_temp': {
                            'xAxis': 0
                        }}
                    ]
                }
            }
        ]
        so_cleaned = {
            'boston_temp': {
                '_x_axis_term': 'month',
                'type': 'area',
                'xAxis': 1
            },
            'new_york_temp': {
                '_x_axis_term': 'month',
                'type': 'column',
                'xAxis': 0
            }
        }
        self.assertOptionDictsEqual(clean_cso(so_input, self.ds),
                                    so_cleaned)

    def test_multiple_items_in_list(self):
        """Two chart dicts with different shared options merge cleanly."""
        so_input = [
            {
                'options': {
                    'type': 'column'
                },
                'terms': {
                    'month': ['boston_temp', 'new_york_temp']
                }
            }, {
                'options': {
                    'type': 'area'
                },
                'terms': {
                    'month_seattle': ['seattle_temp']
                }
            }
        ]
        so_cleaned = {
            'boston_temp': {
                '_x_axis_term': 'month',
                'type': 'column'
            },
            'new_york_temp': {
                '_x_axis_term': 'month',
                'type': 'column'
            },
            'seattle_temp': {
                '_x_axis_term': 'month_seattle',
                'type': 'area'
            }
        }
        self.assertOptionDictsEqual(clean_cso(so_input, self.ds),
                                    so_cleaned)
class BadChartOptionsTests(TestCase):
    """Invalid chart options must make ``clean_cso`` raise APIInputError."""

    def setUp(self):
        # Same two-source DataPool as GoodChartOptionsTests.
        series_input = \
            [{'options': {
                'source': MonthlyWeatherByCity.objects.all()},
              'terms': [
                  'month',
                  'boston_temp',
                  'houston_temp',
                  'new_york_temp']},
             {'options': {
                 'source': MonthlyWeatherSeattle.objects.all()},
              'terms': [
                  {'month_seattle': 'month'},
                  'seattle_temp']
              }]
        self.ds = DataPool(series_input)

    def test_options_missing(self):
        """A chart dict without an 'options' key is rejected."""
        so_input = [{
            'terms': {
                'month': [
                    'boston_temp', {
                        'new_york_temp': {
                            'type': 'area',
                            'xAxis': 1
                        }
                    }
                ],
                'month_seattle': ['seattle_temp']
            }
        }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_options_wrong_type(self):
        """'options' must be a dict, not an int."""
        so_input = [{
            'options': 10,
            'terms': {
                'month': [
                    'boston_temp', {
                        'new_york_temp': {
                            'type': 'area',
                            'xAxis': 1
                        }
                    }
                ],
                'month_seattle': ['seattle_temp']
            }
        }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_terms_missing(self):
        """A chart dict without a 'terms' key is rejected."""
        so_input = \
            [{'options': {
                'type': 'line'}
              }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_terms_wrong_type(self):
        """'terms' must be a dict, not an int."""
        so_input = \
            [{'options': {
                'type': 'line'},
              'terms': 10
              }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_terms_a_list_not_a_dict(self):
        """Chart 'terms' must be a dict, not a list."""
        so_input = \
            [{'options': {
                'type': 'line'},
              'terms': [{
                  'month': ['new_york_temp']}]
              }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_terms_empty(self):
        """An empty 'terms' dict is rejected."""
        so_input = \
            [{'options': {
                'type': 'line'},
              'terms': {}
              }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_yterms_not_in_ds(self):
        """A y-term absent from the DataPool is rejected."""
        so_input = \
            [{'options': {
                'type': 'column'},
              'terms': {
                  'month': [
                      'foobar']}
              }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_xterms_not_in_ds(self):
        """An x-term absent from the DataPool is rejected."""
        so_input = \
            [{'options': {
                'type': 'column'},
              'terms': {
                  'foobar': [
                      'seattle_temp']}
              }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)

    def test_yterms_not_a_list(self):
        """Each x-term must map to a list of y-terms, not a plain string."""
        so_input = \
            [{'options': {
                'type': 'column'},
              'terms': {
                  'month': 'new_york_temp'}
              }]
        self.assertRaises(APIInputError, clean_cso, so_input, self.ds)
class ChartitTemplateTagTests(TestCase):
    """Tests for the rendered HTML of the ``load_charts`` template tag."""

    def test_load_charts_with_None_chart(self):
        """A None chart still renders the boilerplate script tags."""
        html = chartit.load_charts(None, 'my_chart')
        self.assertIn('<script type="text/javascript">', html)
        self.assertIn('var _chartit_hco_array = ();', html)
        self.assertIn('<script src="/static/chartit/js/chartloader.js" type="text/javascript">', html) # noqa
    def test_load_charts_with_missing_render_to(self):
        """An empty render_to falls back to the default 'container' div."""
        chart_data = DataPool(series=[{'options': {
            'source': SalesHistory.objects.all()},
            'terms': ['price', 'sale_date']
        }])
        chart = Chart(
            datasource=chart_data,
            series_options=[{
                'options': {
                    'type': 'column',
                    'stacking': False
                },
                'terms': {'sale_date': ['price']}}])
        html = chartit.load_charts(chart, '')
        self.assertIn('<script type="text/javascript">', html)
        self.assertIn('{"renderTo": "container"}', html)
        self.assertIn('"title": {"text": "Price vs. Sale Date"}', html)
        self.assertIn('<script src="/static/chartit/js/chartloader.js" type="text/javascript">', html) # noqa
    def test_load_charts_with_single_chart(self):
        """A single chart renders its options and derived title."""
        chart_data = DataPool(series=[{'options': {
            'source': SalesHistory.objects.all()},
            'terms': ['price', 'sale_date']
        }])
        chart = Chart(
            datasource=chart_data,
            series_options=[{
                'options': {
                    'type': 'column',
                    'stacking': False
                },
                'terms': {'sale_date': ['price']}}])
        html = chartit.load_charts(chart, 'my_chart')
        self.assertIn('<script type="text/javascript">', html)
        self.assertIn('"stacking": false', html)
        self.assertIn('"type": "column"', html)
        self.assertIn('"name": "price"', html)
        self.assertIn('{"renderTo": "my_chart"}', html)
        self.assertIn('"title": {"text": "Price vs. Sale Date"}', html)
        self.assertIn('<script src="/static/chartit/js/chartloader.js" type="text/javascript">', html) # noqa
    def test_load_charts_with_two_charts(self):
        """Two charts map in order to the comma-separated render targets."""
        chart_data = DataPool(series=[{'options': {
            'source': SalesHistory.objects.all()},
            'terms': ['price', 'sale_date']
        }])
        chart = Chart(
            datasource=chart_data,
            series_options=[{
                'options': {
                    'type': 'column',
                    'stacking': False
                },
                'terms': {'sale_date': ['price']}}])
        pivot_input = [{
            'options': {
                'source': SalesHistory.objects.all(),
                'categories': 'bookstore__city__city',
            },
            'terms': {
                'avg_price': Avg('price'),
            }
        }]
        pivot_chart_data = PivotDataPool(pivot_input)
        pivot_chart = PivotChart(
            datasource=pivot_chart_data,
            series_options=[{
                'options': {
                    'type': 'column',
                    'stacking': False
                },
                'terms': ['avg_price']
            }]
        )
        html = chartit.load_charts([chart, pivot_chart],
                                   'my_chart,my_pivot_chart')
        self.assertIn('<script type="text/javascript">', html)
        self.assertIn('"stacking": false', html)
        self.assertIn('"type": "column"', html)
        self.assertIn('"name": "price"', html)
        # the first chart
        self.assertIn('{"renderTo": "my_chart"}', html)
        self.assertIn('"title": {"text": "Price vs. Sale Date"}', html)
        # the second chart
        self.assertIn('{"renderTo": "my_pivot_chart"}', html)
        self.assertIn('"title": {"text": "Avg_Price vs. City"}', html)
        self.assertIn('<script src="/static/chartit/js/chartloader.js" type="text/javascript">', html) # noqa
    def test_sortf_mapf_mts_with_data(self):
        """
        Test that PivotChart loads when there is actual data and
        sortf_mapf_mtp is specified!
        """
        def region_state(x):
            # map a (state, city) category tuple to (region, city)
            region = {'CA': 'S', 'MA': 'N', 'TX': 'S', 'NY': 'N'}
            return (region[x[0]], x[1])
        ds = PivotDataPool(
            series=[{
                'options': {
                    'source': SalesHistory.objects.all(),
                    'categories': [
                        'bookstore__city__state',
                        'bookstore__city__city'
                    ],
                    'legend_by': 'book__genre__name'
                },
                'terms': {
                    'tot_sales': Sum('sale_qty')
                }
            }],
            sortf_mapf_mts=(None, region_state, True)
        )
        chart = PivotChart(
            datasource=ds,
            series_options=[{
                'options': {
                    'type': 'column',
                    'stacking': True
                },
                'terms': ['tot_sales']
            }]
        )
        # just make sure this renders fine w/o errors
        html = chartit.load_charts([chart], 'my_chart')
        self.assertNotEqual(html, '')
    def test_sortf_mapf_mts_without_data(self):
        """
        Test that PivotChart loads when the QuerySet returns empty data and
        sortf_mapf_mtp is specified!
        """
        def region_state(x):
            # map a (state, city) category tuple to (region, city)
            region = {'CA': 'S', 'MA': 'N', 'TX': 'S', 'NY': 'N'}
            return (region[x[0]], x[1])
        ds = PivotDataPool(
            series=[{
                'options': {
                    'source': SalesHistory.objects.filter(
                        bookstore__city__city='TEST CITY'),
                    'categories': [
                        'bookstore__city__state',
                        'bookstore__city__city'
                    ],
                    'legend_by': 'book__genre__name'
                },
                'terms': {
                    'tot_sales': Sum('sale_qty')
                }
            }],
            sortf_mapf_mts=(None, region_state, True)
        )
        chart = PivotChart(
            datasource=ds,
            series_options=[{
                'options': {
                    'type': 'column',
                    'stacking': True
                },
                'terms': ['tot_sales']
            }]
        )
        # just make sure this renders fine w/o errors
        html = chartit.load_charts([chart], 'my_chart')
        self.assertNotEqual(html, '')
class ChartitJSRelPathTests(TestCase):
    """
    Test the CHARTIT_JS_REL_PATH setting.
    """
    def setUp(self):
        super(ChartitJSRelPathTests, self).setUp()
        # force chartit module to reload
        # b/c this setting is evaluated near the top
        for mod in ['chartit.templatetags.chartit',
                    'chartit.templatetags',
                    'chartit']:
            if mod in sys.modules:
                del sys.modules[mod]

    @override_settings(CHARTIT_JS_REL_PATH='chartit/js/')
    def test_setting_starting_without_slash(self):
        """A value without a leading slash still produces the same script URL."""
        from chartit.templatetags import chartit
        html = chartit.load_charts(None, 'my_chart')
        self.assertIn('<script type="text/javascript">', html)
        self.assertIn('var _chartit_hco_array = ();', html)
        self.assertIn('<script src="/static/chartit/js/chartloader.js" type="text/javascript">', html) # noqa

    @override_settings(CHARTIT_JS_REL_PATH='/chartit/js/')
    def test_setting_starting_with_slash(self):
        """A value with a leading slash produces the same script URL."""
        from chartit.templatetags import chartit
        html = chartit.load_charts(None, 'my_chart')
        self.assertIn('<script type="text/javascript">', html)
        self.assertIn('var _chartit_hco_array = ();', html)
        self.assertIn('<script src="/static/chartit/js/chartloader.js" type="text/javascript">', html) # noqa
| 33.122394
| 109
| 0.443507
| 4,911
| 61,972
| 5.148239
| 0.046019
| 0.053791
| 0.068346
| 0.086382
| 0.932326
| 0.922478
| 0.909465
| 0.896136
| 0.880433
| 0.861607
| 0
| 0.003118
| 0.435874
| 61,972
| 1,870
| 110
| 33.140107
| 0.72008
| 0.007358
| 0
| 0.747227
| 0
| 0
| 0.228841
| 0.06063
| 0
| 0
| 0
| 0
| 0.063631
| 1
| 0.053123
| false
| 0
| 0.006421
| 0
| 0.067717
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e895a4a18d03b1641a4b2e89cc79a34b94708c4a
| 50,061
|
py
|
Python
|
tripleoclient/tests/v1/undercloud/test_install_upgrade.py
|
merouaneagar/python-tripleoclient
|
de0fcbfcbda08128b1cdda5255aca65a831ce45f
|
[
"Apache-2.0"
] | null | null | null |
tripleoclient/tests/v1/undercloud/test_install_upgrade.py
|
merouaneagar/python-tripleoclient
|
de0fcbfcbda08128b1cdda5255aca65a831ce45f
|
[
"Apache-2.0"
] | null | null | null |
tripleoclient/tests/v1/undercloud/test_install_upgrade.py
|
merouaneagar/python-tripleoclient
|
de0fcbfcbda08128b1cdda5255aca65a831ce45f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import fixtures
import json
import mock
import os
import sys
from jinja2 import Template
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from tripleoclient.tests.v1.test_plugin import TestPluginV1
# Load the plugin init module for the plugin list and show commands
from tripleoclient.v1 import undercloud
class FakePluginV1Client(object):
    """Minimal stand-in for a v1 plugin client.

    Records the ``token`` keyword argument as ``auth_token`` and the
    ``endpoint`` keyword argument as ``management_url``, mirroring the
    attribute names the real client exposes.
    """

    def __init__(self, **kwargs):
        # A missing 'token' or 'endpoint' raises KeyError, matching the
        # real client's required-argument behavior.
        self.auth_token, self.management_url = (
            kwargs['token'], kwargs['endpoint'])
class TestUndercloudInstall(TestPluginV1):
def setUp(self):
    """Prepare an oslo.config fixture and the InstallUndercloud command."""
    super(TestUndercloudInstall, self).setUp()
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    self.conf.config(container_images_file='/home/stack/foo.yaml')
    self.conf.set_default('output_dir', '/home/stack')
    # setting this so we don't have to mock get_local_timezone everywhere
    self.conf.set_default('undercloud_timezone', 'UTC')
    # don't actually load config from ~/undercloud.conf
    self.mock_config_load = self.useFixture(
        fixtures.MockPatch('tripleoclient.utils.load_config'))
    # Get the command object to test
    app_args = mock.Mock()
    app_args.verbose_level = 1
    self.cmd = undercloud.InstallUndercloud(self.app, app_args)
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('six.moves.builtins.open')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
def test_undercloud_install_default(self, mock_subprocess,
                                    mock_wr,
                                    mock_os, mock_copy,
                                    mock_open, mock_user, mock_getuid):
    """Verify the exact deploy command line built with default config."""
    arglist = ['--no-validations']
    verifylist = []
    parsed_args = self.check_parser(self.cmd, arglist, verifylist)
    # DisplayCommandBase.take_action() returns two tuples
    self.cmd.take_action(parsed_args)
    # The full argv handed to 'sudo openstack tripleo deploy' is pinned
    # so any change to the generated command line fails this test.
    mock_subprocess.assert_called_with(
        ['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
         '--standalone', '--standalone-role', 'Undercloud', '--stack',
         'undercloud', '--local-domain=localdomain',
         '--local-ip=192.168.24.1/24',
         '--templates=/usr/share/openstack-tripleo-heat-templates/',
         '--networks-file=/usr/share/openstack-tripleo-heat-templates/'
         'network_data_undercloud.yaml',
         '--heat-native', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'undercloud.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'use-dns-for-vips.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'services/ironic.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'services/ironic-inspector.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'services/undercloud-remove-novajoin.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'disable-telemetry.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'services/tempest.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'public-tls-undercloud.yaml',
         '--public-virtual-ip', '192.168.24.2',
         '--control-virtual-ip', '192.168.24.3', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'ssl/tls-endpoints-public-ip.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'services/undercloud-haproxy.yaml', '-e',
         '/usr/share/openstack-tripleo-heat-templates/environments/'
         'services/undercloud-keepalived.yaml',
         # TODO(cjeanner) drop once we have proper oslo.privsep
         '--deployment-user', 'stack',
         '--output-dir=/home/stack', '--cleanup',
         '-e', '/home/stack/tripleo-config-generated-env-files/'
         'undercloud_parameters.yaml',
         '--log-file=install-undercloud.log', '-e',
         '/usr/share/openstack-tripleo-heat-templates/'
         'undercloud-stack-vstate-dropin.yaml'])
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.makedirs', return_value=None)
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
def test_undercloud_install_with_heat_customized(self, mock_subprocess,
                                                 mock_wr, mock_os,
                                                 mock_copy, mock_user,
                                                 mock_getuid):
    """Custom output_dir/templates/roles_file/heat_native flow through to deploy."""
    self.conf.config(output_dir='/foo')
    self.conf.config(templates='/usertht')
    self.conf.config(heat_native='false')
    self.conf.config(roles_file='foo/roles.yaml')
    arglist = ['--no-validations', '--force-stack-update']
    verifylist = []
    parsed_args = self.check_parser(self.cmd, arglist, verifylist)
    # DisplayCommandBase.take_action() returns two tuples
    self.cmd.take_action(parsed_args)
    # The custom output directory (and its env-files subdir) must be created.
    mock_os.assert_has_calls(
        [
            mock.call('/foo/tripleo-config-generated-env-files'),
            mock.call('/foo')
        ])
    # All template/environment paths must be rooted at the custom /usertht.
    mock_subprocess.assert_called_with(
        ['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
         '--standalone', '--standalone-role', 'Undercloud', '--stack',
         'undercloud', '--local-domain=localdomain',
         '--local-ip=192.168.24.1/24',
         '--templates=/usertht',
         '--roles-file=foo/roles.yaml',
         '--networks-file=/usertht/network_data_undercloud.yaml',
         '--heat-native=False', '-e',
         '/usertht/environments/undercloud.yaml', '-e',
         '/usertht/environments/use-dns-for-vips.yaml', '-e',
         '/usertht/environments/podman.yaml', '-e',
         '/home/stack/foo.yaml', '-e',
         '/usertht/environments/services/ironic.yaml', '-e',
         '/usertht/environments/services/ironic-inspector.yaml', '-e',
         '/usertht/environments/services/'
         'undercloud-remove-novajoin.yaml', '-e',
         '/usertht/environments/'
         'disable-telemetry.yaml', '-e',
         '/usertht/environments/services/tempest.yaml', '-e',
         '/usertht/environments/public-tls-undercloud.yaml',
         '--public-virtual-ip', '192.168.24.2',
         '--control-virtual-ip', '192.168.24.3', '-e',
         '/usertht/environments/ssl/tls-endpoints-public-ip.yaml', '-e',
         '/usertht/environments/services/undercloud-haproxy.yaml', '-e',
         '/usertht/environments/services/undercloud-keepalived.yaml',
         # TODO(cjeanner) drop once we have proper oslo.privsep
         '--deployment-user', 'stack',
         '--output-dir=/foo', '--cleanup', '-e',
         '/foo/tripleo-config-generated-env-files/'
         'undercloud_parameters.yaml',
         '--log-file=install-undercloud.log', '-e',
         '/usertht/undercloud-stack-vstate-dropin.yaml',
         '--force-stack-update'])
# Verify that a user-supplied net_config_override JSON template is rendered
# with values taken from undercloud.conf (interface name, MTU, nameservers,
# public host, static routes) and handed to the deployment as the
# 'UndercloudNetConfigOverride' parameter, and that the masquerade-networks
# environment is appended to the generated `tripleo deploy` command line.
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('tripleoclient.v1.undercloud_config.'
'_generate_masquerade_networks', autospec=True)
@mock.patch('tripleoclient.v1.undercloud_config.'
'_generate_subnets_static_routes', autospec=True)
@mock.patch('tripleoclient.v1.undercloud_config.'
'_get_jinja_env_source', autospec=True)
@mock.patch('tripleoclient.v1.undercloud_config.'
'_get_unknown_instack_tags', return_value=None, autospec=True)
@mock.patch('jinja2.meta.find_undeclared_variables', return_value={},
autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
def test_undercloud_install_with_heat_net_conf_over(self, mock_subprocess,
mock_j2_meta,
mock_get_unknown_tags,
mock_get_j2,
mock_sroutes,
mock_masq,
mock_wr, mock_os,
mock_copy, mock_user,
mock_getuid):
# Point the installer at an override template and set the config values
# that should be substituted into the rendered network config.
self.conf.config(net_config_override='/foo/net-config.json')
self.conf.config(local_interface='ethX')
self.conf.config(undercloud_public_host='4.3.2.1')
self.conf.config(local_mtu='1234')
self.conf.config(undercloud_nameservers=['8.8.8.8', '8.8.4.4'])
self.conf.config(subnets='foo')
self.conf.config(local_subnet='foo')
# Masquerade networks being non-empty triggers the extra
# masquerade-networks.yaml environment in the deploy command below.
mock_masq.return_value = {'1.1.1.1/11': ['2.2.2.2/22']}
mock_sroutes.return_value = {'ip_netmask': '1.1.1.1/11',
'next_hop': '1.1.1.1'}
# Jinja template mimicking an instack-style net-config override file.
instack_net_conf = """
"network_config": [
{
"type": "ovs_bridge",
"name": "br-ctlplane",
"ovs_extra": [
"br-set-external-id br-ctlplane bridge-id br-ctlplane"
],
"members": [
{
"type": "interface",
"name": "{{LOCAL_INTERFACE}}",
"primary": "true",
"mtu": {{LOCAL_MTU}},
"dns_servers": {{UNDERCLOUD_NAMESERVERS}}
}
],
"addresses": [
{
"ip_netmask": "{{PUBLIC_INTERFACE_IP}}"
}
],
"routes": {{SUBNETS_STATIC_ROUTES}},
"mtu": {{LOCAL_MTU}}
}
]
"""
# The template above rendered with the config values set earlier.
expected_net_conf = json.loads(
"""
{"network_config": [
{
"type": "ovs_bridge",
"name": "br-ctlplane",
"ovs_extra": [
"br-set-external-id br-ctlplane bridge-id br-ctlplane"
],
"members": [
{
"type": "interface",
"name": "ethX",
"primary": "true",
"mtu": 1234,
"dns_servers": ["8.8.8.8", "8.8.4.4"]
}
],
"addresses": [
{
"ip_netmask": "4.3.2.1"
}
],
"routes": {"next_hop": "1.1.1.1", "ip_netmask": "1.1.1.1/11"},
"mtu": 1234
}
]}
"""
)
# Stub the jinja environment so the override file "loads" our template.
env = mock.Mock()
env.get_template = mock.Mock(return_value=Template(instack_net_conf))
mock_get_j2.return_value = (env, None)
arglist = ['--no-validations']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# Make os.path.exists report the override file as present while
# delegating every other path check to the real implementation.
os_orig = os.path.exists
with mock.patch('os.path.exists') as mock_exists:
def fcheck(*args, **kwargs):
if '/foo/net-config.json' in args:
return True
return os_orig(*args, **kwargs)
mock_exists.side_effect = fcheck
self.cmd.take_action(parsed_args)
# unpack the write env file call to verify if the produced net config
# override JSON matches our expectations
found_net_conf_override = False
for call in mock_wr.call_args_list:
args, kwargs = call
for a in args:
if 'UndercloudNetConfigOverride' in a:
found_net_conf_override = True
self.assertTrue(
a['UndercloudNetConfigOverride'] == expected_net_conf)
self.assertTrue(found_net_conf_override)
# Full deploy command; note masquerade-networks.yaml and the public
# virtual IP taken from undercloud_public_host ('4.3.2.1').
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'environments/services/masquerade-networks.yaml',
'-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '4.3.2.1',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
# TODO(cjeanner) drop once we have proper oslo.privsep
'--deployment-user', 'stack',
'--output-dir=/home/stack',
'--cleanup', '-e',
'/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--log-file=install-undercloud.log', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Verify that raising the CLI verbosity to 2 inserts '--debug' into the
# generated `tripleo deploy` command and that a custom undercloud_log_file
# is forwarded via '--log-file'.
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('six.moves.builtins.open')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
def test_undercloud_install_with_heat_and_debug(self, mock_subprocess,
mock_wr,
mock_os, mock_copy,
mock_open, mock_user,
mock_getuid):
self.conf.config(undercloud_log_file='/foo/bar')
arglist = ['--no-validations']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
# Temporarily bump verbosity so the command is built in debug mode,
# then restore it so other tests are unaffected.
old_verbose = self.cmd.app_args.verbose_level
self.cmd.app_args.verbose_level = 2
self.cmd.take_action(parsed_args)
self.cmd.app_args.verbose_level = old_verbose
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
# TODO(cjeanner) drop once we have proper oslo.privsep
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--debug', '--log-file=/foo/bar', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Verify the default (non-debug) install command when a custom
# undercloud_log_file is configured: '--log-file=/foo/bar' is used and no
# '--debug' flag appears.
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('six.moves.builtins.open')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
def test_undercloud_install_with_heat_true(self, mock_subprocess,
mock_wr,
mock_os, mock_copy,
mock_open, mock_user,
mock_getuid):
self.conf.config(undercloud_log_file='/foo/bar')
arglist = ['--no-validations']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
# TODO(cjeanner) drop once we have proper oslo.privsep
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml', '--log-file=/foo/bar', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Verify that enabling swift encryption adds the barbican service and the
# barbican simple-crypto backend environments to the deploy command.
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
def test_undercloud_install_with_swift_encryption(self, mock_subprocess,
mock_wr, mock_os,
mock_copy, mock_user,
mock_getuid):
arglist = ['--no-validations']
verifylist = []
self.conf.set_default('enable_swift_encryption', True)
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
# The two barbican environments below are the swift-encryption
# specific additions this test asserts on.
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/barbican.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'barbican-backend-simple-crypto.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--log-file=install-undercloud.log', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Tests for the `openstack undercloud upgrade` command
# (undercloud.UpgradeUndercloud), mirroring the install tests above.
class TestUndercloudUpgrade(TestPluginV1):
def setUp(self):
# Common fixture: a patched oslo.config with predictable defaults so
# each test only overrides what it asserts on.
super(TestUndercloudUpgrade, self).setUp()
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
self.conf.config(container_images_file='/home/stack/foo.yaml')
self.conf.set_default('output_dir', '/home/stack')
# setting this so we don't have to mock get_local_timezone everywhere
self.conf.set_default('undercloud_timezone', 'UTC')
# don't actually load config from ~/undercloud.conf
self.mock_config_load = self.useFixture(
fixtures.MockPatch('tripleoclient.utils.load_config'))
# Get the command object to test
app_args = mock.Mock()
app_args.verbose_level = 1
self.cmd = undercloud.UpgradeUndercloud(self.app, app_args)
# Verify the default upgrade path on a python2 interpreter: the client
# packages are dnf-upgraded first, then the command re-executes itself with
# '--skip-package-updates' so the freshly installed code performs the
# actual upgrade.
@mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
return_value=True)
@mock.patch.object(sys, 'executable', 'python2')
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
@mock.patch('tripleoclient.utils.run_command', autospec=True)
def test_undercloud_upgrade_default(self, mock_run_command,
mock_subprocess, mock_wr,
mock_os_mkdir, mock_copy, mock_user,
mock_getuid, mock_confirm):
arglist = ['--no-validations']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
# Package update happens before the re-exec; note the python2-prefixed
# client package matching the patched sys.executable.
mock_run_command.assert_called_with(
['sudo', 'dnf', 'upgrade', '-y',
'python2-tripleoclient',
'openstack-tripleo-common',
'openstack-tripleo-heat-templates',
'openstack-tripleo-validations',
'tripleo-ansible'],
name='Update extra packages'
)
mock_subprocess.assert_called_with([
'openstack', 'undercloud', 'upgrade', '--skip-package-updates',
'--no-validations'])
# Verify that with '--dry-run' no package update is run and that all CLI
# flags (plus '--debug' from verbose_level=2) are forwarded verbatim to the
# re-executed upgrade command.
@mock.patch.object(sys, 'executable', 'python3')
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
@mock.patch('tripleoclient.utils.run_command', autospec=True)
def test_undercloud_upgrade_all_opts(self, mock_run_command,
mock_subprocess,
mock_wr,
mock_os, mock_copy, mock_user,
mock_getuid):
arglist = ['--force-stack-update', '--no-validations',
'--inflight-validations', '--dry-run', '--yes']
verifylist = []
self.cmd.app_args.verbose_level = 2
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
# --dry-run must skip the dnf package update entirely.
mock_run_command.assert_not_called()
mock_subprocess.assert_called_with([
'openstack', 'undercloud', 'upgrade', '--skip-package-updates',
'--force-stack-update', '--no-validations',
'--inflight-validations', '--dry-run', '--yes', '--debug'])
# Verify that with '--skip-package-updates' the command goes straight to
# the `tripleo deploy` invocation, adding '--upgrade' and the
# undercloud-upgrade-prepare lifecycle environment.
@mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
return_value=True)
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
@mock.patch('tripleoclient.utils.run_command', autospec=True)
def test_undercloud_upgrade_no_pkgs(self, mock_run_command,
mock_subprocess,
mock_wr,
mock_os, mock_copy, mock_user,
mock_getuid, mock_confirm):
arglist = ['--no-validations', '--skip-package-updates']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--upgrade', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'lifecycle/undercloud-upgrade-prepare.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
# TODO(cjeanner) drop once we have proper oslo.privsep
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--log-file=install-undercloud.log', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Verify the upgrade deploy command with heat enabled; same expectation as
# test_undercloud_upgrade_no_pkgs (the default code path).
@mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
return_value=True)
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
@mock.patch('tripleoclient.utils.run_command', autospec=True)
def test_undercloud_upgrade_with_heat_enabled(self, mock_run_command,
mock_subprocess,
mock_wr, mock_os,
mock_copy, mock_user,
mock_getuid, mock_confirm):
arglist = ['--no-validations', '--skip-package-updates']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--upgrade', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'lifecycle/undercloud-upgrade-prepare.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--log-file=install-undercloud.log', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Verify the upgrade deploy command for the heat=true configuration; the
# expected command line matches the default upgrade path.
@mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
return_value=True)
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
@mock.patch('tripleoclient.utils.run_command', autospec=True)
def test_undercloud_upgrade_with_heat_true(self, mock_run_command,
mock_subprocess,
mock_wr, mock_os,
mock_copy, mock_user,
mock_getuid, mock_confirm):
arglist = ['--no-validations', '--skip-package-updates']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--upgrade', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'lifecycle/undercloud-upgrade-prepare.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
# TODO(cjeanner) drop once we have proper oslo.privsep
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--log-file=install-undercloud.log', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Verify that '-y' suppresses the confirmation prompt (no prompt mock is
# needed here) and that the flag is forwarded into the deploy command,
# appearing just before '--upgrade'.
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
@mock.patch('tripleoclient.utils.run_command', autospec=True)
def test_undercloud_upgrade_with_heat_and_yes(self, mock_run_command,
mock_subprocess,
mock_wr, mock_os,
mock_copy, mock_user,
mock_getuid):
arglist = ['--no-validations', '-y', '--skip-package-updates']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
self.cmd.take_action(parsed_args)
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'-y', '--upgrade', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'lifecycle/undercloud-upgrade-prepare.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
# TODO(cjeanner) drop once we have proper oslo.privsep
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--log-file=install-undercloud.log', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
# Verify that verbose_level=2 adds '--debug' to the upgrade deploy command;
# otherwise identical to the default upgrade expectation.
@mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
return_value=True)
# TODO(cjeanner) drop once we have proper oslo.privsep
@mock.patch('os.geteuid', return_value=1001)
@mock.patch('getpass.getuser', return_value='stack')
@mock.patch('shutil.copy')
@mock.patch('os.mkdir')
@mock.patch('tripleoclient.utils.write_env_file', autospec=True)
@mock.patch('subprocess.check_call', autospec=True)
@mock.patch('tripleoclient.utils.run_command', autospec=True)
def test_undercloud_upgrade_with_heat_and_debug(self, mock_run_command,
mock_subprocess,
mock_wr, mock_os,
mock_copy, mock_user,
mock_getuid, mock_confirm):
arglist = ['--no-validations', '--skip-package-updates']
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
# Temporarily bump verbosity for debug mode, then restore it.
old_verbose = self.cmd.app_args.verbose_level
self.cmd.app_args.verbose_level = 2
self.cmd.take_action(parsed_args)
self.cmd.app_args.verbose_level = old_verbose
mock_subprocess.assert_called_with(
['sudo', '--preserve-env', 'openstack', 'tripleo', 'deploy',
'--standalone', '--standalone-role', 'Undercloud', '--stack',
'undercloud', '--local-domain=localdomain',
'--local-ip=192.168.24.1/24',
'--templates=/usr/share/openstack-tripleo-heat-templates/',
'--networks-file=/usr/share/openstack-tripleo-heat-templates/'
'network_data_undercloud.yaml',
'--upgrade', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'lifecycle/undercloud-upgrade-prepare.yaml',
'--heat-native', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'undercloud.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'use-dns-for-vips.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'podman.yaml', '-e', '/home/stack/foo.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/ironic-inspector.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-remove-novajoin.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'disable-telemetry.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/tempest.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'public-tls-undercloud.yaml',
'--public-virtual-ip', '192.168.24.2',
'--control-virtual-ip', '192.168.24.3', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'ssl/tls-endpoints-public-ip.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-haproxy.yaml', '-e',
'/usr/share/openstack-tripleo-heat-templates/environments/'
'services/undercloud-keepalived.yaml',
'--deployment-user', 'stack',
'--output-dir=/home/stack', '--cleanup',
'-e', '/home/stack/tripleo-config-generated-env-files/'
'undercloud_parameters.yaml',
'--debug', '--log-file=install-undercloud.log', '-e',
'/usr/share/openstack-tripleo-heat-templates/'
'undercloud-stack-vstate-dropin.yaml'])
| 51.503086
| 79
| 0.578215
| 5,129
| 50,061
| 5.532268
| 0.067265
| 0.096987
| 0.11207
| 0.162502
| 0.895366
| 0.885744
| 0.875841
| 0.871048
| 0.868722
| 0.858044
| 0
| 0.01278
| 0.273207
| 50,061
| 971
| 80
| 51.556128
| 0.767095
| 0.054414
| 0
| 0.801198
| 0
| 0
| 0.485398
| 0.368027
| 0
| 0
| 0
| 0.00206
| 0.021557
| 1
| 0.020359
| false
| 0.015569
| 0.011976
| 0
| 0.038323
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e89f7946007927078f0fe761fc6ced711f8b09f8
| 191
|
py
|
Python
|
week1/comments.py
|
Mohamed-Magid/image-processing
|
eeda9a57cb1c3370c66f82873f9162ba5d711a3b
|
[
"MIT"
] | 3
|
2020-11-03T20:54:43.000Z
|
2020-11-04T03:15:55.000Z
|
week1/comments.py
|
Mohamed-Magid/image-processing
|
eeda9a57cb1c3370c66f82873f9162ba5d711a3b
|
[
"MIT"
] | null | null | null |
week1/comments.py
|
Mohamed-Magid/image-processing
|
eeda9a57cb1c3370c66f82873f9162ba5d711a3b
|
[
"MIT"
] | 1
|
2020-12-01T20:43:18.000Z
|
2020-12-01T20:43:18.000Z
|
# Demonstrates the different ways to annotate Python code with comments.
print("Hello World")
# A comment on its own line.
print("Hello World")  # An inline comment after a statement.
# Several consecutive single-line comments
# can be used for
# longer explanations.
print("Hello World")
"""
All text here
Is just
Another comment
"""
print("Hello World")
| 14.692308
| 34
| 0.649215
| 26
| 191
| 4.769231
| 0.5
| 0.322581
| 0.483871
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198953
| 191
| 13
| 35
| 14.692308
| 0.810458
| 0.225131
| 0
| 1
| 0
| 0
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
e8a417fd90b800d98e8517c27270951da6ced564
| 2,378
|
py
|
Python
|
eats/tests/unit/test_utils.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | null | null | null |
eats/tests/unit/test_utils.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | 5
|
2021-03-18T21:34:44.000Z
|
2022-03-11T23:35:23.000Z
|
eats/tests/unit/test_utils.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
from eats.common.utils import get_root_url
class TestUtilsMethods(unittest.TestCase):
def test_get_root_url_local_url(self):
self.assertEqual(get_root_url('http://localhost/'), 'http://localhost/')
def test_get_root_url_local_url_with_port(self):
self.assertEqual(get_root_url('http://localhost:9998/'), 'http://localhost:9998/')
def test_get_root_url_local_url_with_path(self):
self.assertEqual(get_root_url('http://localhost/favicon.ico'), 'http://localhost/')
def test_get_root_url_local_url_with_fragment(self):
self.assertEqual(get_root_url('http://localhost/#/form'), 'http://localhost/')
def test_get_root_url_local_url_with_fragment_and_port(self):
self.assertEqual(get_root_url('http://localhost:9998/#/favicon.ico'), 'http://localhost:9998/')
def test_get_root_url_local_url_with_query(self):
self.assertEqual(get_root_url('http://localhost/?favicon.ico'), 'http://localhost/')
def test_get_root_url_local_url_with_query_and_port(self):
self.assertEqual(get_root_url('http://localhost:9998/?favicon.ico'), 'http://localhost:9998/')
def test_get_root_url_remote_url(self):
self.assertEqual(get_root_url('http://www.hoverstate.com/'), 'http://www.hoverstate.com/')
def test_get_root_url_remote_url_with_port(self):
self.assertEqual(get_root_url('http://www.hoverstate.com:8080/'), 'http://www.hoverstate.com:8080/')
def test_get_root_url_remote_url_with_path(self):
self.assertEqual(get_root_url('http://www.hoverstate.com/favicon.ico'), 'http://www.hoverstate.com/')
def test_get_root_url_remote_url_with_fragment(self):
self.assertEqual(get_root_url('http://www.hoverstate.com/#/form'), 'http://www.hoverstate.com/')
def test_get_root_url_remote_url_with_fragment_and_port(self):
self.assertEqual(get_root_url('http://www.hoverstate.com:8080/#/favicon.ico'), 'http://www.hoverstate.com:8080/')
def test_get_root_url_remote_url_with_query(self):
self.assertEqual(get_root_url('http://www.hoverstate.com/?favicon.ico'), 'http://www.hoverstate.com/')
def test_get_root_url_remote_url_with_query_and_port(self):
self.assertEqual(get_root_url('http://www.hoverstate.com:8080/?favicon.ico'), 'http://www.hoverstate.com:8080/')
if __name__ == '__main__':
unittest.main()
| 47.56
| 121
| 0.737174
| 347
| 2,378
| 4.648415
| 0.106628
| 0.125852
| 0.179789
| 0.121513
| 0.928084
| 0.928084
| 0.928084
| 0.910725
| 0.875387
| 0.875387
| 0
| 0.022631
| 0.108074
| 2,378
| 50
| 122
| 47.56
| 0.73786
| 0
| 0
| 0
| 0
| 0
| 0.327028
| 0
| 0
| 0
| 0
| 0
| 0.424242
| 1
| 0.424242
| false
| 0
| 0.060606
| 0
| 0.515152
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
e8bbcd1b45e22c3f85a63d2798d91e1f87056a50
| 48
|
py
|
Python
|
instance/config.py
|
Juliet-jay/News-Hightlights
|
1e2dc831695f6395b5aac58560457c927eee4946
|
[
"MIT"
] | null | null | null |
instance/config.py
|
Juliet-jay/News-Hightlights
|
1e2dc831695f6395b5aac58560457c927eee4946
|
[
"MIT"
] | 1
|
2021-06-02T00:15:58.000Z
|
2021-06-02T00:15:58.000Z
|
instance/config.py
|
Juliet-jay/News-Hightlights
|
1e2dc831695f6395b5aac58560457c927eee4946
|
[
"MIT"
] | null | null | null |
NEWS_API_KEY="dae9480f7d12456fb06a80eceefc97b7"
| 24
| 47
| 0.916667
| 4
| 48
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.361702
| 0.020833
| 48
| 1
| 48
| 48
| 0.531915
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2cf89e5cda522e3f3f369165ab9c468adbc365e1
| 79
|
py
|
Python
|
python/module/zgraph/__init__.py
|
DerThorsten/zgraph
|
a981f7bc0fef9581626779663a3c54fb8922c72a
|
[
"MIT"
] | null | null | null |
python/module/zgraph/__init__.py
|
DerThorsten/zgraph
|
a981f7bc0fef9581626779663a3c54fb8922c72a
|
[
"MIT"
] | null | null | null |
python/module/zgraph/__init__.py
|
DerThorsten/zgraph
|
a981f7bc0fef9581626779663a3c54fb8922c72a
|
[
"MIT"
] | null | null | null |
from ._zgraph import *
def pure_python():
"""
hello
"""
pass
| 8.777778
| 22
| 0.506329
| 8
| 79
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.35443
| 79
| 8
| 23
| 9.875
| 0.745098
| 0.063291
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
fa11a33d71e0407f89f67b4ef1b1a7b8a73a527c
| 678
|
py
|
Python
|
test/example_easy2.py
|
pseudoyim/car
|
c7c1f1bb893fb8fb6f83c6765473d506979ec4b0
|
[
"BSD-2-Clause"
] | null | null | null |
test/example_easy2.py
|
pseudoyim/car
|
c7c1f1bb893fb8fb6f83c6765473d506979ec4b0
|
[
"BSD-2-Clause"
] | null | null | null |
test/example_easy2.py
|
pseudoyim/car
|
c7c1f1bb893fb8fb6f83c6765473d506979ec4b0
|
[
"BSD-2-Clause"
] | 2
|
2020-03-19T06:48:42.000Z
|
2020-03-19T19:29:13.000Z
|
import car
car.forward_right(1000)
car.pause(1000)
car.forward_left(1000)
car.pause(1000)
car.reverse_right(1000)
car.pause(1000)
car.reverse_left(1000)
car.pause(1000)
car.left(200)
car.right(200)
car.left(200)
car.forward(100)
car.pause(100)
car.forward(100)
car.pause(100)
car.reverse(100)
car.pause(100)
car.reverse(100)
car.pause(100)
car.forward(100)
car.pause(100)
car.forward(100)
car.pause(100)
car.left(200)
car.right(200)
car.left(200)
car.forward_left(6000)
car.reverse_left(2000)
car.forward(100)
car.pause(100)
car.forward(100)
car.pause(100)
car.left(200)
car.right(200)
car.left(200)
car.pause(500)
| 13.294118
| 24
| 0.69174
| 116
| 678
| 3.991379
| 0.112069
| 0.207343
| 0.190065
| 0.241901
| 0.87041
| 0.87041
| 0.643629
| 0.643629
| 0.643629
| 0.643629
| 0
| 0.204506
| 0.148968
| 678
| 50
| 25
| 13.56
| 0.59792
| 0
| 0
| 0.783784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.027027
| 0
| 0.027027
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fa6fd89378450705b8e526c65f60a0f04621a648
| 164,305
|
py
|
Python
|
Marshal/.py3_.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
Marshal/.py3_.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
Marshal/.py3_.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
# File Names : kontol.py
# Python Bytecode : 3.8
# Time Succses Parser : Mon Jul 6 13:51:36 2020
# Auto Parser Dis Version : 1.2.1
# Source : https://www.github.com/Datez-Kun
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nso\xdc\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe9\xdb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsc\xdb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdd\xda\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsW\xda\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd1\xd9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsK\xd9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc5\xd8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00
d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns?\xd8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb9\xd7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns3\xd7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xad\xd6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\'\xd6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa1\xd5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1b\xd5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x95\xd4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x0f\xd4\x00\x00\xe3\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x89\xd3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x03\xd3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns}\xd2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf7\xd1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsq\xd1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xeb\xd0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nse\xd0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdf\xcf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\
xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsY\xcf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd3\xce\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsM\xce\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc7\xcd\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsA\xcd\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbb\xcc\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns5\xcc\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xaf\xcb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns)\xcb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa3\xca\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1d\xca\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x97\xc9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x11\xc9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8b\xc8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x05\xc8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x7f\xc7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf9\xc6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x0
3\xe9\x00\x00\x00\x00Nss\xc6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xed\xc5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsg\xc5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe1\xc4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns[\xc4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd5\xc3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsO\xc3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc9\xc2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsC\xc2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x
00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbd\xc1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns7\xc1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb1\xc0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns+\xc0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa5\xbf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1f\xbf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x99\xbe\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x13\xbe\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8d\xbd\x00\x00\xe3
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x07\xbd\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x81\xbc\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xfb\xbb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsu\xbb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xef\xba\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsi\xba\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe3\xb9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns]\xb9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\
x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd7\xb8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsQ\xb8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcb\xb7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsE\xb7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbf\xb6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns9\xb6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb3\xb5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns-\xb5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa7\xb4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns!\xb4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9b\xb3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x15\xb3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8f\xb2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\t\xb2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x83\xb1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xfd\xb0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsw\xb0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\
x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf1\xaf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsk\xaf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe5\xae\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns_\xae\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd9\xad\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsS\xad\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcd\xac\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsG\xac\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc1\xab\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00
\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns;\xab\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb5\xaa\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns/\xaa\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa9\xa9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns#\xa9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9d\xa8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x17\xa8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x91\xa7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x0b\xa7
\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x85\xa6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xff\xa5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsy\xa5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf3\xa4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsm\xa4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe7\xa3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsa\xa3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdb\xa2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00
Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsU\xa2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcf\xa1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsI\xa1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc3\xa0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns=\xa0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb7\x9f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns1\x9f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xab\x9e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns%\x9e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9f\x9d\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x19\x9d\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x93\x9c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\r\x9c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x87\x9b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x01\x9b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns{\x9a\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf5\x99\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x
01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nso\x99\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe9\x98\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsc\x98\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdd\x97\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsW\x97\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd1\x96\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsK\x96\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc5\x95\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns?\x95\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00
\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb9\x94\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns3\x94\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xad\x93\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\'\x93\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa1\x92\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1b\x92\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x95\x91\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x0f\x91\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\
x00Ns\x89\x90\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x03\x90\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns}\x8f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf7\x8e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsq\x8e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xeb\x8d\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nse\x8d\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdf\x8c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsY\x8c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00
d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd3\x8b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsM\x8b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc7\x8a\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsA\x8a\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbb\x89\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns5\x89\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xaf\x88\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns)\x88\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa3\x87\x00\x00\xe3\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1d\x87\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x97\x86\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x11\x86\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8b\x85\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x05\x85\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x7f\x84\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf9\x83\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nss\x83\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x0
2\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xed\x82\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsg\x82\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe1\x81\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns[\x81\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd5\x80\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsO\x80\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc9\x7f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsC\x7f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbd~\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns7~\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb1}\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns+}\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa5|\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1f|\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x99{\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x13{\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8dz\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x07z
\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x81y\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xfbx\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsux\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xefw\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsiw\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe3v\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns]v\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd7u\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d
\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsQu\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcbt\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsEt\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbfs\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns9s\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb3r\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns-r\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa7q\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns!q\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00
\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9bp\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x15p\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8fo\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\to\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x83n\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xfdm\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nswm\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf1l\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nskl\x00\x00\xe3\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe5k\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns_k\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd9j\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsSj\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcdi\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsGi\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc1h\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns;h\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d
\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb5g\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns/g\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa9f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns#f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9de\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x17e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x91d\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x0bd\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x85c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x0
0\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xffb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsyb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf3a\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsma\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe7`\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsa`\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdb_\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsU_\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcf^\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsI^\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc3]\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns=]\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb7\\\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns1\\\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xab[\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns%[\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9fZ\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00
\x00\x00\x00Ns\x19Z\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x93Y\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\rY\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x87X\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x01X\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns{W\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf5V\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsoV\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe9U\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x0
0e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NscU\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xddT\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsWT\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd1S\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsKS\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc5R\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns?R\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb9Q\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns3Q\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xadP\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\'P\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa1O\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1bO\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x95N\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x0fN\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x89M\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x03M\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns}L\x0
0\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf7K\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsqK\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xebJ\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NseJ\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdfI\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsYI\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd3H\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsMH\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\x
a1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc7G\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsAG\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbbF\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns5F\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xafE\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns)E\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa3D\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1dD\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x97C\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00
\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x11C\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8bB\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x05B\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x7fA\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf9@\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nss@\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xed?\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsg?\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe1>\x00\x00\xe3\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns[>\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd5=\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsO=\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc9<\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsC<\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbd;\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns7;\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb1:\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x0
0d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns+:\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa59\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1f9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x998\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x138\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x8d7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x077\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x816\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xfb5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x
00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsu5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xef4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsi4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe33\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns]3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd72\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsQ2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcb1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsE1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbf0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns90\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb3/\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns-/\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa7.\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns!.\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9b-\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x15-\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\
x00\x00\x00\x00Ns\x8f,\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\t,\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x83+\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xfd*\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsw*\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf1)\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsk)\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe5(\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns_(\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x0
0e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd9\'\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsS\'\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcd&\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsG&\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc1%\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns;%\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb5$\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns/$\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa9#\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns##\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9d"\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x17"\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x91!\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x0b!\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x85 
\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xff\x1f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsy\x1f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf3\x1e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsm\x1e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe7\x1d\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsa\x1d\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdb\x1c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsU\x1c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x
00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xcf\x1b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsI\x1b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc3\x1a\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns=\x1a\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb7\x19\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns1\x19\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xab\x18\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns%\x18\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x9f\x17\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x19\x17\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x93\x16\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\r\x16\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x87\x15\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x01\x15\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns{\x14\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf5\x13\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nso\x13\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\
x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe9\x12\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsc\x12\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xdd\x11\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsW\x11\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xd1\x10\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsK\x10\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xc5\x0f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns?\x0f\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xb9\x0e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00
\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns3\x0e\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xad\r\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\'\r\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa1\x0c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1b\x0c\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x95\x0b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x0f\x0b\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x89\n\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\
x03\n\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00@\x00\x00\x00s\x82\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x01l\x02Z\x02d\x02Z\x03d\x03Z\x04d\x04Z\x05d\x05Z\x06d\x06Z\x07d\x07Z\x08d\x08Z\td\tZ\nd\nZ\x0bd\x0bZ\x0cd\x0cd\r\x84\x00Z\rd\x0ed\x0f\x84\x00Z\x0ed\x10d\x11\x84\x00Z\x0fz\ne\x0e\x83\x00\x01\x00W\x00n\x1a\x04\x00e\x10k\nr|\x01\x00\x01\x00\x01\x00e\x0f\x83\x00\x01\x00Y\x00n\x02X\x00d\x01S\x00)\x12\xe9\x00\x00\x00\x00Nz\x07\x1b[1;92mz\x07\x1b[1;96mz\x07\x1b[1;93mz\x07\x1b[1;95mz\x07\x1b[1;97mz\x07\x1b[1;94mz\x07\x1b[1;91mz\x07\x1b[1;30mz\x07\x1b[4;92mz\x04\x1b[0mc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s\xd2\x00\x00\x00t\x00\xa0\x01d\x01\xa1\x01\x01\x00t\x02t\x03d\x02\x17\x00\x83\x01\x01\x00t\x02t\x04d\x03\x17\x00\x83\x01\x01\x00t\x02t\x05d\x04\x17\x00t\x06\x17\x00d\x05\x17\x00t\x05\x17\x00d\x06\x17\x00t\x07\x17\x00d\x07\x17\x00t\x08\x17\x00d\x08\x17\x00t\t\x17\x00d\t\x17\x00\x83\x01\x01\x00t\x02t\x05d\x04\x17\x00t\x06\x17\x00d\x05\x17\x00t\x05\x17\x00d\x06\x17\x00t\x07\x17\x00d\n\x17\x00t\x08\x17\x00d\x08\x17\x00t\t\x17\x00d\x0b\x17\x00\x83\x01\x01\x00t\x02t\x05d\x04\x17\x00t\x06\x17\x00d\x05\x17\x00t\x05\x17\x00d\x06\x17\x00t\x07\x17\x00d\x0c\x17\x00t\x08\x17\x00d\x08\x17\x00t\n\x17\x00d\r\x17\x00t\x05\x17\x00\x83\x01\x01\x00t\x02t\x04d\x03\x17\x00\x83\x01\x01\x00d\x00S\x00)\x0eN\xda\x05clearuB\x01\x00\x00\xe2\x94\xac\xe2\x94\x80\xe2\x94\x90\xe2\x94\xac \xe2\x94\xac\xe2\x94\x8c\xe2\x94\xac\xe2\x94\x90\xe2\x94\xac \xe2\x94\xac\xe2\x94\x8c\xe2\x94\x80\xe2\x94\x90\xe2\x94\x8c\xe2\x94\x90\xe2\x94\x8c \xe2\x94\xac\xe2\x94\x80\xe2\x94\x90\xe2\x94\x8c\xe2\x94\x80\xe2\x94\x90\xe2\x94\x8c\xe2\x94\xac\xe2\x94\x90\xe2\x94\x8c\xe2\x94\x80\xe2\x94\x90\xe2\x94\xac\xe2\x94\xac \xe2\x94\x8c\xe2\x94\x80\xe2\x94\x90\xe2\x94\xac\xe2\x94\x80\xe2\x94\x90\n\xe2\x94\x9c\xe2\x94\x80\xe2\x94\x98\xe2\x94\x94\xe2\x94\xac\xe2\x94\x98 \xe2\x94\x82 
\xe2\x94\x9c\xe2\x94\x80\xe2\x94\xa4\xe2\x94\x82 \xe2\x94\x82\xe2\x94\x82\xe2\x94\x82\xe2\x94\x82 \xe2\x94\x82 \xe2\x94\x82 \xe2\x94\x82\xe2\x94\x82\xe2\x94\x82\xe2\x94\x82\xe2\x94\x9c\xe2\x94\x80\xe2\x94\x98\xe2\x94\x82\xe2\x94\x82 \xe2\x94\x9c\xe2\x94\xa4 \xe2\x94\x9c\xe2\x94\xac\xe2\x94\x98\n\xe2\x94\xb4 \xe2\x94\xb4 \xe2\x94\xb4 \xe2\x94\xb4 \xe2\x94\xb4\xe2\x94\x94\xe2\x94\x80\xe2\x94\x98\xe2\x94\x98\xe2\x94\x94\xe2\x94\x98 \xe2\x94\x94\xe2\x94\x80\xe2\x94\x98\xe2\x94\x94\xe2\x94\x80\xe2\x94\x98\xe2\x94\xb4 \xe2\x94\xb4\xe2\x94\xb4 \xe2\x94\xb4\xe2\x94\xb4\xe2\x94\x80\xe2\x94\x98\xe2\x94\x94\xe2\x94\x80\xe2\x94\x98\xe2\x94\xb4\xe2\x94\x94\xe2\x94\x80z*------------------------------------------\xfa\x01[u\x03\x00\x00\x00\xe2\x80\xa2\xfa\x02] z\x07Author \xfa\x02: Z\x04SanzZ\x07Youtubez\rSANZ SOEKAMTIZ\x06Githubz\x1ehttps://github.com/B4N954N2-ID)\x0b\xda\x02os\xda\x06system\xda\x05print\xda\x01u\xda\x02hi\xda\x01p\xda\x01h\xda\x02pu\xda\x01m\xda\x02bi\xda\x02hg\xa9\x00r\x11\x00\x00\x00r\x11\x00\x00\x00\xfa\x06<Sanz>\xda\x04logo\x0f\x00\x00\x00s\x0e\x00\x00\x00\x00\x01\n\x04\x0c\x03\x0c\x014\x014\x018\x01r\x13\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00s\x96\x01\x00\x00\x90\x01z*t\x00\x83\x00\x01\x00t\x01t\x02d\x01\x17\x00t\x03\x17\x00d\x02\x17\x00t\x02\x17\x00d\x03\x17\x00t\x04\x17\x00d\x04\x17\x00t\x05\x17\x00d\x05\x17\x00t\x02\x17\x00d\x06\x17\x00\x83\x01\x01\x00t\x06t\x02d\x01\x17\x00t\x03\x17\x00d\x02\x17\x00t\x02\x17\x00d\x03\x17\x00t\x07\x17\x00d\x07\x17\x00t\x05\x17\x00d\x05\x17\x00t\x03\x17\x00\x83\x01}\x00t\x08|\x00\x83\x01\xa0\t\xa1\x00}\x01t\n|\x01d\x08d\t\x83\x03}\x02|\x00\xa0\x0bd\nd\x0b\xa1\x02}\x03t\x0c\xa0\r|\x02\xa1\x01}\x04t\x08|\x03d\x0c\x83\x02}\x05|\x05\xa0\x0ed\r\xa1\x01\x01\x00|\x05\xa0\x0ed\x0e\xa1\x01\x01\x00|\x05\xa0\x0ed\x0ft\x0f|\x04\x83\x01\x17\x00d\x10\x17\x00\xa1\x01\x01\x00|\x05\xa0\x10\xa1\x00\x01\x00t\x11\xa0\x12d\x11\xa1\x01\x01\x00t\x01t\x02d\x01\x17\x00t\x03\x17\x00
d\x02\x17\x00t\x02\x17\x00d\x03\x17\x00t\x07\x17\x00d\x12\x17\x00t\x05\x17\x00d\x05\x17\x00t\x03\x17\x00|\x03\x17\x00\x83\x01\x01\x00t\x13\xa0\x14d\x13\xa1\x01\x01\x00t\x11\xa0\x12d\x14\xa1\x01\x01\x00W\x00nd\x04\x00t\x15k\n\x90\x01rv\x01\x00\x01\x00\x01\x00t\x01t\x02d\x01\x17\x00t\x05\x17\x00d\x15\x17\x00t\x02\x17\x00d\x03\x17\x00t\x07\x17\x00d\x16\x17\x00\x83\x01\x01\x00t\x11\xa0\x12d\x17\xa1\x01\x01\x00t\x16\x83\x00\x01\x00Y\x00n\x1c\x04\x00t\x17k\n\x90\x01r\x90\x01\x00\x01\x00\x01\x00t\x18\x83\x00\x01\x00Y\x00n\x02X\x00d\x00S\x00)\x18Nr\x03\x00\x00\x00\xfa\x01+r\x04\x00\x00\x00z\x03Ex r\x05\x00\x00\x00z\x0f/sdcard/file.pyz\x05File z\x06<Sanz>\xda\x04execz\x03.pyz\x07_enc.py\xda\x01wzW# Compile by Sanz\n# Youtube : SANZ SOEKAMTI\n# Github : https://github.com/B4N954N2-ID\nz\x0fimport marshal\nz\x13exec(marshal.loads(z\x02))\xe9\x02\x00\x00\x00z\x10Files Encrypted \xfa)xdg-open https://youtube.com/SanzSoekamti\xe7\x9a\x99\x99\x99\x99\x99\xb9?\xda\x01xz\x0eFile Not Found\xe9\x01\x00\x00\x00)\x19r\x13\x00\x00\x00r\x08\x00\x00\x00r\x0b\x00\x00\x00r\x0c\x00\x00\x00\xda\x01kr\x0e\x00\x00\x00\xda\x05inputr\r\x00\x00\x00\xda\x04open\xda\x04read\xda\x07compile\xda\x07replace\xda\x07marshal\xda\x05dumps\xda\x05write\xda\x04repr\xda\x05close\xda\x04time\xda\x05sleepr\x06\x00\x00\x00r\x07\x00\x00\x00\xda\x07IOError\xda\x04main\xda\x11KeyboardInterrupt\xda\x03out)\x06\xda\x01ar\x1a\x00\x00\x00\xda\x01b\xda\x01c\xda\x01d\xda\x01er\x11\x00\x00\x00r\x11\x00\x00\x00r\x12\x00\x00\x00r*\x00\x00\x00\x1d\x00\x00\x00s.\x00\x00\x00\x00\x01\x04\x01\x06\x014\x010\x01\x0c\x01\x0c\x01\x0c\x01\n\x01\n\x01\n\x01\n\x01\x16\x01\x08\x01\n\x014\x00\n\x00\x0e\x01\x10\x01$\x01\n\x01\n\x01\x10\x01r*\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sj\x00\x00\x00t\x00\xa0\x01d\x01\xa1\x01\x01\x00t\x02t\x03d\x02\x17\x00t\x04\x17\x00d\x03\x17\x00t\x03\x17\x00d\x04\x17\x00t\x03\x17\x00d\x05\x17\x00\x83\x01\x01\x00t\x05\xa0\x06d\x06\xa1\x01\x01\x00
t\x00\xa0\x01d\x07\xa1\x01\x01\x00t\x07t\x03d\x02\x17\x00t\x08\x17\x00d\x08\x17\x00t\x03\x17\x00d\x04\x17\x00t\x03\x17\x00d\t\x17\x00\x83\x01\x01\x00d\x00S\x00)\nNr\x19\x00\x00\x00r\x03\x00\x00\x00r\x14\x00\x00\x00r\x04\x00\x00\x00z\x1eThanks For Using This My Toolsr\x18\x00\x00\x00r\x1b\x00\x00\x00\xfa\x01!Z\x04Exit)\tr\'\x00\x00\x00r(\x00\x00\x00r\x08\x00\x00\x00r\x0b\x00\x00\x00r\x0c\x00\x00\x00r\x06\x00\x00\x00r\x07\x00\x00\x00\xda\x04exitr\x0e\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x12\x00\x00\x00r,\x00\x00\x004\x00\x00\x00s\n\x00\x00\x00\x00\x01\n\x01$\x00\n\x00\n\x01r,\x00\x00\x00)\x11r"\x00\x00\x00r\x06\x00\x00\x00r\'\x00\x00\x00r\x0c\x00\x00\x00r\x0f\x00\x00\x00r\x1c\x00\x00\x00r\t\x00\x00\x00r\r\x00\x00\x00r.\x00\x00\x00r\x0e\x00\x00\x00r\n\x00\x00\x00r\x10\x00\x00\x00r\x0b\x00\x00\x00r\x13\x00\x00\x00r*\x00\x00\x00r,\x00\x00\x00r+\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x12\x00\x00\x00\xda\x08<module>\x02\x00\x00\x00s"\x00\x00\x00\x18\x02\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x02\x08\x0e\x08\x17\x08\x05\x02\x01\n\x01\x0e\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\
x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marsh
al\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\
x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x
00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07m
arshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\
x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x
00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\
x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<modu
le>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x
00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\
xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<
module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x
05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\
x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\
x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x0
0r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x
01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\
xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa
9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x
08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sa
nz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05load
s\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x
00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x0
6<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05
loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x
00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xf
a\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda
\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x
00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x0
0\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec
\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x
02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x06<Sanz>\xda\x08<module>\x04\x00\x00\x00s\x02\x00\x00\x00\x08\x01'))
| 20,538.125
| 164,113
| 0.762071
| 36,806
| 164,305
| 3.401782
| 0.012824
| 0.568104
| 0.584972
| 0.584445
| 0.977964
| 0.976487
| 0.973971
| 0.971982
| 0.970337
| 0.969554
| 0
| 0.392724
| 0.000566
| 164,305
| 8
| 164,113
| 20,538.125
| 0.369742
| 0.001004
| 0
| 0
| 0
| 3
| 0.250288
| 0.237195
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 18
|
d760cfc7432816aa6a7423aa41ae169f9b420d2b
| 27,418
|
py
|
Python
|
socialNetwork/gen-py/social_network/PostStorageService.py
|
rodrigo-bruno/DeathStarBench
|
c9ce09aaf7c1298a7c88efacd1010a71db0fa59d
|
[
"Apache-2.0"
] | 364
|
2019-04-28T01:45:37.000Z
|
2022-03-31T15:08:03.000Z
|
socialNetwork/gen-py/social_network/PostStorageService.py
|
rodrigo-bruno/DeathStarBench
|
c9ce09aaf7c1298a7c88efacd1010a71db0fa59d
|
[
"Apache-2.0"
] | 111
|
2019-04-15T11:08:49.000Z
|
2022-03-31T17:39:16.000Z
|
socialNetwork/gen-py/social_network/PostStorageService.py
|
rodrigo-bruno/DeathStarBench
|
c9ce09aaf7c1298a7c88efacd1010a71db0fa59d
|
[
"Apache-2.0"
] | 229
|
2019-05-14T08:55:57.000Z
|
2022-03-31T03:14:55.000Z
|
#
# Autogenerated by Thrift Compiler (0.12.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
# Registry of every generated struct class; fix_spec() at the bottom of the
# module resolves forward references inside their thrift_spec tuples.
all_structs = []
class Iface(object):
    """Abstract service interface for PostStorageService.

    Generated by the Thrift compiler; concrete handlers implement these
    methods and are wrapped by Processor, while Client mirrors them as
    remote calls.
    """

    def StorePost(self, req_id, post, carrier):
        """
        Parameters:
         - req_id
         - post
         - carrier
        """
        pass

    def ReadPost(self, req_id, post_id, carrier):
        """
        Parameters:
         - req_id
         - post_id
         - carrier
        """
        pass

    def ReadPosts(self, req_id, post_ids, carrier):
        """
        Parameters:
         - req_id
         - post_ids
         - carrier
        """
        pass
class Client(Iface):
    """Synchronous Thrift RPC client for PostStorageService.

    Each public method serializes a *_args struct onto the output
    protocol, flushes the transport, then blocks reading the matching
    *_result struct from the input protocol.
    """

    def __init__(self, iprot, oprot=None):
        # A single protocol may serve both directions; an explicit oprot
        # overrides the output side.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def StorePost(self, req_id, post, carrier):
        """
        Parameters:
         - req_id
         - post
         - carrier
        """
        self.send_StorePost(req_id, post, carrier)
        self.recv_StorePost()

    def send_StorePost(self, req_id, post, carrier):
        # Write the CALL message header followed by the argument struct.
        self._oprot.writeMessageBegin('StorePost', TMessageType.CALL, self._seqid)
        args = StorePost_args()
        args.req_id = req_id
        args.post = post
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_StorePost(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            # Transport-level failure reported by the server: deserialize
            # and re-raise it locally.
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = StorePost_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            # Declared service exception travels inside the result struct.
            raise result.se
        return

    def ReadPost(self, req_id, post_id, carrier):
        """
        Parameters:
         - req_id
         - post_id
         - carrier
        """
        self.send_ReadPost(req_id, post_id, carrier)
        return self.recv_ReadPost()

    def send_ReadPost(self, req_id, post_id, carrier):
        self._oprot.writeMessageBegin('ReadPost', TMessageType.CALL, self._seqid)
        args = ReadPost_args()
        args.req_id = req_id
        args.post_id = post_id
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_ReadPost(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = ReadPost_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.se is not None:
            raise result.se
        # Non-void method with neither a value nor an exception on the wire.
        raise TApplicationException(TApplicationException.MISSING_RESULT, "ReadPost failed: unknown result")

    def ReadPosts(self, req_id, post_ids, carrier):
        """
        Parameters:
         - req_id
         - post_ids
         - carrier
        """
        self.send_ReadPosts(req_id, post_ids, carrier)
        return self.recv_ReadPosts()

    def send_ReadPosts(self, req_id, post_ids, carrier):
        self._oprot.writeMessageBegin('ReadPosts', TMessageType.CALL, self._seqid)
        args = ReadPosts_args()
        args.req_id = req_id
        args.post_ids = post_ids
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_ReadPosts(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = ReadPosts_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.se is not None:
            raise result.se
        raise TApplicationException(TApplicationException.MISSING_RESULT, "ReadPosts failed: unknown result")
class Processor(Iface, TProcessor):
    """Server-side dispatcher for PostStorageService.

    Reads incoming call messages, routes them by method name to the
    wrapped handler, and serializes the reply (or exception) back onto
    the output protocol.
    """

    def __init__(self, handler):
        self._handler = handler
        # Method-name -> unbound process_* function dispatch table.
        self._processMap = {}
        self._processMap["StorePost"] = Processor.process_StorePost
        self._processMap["ReadPost"] = Processor.process_ReadPost
        self._processMap["ReadPosts"] = Processor.process_ReadPosts

    def process(self, iprot, oprot):
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # Unknown method: drain the argument struct and report an
            # UNKNOWN_METHOD application exception back to the caller.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_StorePost(self, seqid, iprot, oprot):
        args = StorePost_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = StorePost_result()
        try:
            self._handler.StorePost(args.req_id, args.post, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            # Transport problems cannot be reported over the same
            # transport; propagate to the server loop.
            raise
        except ServiceException as se:
            # Declared exception: still a REPLY, carried in the result.
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            # Anything else is masked as a generic internal error.
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("StorePost", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_ReadPost(self, seqid, iprot, oprot):
        args = ReadPost_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = ReadPost_result()
        try:
            result.success = self._handler.ReadPost(args.req_id, args.post_id, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("ReadPost", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_ReadPosts(self, seqid, iprot, oprot):
        args = ReadPosts_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = ReadPosts_result()
        try:
            result.success = self._handler.ReadPosts(args.req_id, args.post_ids, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("ReadPosts", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class StorePost_args(object):
    """
    Argument struct for StorePost.

    Attributes:
     - req_id
     - post
     - carrier
    """

    def __init__(self, req_id=None, post=None, carrier=None,):
        self.req_id = req_id
        self.post = post
        self.carrier = carrier

    def read(self, iprot):
        # Fast path: delegate to the C accelerator when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.post = Post()
                    self.post.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype108, _vtype109, _size107) = iprot.readMapBegin()
                    for _i111 in range(_size107):
                        _key112 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val113 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key112] = _val113
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StorePost_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.post is not None:
            oprot.writeFieldBegin('post', TType.STRUCT, 2)
            self.post.write(oprot)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter114, viter115 in self.carrier.items():
                oprot.writeString(kiter114.encode('utf-8') if sys.version_info[0] == 2 else kiter114)
                oprot.writeString(viter115.encode('utf-8') if sys.version_info[0] == 2 else viter115)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(StorePost_args)
# Field metadata: (field id, wire type, name, nested spec, default).
StorePost_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.STRUCT, 'post', [Post, None], None, ),  # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
class StorePost_result(object):
    """
    Result struct for StorePost (void return; only the declared
    ServiceException can come back).

    Attributes:
     - se
    """

    def __init__(self, se=None,):
        self.se = se

    def read(self, iprot):
        # Fast path: delegate to the C accelerator when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StorePost_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(StorePost_result)
# Field metadata: (field id, wire type, name, nested spec, default).
StorePost_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
class ReadPost_args(object):
    """
    Argument struct for ReadPost.

    Attributes:
     - req_id
     - post_id
     - carrier
    """

    def __init__(self, req_id=None, post_id=None, carrier=None,):
        self.req_id = req_id
        self.post_id = post_id
        self.carrier = carrier

    def read(self, iprot):
        # Fast path: delegate to the C accelerator when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.post_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype117, _vtype118, _size116) = iprot.readMapBegin()
                    for _i120 in range(_size116):
                        _key121 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val122 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key121] = _val122
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ReadPost_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.post_id is not None:
            oprot.writeFieldBegin('post_id', TType.I64, 2)
            oprot.writeI64(self.post_id)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter123, viter124 in self.carrier.items():
                oprot.writeString(kiter123.encode('utf-8') if sys.version_info[0] == 2 else kiter123)
                oprot.writeString(viter124.encode('utf-8') if sys.version_info[0] == 2 else viter124)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(ReadPost_args)
# Field metadata: (field id, wire type, name, nested spec, default).
ReadPost_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.I64, 'post_id', None, None, ),  # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
class ReadPost_result(object):
    """
    Result struct for ReadPost: field 0 carries the returned Post,
    field 1 the declared ServiceException.

    Attributes:
     - success
     - se
    """

    def __init__(self, success=None, se=None,):
        self.success = success
        self.se = se

    def read(self, iprot):
        # Fast path: delegate to the C accelerator when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Post()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ReadPost_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(ReadPost_result)
# Field metadata: (field id, wire type, name, nested spec, default).
ReadPost_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Post, None], None, ),  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
class ReadPosts_args(object):
    """
    Argument struct for ReadPosts.

    Attributes:
     - req_id
     - post_ids
     - carrier
    """

    def __init__(self, req_id=None, post_ids=None, carrier=None,):
        self.req_id = req_id
        self.post_ids = post_ids
        self.carrier = carrier

    def read(self, iprot):
        # Fast path: delegate to the C accelerator when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    self.post_ids = []
                    (_etype128, _size125) = iprot.readListBegin()
                    for _i129 in range(_size125):
                        _elem130 = iprot.readI64()
                        self.post_ids.append(_elem130)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype132, _vtype133, _size131) = iprot.readMapBegin()
                    for _i135 in range(_size131):
                        _key136 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val137 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key136] = _val137
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ReadPosts_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.post_ids is not None:
            oprot.writeFieldBegin('post_ids', TType.LIST, 2)
            oprot.writeListBegin(TType.I64, len(self.post_ids))
            for iter138 in self.post_ids:
                oprot.writeI64(iter138)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter139, viter140 in self.carrier.items():
                oprot.writeString(kiter139.encode('utf-8') if sys.version_info[0] == 2 else kiter139)
                oprot.writeString(viter140.encode('utf-8') if sys.version_info[0] == 2 else viter140)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(ReadPosts_args)
# Field metadata: (field id, wire type, name, nested spec, default).
ReadPosts_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.LIST, 'post_ids', (TType.I64, None, False), None, ),  # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
class ReadPosts_result(object):
    """
    Result struct for ReadPosts: field 0 carries the returned list of
    Post structs, field 1 the declared ServiceException.

    Attributes:
     - success
     - se
    """

    def __init__(self, success=None, se=None,):
        self.success = success
        self.se = se

    def read(self, iprot):
        # Fast path: delegate to the C accelerator when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype144, _size141) = iprot.readListBegin()
                    for _i145 in range(_size141):
                        _elem146 = Post()
                        _elem146.read(iprot)
                        self.success.append(_elem146)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ReadPosts_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter147 in self.success:
                iter147.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(ReadPosts_result)
# Field metadata: (field id, wire type, name, nested spec, default).
ReadPosts_result.thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT, [Post, None], False), None, ),  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
# Resolve forward references inside every registered thrift_spec, then
# drop the temporary registry.
fix_spec(all_structs)
del all_structs
| 34.444724
| 134
| 0.570939
| 2,930
| 27,418
| 5.125597
| 0.073379
| 0.017978
| 0.026368
| 0.023971
| 0.806033
| 0.780996
| 0.750033
| 0.734519
| 0.717139
| 0.714076
| 0
| 0.018421
| 0.32486
| 27,418
| 795
| 135
| 34.48805
| 0.79288
| 0.024108
| 0
| 0.73955
| 1
| 0
| 0.03043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096463
| false
| 0.004823
| 0.012862
| 0.028939
| 0.192926
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ad72aef4b1e78e193fc9c37c7f4c06819bcf9c74
| 6,543
|
py
|
Python
|
rdmo/domain/tests/test_validator_locked.py
|
berkerY/rdmo
|
c0500f9b6caff9106a254a05e0d0e8018fc8db28
|
[
"Apache-2.0"
] | 77
|
2016-08-09T11:40:20.000Z
|
2022-03-06T11:03:26.000Z
|
rdmo/domain/tests/test_validator_locked.py
|
MSpenger/rdmo
|
c0500f9b6caff9106a254a05e0d0e8018fc8db28
|
[
"Apache-2.0"
] | 377
|
2016-07-01T13:59:36.000Z
|
2022-03-30T13:53:19.000Z
|
rdmo/domain/tests/test_validator_locked.py
|
MSpenger/rdmo
|
c0500f9b6caff9106a254a05e0d0e8018fc8db28
|
[
"Apache-2.0"
] | 47
|
2016-06-23T11:32:19.000Z
|
2022-03-01T11:34:37.000Z
|
import pytest
from django.conf import settings
from django.core.exceptions import ValidationError
from rest_framework.exceptions import \
ValidationError as RestFameworkValidationError
from ..models import Attribute
from ..serializers.v1 import AttributeSerializer
from ..validators import AttributeLockedValidator
def test_create(db):
    """Creating an attribute under an unlocked parent passes validation."""
    parent_uri = 'http://example.com/terms/domain/individual/single'
    data = {
        'uri_prefix': settings.DEFAULT_URI_PREFIX,
        'key': 'test',
        'parent': Attribute.objects.get(uri=parent_uri)
    }
    AttributeLockedValidator()(data)
def test_create_no_parent(db):
    """Creating a top-level attribute (no parent) passes validation."""
    data = {
        'uri_prefix': settings.DEFAULT_URI_PREFIX,
        'key': 'test'
    }
    AttributeLockedValidator()(data)
def test_create_error(db):
    """Creating under a locked parent raises ValidationError."""
    parent_uri = 'http://example.com/terms/domain/individual/single'
    parent = Attribute.objects.get(uri=parent_uri)
    parent.locked = True
    parent.save()
    # Re-fetch the parent, as a caller would, now that it is locked.
    data = {
        'uri_prefix': settings.DEFAULT_URI_PREFIX,
        'key': 'test',
        'parent': Attribute.objects.get(uri=parent_uri)
    }
    with pytest.raises(ValidationError):
        AttributeLockedValidator()(data)
def test_update(db):
    """Updating an unlocked attribute passes validation."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent
    }
    AttributeLockedValidator(instance)(data)
def test_update_error(db):
    """Updating an already-locked attribute raises ValidationError."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    instance.locked = True
    instance.save()
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent,
        'locked': True
    }
    with pytest.raises(ValidationError):
        AttributeLockedValidator(instance)(data)
def test_update_lock(db):
    """Locking a previously unlocked attribute is allowed."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent,
        'locked': True
    }
    AttributeLockedValidator(instance)(data)
def test_update_unlock(db):
    """Unlocking a locked attribute is allowed."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    instance.locked = True
    instance.save()
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent,
        'locked': False
    }
    AttributeLockedValidator(instance)(data)
def test_update_error_parent(db):
    """Updating a child whose parent is locked raises ValidationError."""
    parent = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single')
    parent.locked = True
    parent.save()
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent
    }
    with pytest.raises(ValidationError):
        AttributeLockedValidator(instance)(data)
def test_serializer_create(db):
    """Serializer-context create under an unlocked parent passes."""
    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer())
    data = {
        'uri_prefix': settings.DEFAULT_URI_PREFIX,
        'key': 'test',
        'parent': Attribute.objects.get(uri='http://example.com/terms/domain/individual/single')
    }
    validator(data)
def test_serializer_create_no_parent(db):
    """Serializer-context create without a parent passes."""
    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer())
    data = {
        'uri_prefix': settings.DEFAULT_URI_PREFIX,
        'key': 'test'
    }
    validator(data)
def test_serializer_create_error(db):
    """Serializer-context create under a locked parent raises the DRF error."""
    locked_parent = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single')
    locked_parent.locked = True
    locked_parent.save()
    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer())
    data = {
        'uri_prefix': settings.DEFAULT_URI_PREFIX,
        'key': 'test',
        'parent': locked_parent
    }
    with pytest.raises(RestFameworkValidationError):
        validator(data)
def test_serializer_update(db):
    """Serializer-context update of an unlocked attribute passes."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer(instance=instance))
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent
    }
    validator(data)
def test_serializer_update_error(db):
    """Serializer-context update of a locked attribute raises the DRF error."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    instance.locked = True
    instance.save()
    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer(instance=instance))
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent,
        'locked': True
    }
    with pytest.raises(RestFameworkValidationError):
        validator(data)
def test_serializer_update_lock(db):
    """Serializer-context locking of an unlocked attribute is allowed."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer(instance=instance))
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent,
        'locked': True
    }
    validator(data)
def test_serializer_update_unlock(db):
    """Serializer-context unlocking of a locked attribute is allowed."""
    instance = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')
    instance.locked = True
    instance.save()
    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer(instance=instance))
    data = {
        'uri_prefix': instance.uri_prefix,
        'key': instance.key,
        'parent': instance.parent,
        'locked': False
    }
    validator(data)
def test_serializer_update_error_parent(db):
    """Updating a child of a locked parent attribute must be rejected."""
    parent = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single')
    parent.locked = True
    parent.save()

    attribute = Attribute.objects.get(uri='http://example.com/terms/domain/individual/single/text')

    validator = AttributeLockedValidator()
    validator.set_context(AttributeSerializer(instance=attribute))

    payload = {
        'uri_prefix': attribute.uri_prefix,
        'key': attribute.key,
        'parent': attribute.parent
    }
    with pytest.raises(RestFameworkValidationError):
        validator(payload)
| 30.432558
| 101
| 0.686382
| 664
| 6,543
| 6.612952
| 0.082831
| 0.065589
| 0.07356
| 0.085174
| 0.915281
| 0.913004
| 0.913004
| 0.896151
| 0.879982
| 0.879982
| 0
| 0.000189
| 0.192572
| 6,543
| 214
| 102
| 30.574766
| 0.830967
| 0
| 0
| 0.84375
| 0
| 0
| 0.188751
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.04375
| 0
| 0.14375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d16b2da8a0f7cabcb6debf79a93852c5193df244
| 899
|
py
|
Python
|
uscs_values.py
|
porte404/liquepy
|
9a74dd4705888e9e469fefdf40d82e640712a2b4
|
[
"MIT"
] | null | null | null |
uscs_values.py
|
porte404/liquepy
|
9a74dd4705888e9e469fefdf40d82e640712a2b4
|
[
"MIT"
] | null | null | null |
uscs_values.py
|
porte404/liquepy
|
9a74dd4705888e9e469fefdf40d82e640712a2b4
|
[
"MIT"
] | null | null | null |
# Fines-content values keyed by tuples of USCS soil classification symbols.
# The original literal listed every key tuple twice with identical values;
# in a dict literal the later duplicates silently overwrite the earlier
# identical entries, so they were dead weight and have been removed.
# NOTE(review): values (0.045–0.3) appear to be fractions; units/provenance
# are not shown in this file — confirm against the source reference.
fines_content = {
    ('SW', 'SP', 'GW', 'GP'): 0.045,
    ('SW-SM', 'SW-SC', 'SP-SM', 'SP-SC'): 0.115,
    ('SW-GM', 'SW-GC', 'SP-GM', 'SP-GC'): 0.115,
    ('GW-SM', 'GW-SC', 'GP-SM', 'GP-SC'): 0.115,
    ('GW-GM', 'GW-GC', 'GP-GM', 'GP-GC'): 0.115,
    ('SM-SC', 'SC-SM'): 0.25,
    ('GM-GC', 'GC-GM'): 0.25,
    ('SM', 'GM'): 0.3,
    ('SC', 'GC'): 0.2,
}
# Per-USCS-group values keyed by tuples of soil classification symbols.
# NOTE(review): these values are identical to fines_content above — possibly
# a copy-paste placeholder for actual density data; confirm with the author.
density_values = {
    ('SW', 'SP', 'GW', 'GP'): 0.045,
    ('SW-SM', 'SW-SC', 'SP-SM', 'SP-SC'): 0.115,
    ('SW-GM', 'SW-GC', 'SP-GM', 'SP-GC'): 0.115,
    ('GW-SM', 'GW-SC', 'GP-SM', 'GP-SC'): 0.115,
    ('GW-GM', 'GW-GC', 'GP-GM', 'GP-GC'): 0.115,
    ('SM-SC', 'SC-SM'): 0.25,
    ('GM-GC', 'GC-GM'): 0.25,
    ('SM', 'GM'): 0.3,
    ('SC', 'GC'): 0.2,
}
| 27.242424
| 41
| 0.450501
| 202
| 899
| 1.99505
| 0.089109
| 0.119107
| 0.08933
| 0.059553
| 0.937965
| 0.937965
| 0.937965
| 0.937965
| 0.937965
| 0.937965
| 0
| 0.108303
| 0.07564
| 899
| 33
| 42
| 27.242424
| 0.376655
| 0
| 0
| 0.870968
| 0
| 0
| 0.400922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
66f782787f218aa799002683b2e370b128654e7a
| 335,300
|
py
|
Python
|
auth3/identity/admin/admin_pb2.py
|
auth3-dev/python-sdk
|
0a31cf3307441c5ba8220ab5e1f8b30dc3780ee9
|
[
"Apache-2.0"
] | null | null | null |
auth3/identity/admin/admin_pb2.py
|
auth3-dev/python-sdk
|
0a31cf3307441c5ba8220ab5e1f8b30dc3780ee9
|
[
"Apache-2.0"
] | null | null | null |
auth3/identity/admin/admin_pb2.py
|
auth3-dev/python-sdk
|
0a31cf3307441c5ba8220ab5e1f8b30dc3780ee9
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: identity/admin/admin.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='identity/admin/admin.proto',
package='depot.devtools.auth.v0.identity.admin',
syntax='proto3',
serialized_options=b'\n\030dev.auth3.identity.adminB\nAdminProtoP\001Z\'github.com/auth3-dev/go-sdk/admin;admin\370\001\001\242\002\004A3IA\252\002\024Auth3.Identity.Admin\312\002\024Auth3\\Identity\\Admin\352\002\026Auth3::Identity::Admin',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n1identity/admin/admin.proto\x12%depot.devtools.auth.v0.identity.admin\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x86\x01\n\x15\x43reateIdentityRequest\x12+\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructR\x04\x64\x61ta\x12#\n\rconnection_id\x18\x02 \x01(\tR\x0c\x63onnectionId\x12\x1b\n\tschema_id\x18\x03 \x01(\tR\x08schemaId\"9\n\x16\x43reateIdentityResponse\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\"\x16\n\x14GetIdentitiesRequest\"\xe7\x03\n\x15GetIdentitiesResponse\x12\x65\n\nidentities\x18\x01 \x03(\x0b\x32\x45.depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.IdentityR\nidentities\x1a\xbe\x02\n\x08Identity\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\x12\x39\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\'\n\x0fmain_identifier\x18\x04 \x01(\tR\x0emainIdentifier\x12\x1b\n\tschema_id\x18\x05 \x01(\tR\x08schemaId\x12U\n\x04lock\x18\x06 \x01(\x0e\x32\x41.depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.LockR\x04lock\"&\n\x04Lock\x12\x0c\n\x08UNLOCKED\x10\x00\x12\x10\n\x0c\x41\x44MIN_LOCKED\x10\x01\"5\n\x12GetIdentityRequest\x12\x1f\n\x0bidentity_id\x18\x02 \x01(\tR\nidentityId\"\xed\x04\n\x13GetIdentityResponse\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\x12\x39\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\'\n\x0fmain_identifier\x18\x04 \x01(\tR\x0emainIdentifier\x12\x1b\n\ttraits_id\x18\x05 \x01(\tR\x08traitsId\x12#\n\raddresses_ids\x18\x06 \x03(\tR\x0c\x61\x64\x64ressesIds\x12w\n\x0f\x63redentials_ids\x18\x07 \x03(\x0b\x32N.depot.devtools.auth.v0.identity.admin.GetIdentityResponse.CredentialsIdsEntryR\x0e\x63redentialsIds\x12\x1b\n\tschema_id\x18\x08 \x01(\tR\x08schemaId\x12S\n\x04lock\x18\t 
\x01(\x0e\x32?.depot.devtools.auth.v0.identity.admin.GetIdentityResponse.LockR\x04lock\x1a\x41\n\x13\x43redentialsIdsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"&\n\x04Lock\x12\x0c\n\x08UNLOCKED\x10\x00\x12\x10\n\x0c\x41\x44MIN_LOCKED\x10\x01\"U\n\x1fGetIdentitiesByAttributeRequest\x12\x1c\n\tattribute\x18\x01 \x01(\tR\tattribute\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"\xa6\x06\n GetIdentitiesByAttributeResponse\x12p\n\nidentities\x18\x01 \x03(\x0b\x32P.depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.IdentityR\nidentities\x1a\x8f\x05\n\x08Identity\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\x12\x39\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\'\n\x0fmain_identifier\x18\x04 \x01(\tR\x0emainIdentifier\x12\x1b\n\ttraits_id\x18\x05 \x01(\tR\x08traitsId\x12#\n\raddresses_ids\x18\x06 \x03(\tR\x0c\x61\x64\x64ressesIds\x12\x8d\x01\n\x0f\x63redentials_ids\x18\x07 \x03(\x0b\x32\x64.depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.CredentialsIdsEntryR\x0e\x63redentialsIds\x12\x1b\n\tschema_id\x18\x08 \x01(\tR\x08schemaId\x12i\n\x04lock\x18\t \x01(\x0e\x32U.depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.LockR\x04lock\x1a\x41\n\x13\x43redentialsIdsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"&\n\x04Lock\x12\x0c\n\x08UNLOCKED\x10\x00\x12\x10\n\x0c\x41\x44MIN_LOCKED\x10\x01\"\xb7\x01\n\x15UpdateIdentityRequest\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\x12U\n\x04lock\x18\x02 
\x01(\x0e\x32\x41.depot.devtools.auth.v0.identity.admin.UpdateIdentityRequest.LockR\x04lock\"&\n\x04Lock\x12\x0c\n\x08UNLOCKED\x10\x00\x12\x10\n\x0c\x41\x44MIN_LOCKED\x10\x01\"\x18\n\x16UpdateIdentityResponse\"8\n\x15\x44\x65leteIdentityRequest\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\"\x18\n\x16\x44\x65leteIdentityResponse\"8\n\x15GetCredentialsRequest\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\"\xa8\x04\n\x16GetCredentialsResponse\x12p\n\x0b\x63redentials\x18\x01 \x03(\x0b\x32N.depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.CredentialsEntryR\x0b\x63redentials\x1a\x90\x02\n\nCredential\x12#\n\rcredential_id\x18\x01 \x01(\tR\x0c\x63redentialId\x12\x39\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x1f\n\x0bidentity_id\x18\x04 \x01(\tR\nidentityId\x12\x12\n\x04type\x18\x05 \x01(\x05R\x04type\x12\x12\n\x04name\x18\x06 \x01(\tR\x04name\x12\x1e\n\nconfigured\x18\x07 \x01(\x08R\nconfigured\x1a\x88\x01\n\x10\x43redentialsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12^\n\x05value\x18\x02 \x01(\x0b\x32H.depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.CredentialR\x05value:\x02\x38\x01\"\x8c\x01\n\x17UpdateCredentialRequest\x12+\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructR\x04\x64\x61ta\x12#\n\rconnection_id\x18\x02 \x01(\tR\x0c\x63onnectionId\x12\x1f\n\x0bidentity_id\x18\x03 \x01(\tR\nidentityId\"\x1a\n\x18UpdateCredentialResponse\"\x86\t\n\x17\x43reateConnectionRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1b\n\tclient_id\x18\x02 \x01(\tR\x08\x63lientId\x12#\n\rclient_secret\x18\x03 \x01(\tR\x0c\x63lientSecret\x12(\n\x10\x62utton_image_url\x18\x04 \x01(\tR\x0e\x62uttonImageUrl\x12\x64\n\x08provider\x18\x06 \x01(\x0e\x32H.depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.ProvidersR\x08provider\x12,\n\x12oidc_discovery_url\x18\x07 
\x01(\tR\x10oidcDiscoveryUrl\x12\x10\n\x03mfa\x18\x08 \x03(\tR\x03mfa\x12X\n\x04type\x18\t \x01(\x0e\x32\x44.depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.TypesR\x04type\x12\x16\n\x06scopes\x18\n \x01(\tR\x06scopes\"\x93\x05\n\tProviders\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06OPENID\x10\x01\x12\n\n\x06\x41MAZON\x10\x02\x12\r\n\tBITBUCKET\x10\x03\x12\x07\n\x03\x42OX\x10\x04\x12\x0f\n\x0b\x44\x41ILYMOTION\x10\x05\x12\n\n\x06\x44\x45\x45ZER\x10\x06\x12\x10\n\x0c\x44IGITALOCEAN\x10\x07\x12\x0b\n\x07\x44ISCORD\x10\x08\x12\x0b\n\x07\x44ROPBOX\x10\t\x12\r\n\tEVEONLINE\x10\n\x12\x0c\n\x08\x46\x41\x43\x45\x42OOK\x10\x0b\x12\n\n\x06\x46ITBIT\x10\x0c\x12\t\n\x05GITEA\x10\r\x12\n\n\x06GITHUB\x10\x0e\x12\n\n\x06GITLAB\x10\x0f\x12\n\n\x06GOOGLE\x10\x10\x12\x0b\n\x07SHOPIFY\x10\x12\x12\x0e\n\nSOUNDCLOUD\x10\x13\x12\x0b\n\x07SPOTIFY\x10\x14\x12\t\n\x05STEAM\x10\x15\x12\n\n\x06STRIPE\x10\x16\x12\n\n\x06TWITCH\x10\x17\x12\x08\n\x04UBER\x10\x18\x12\t\n\x05WEPAY\x10\x19\x12\t\n\x05YAHOO\x10\x1a\x12\n\n\x06YAMMER\x10\x1b\x12\n\n\x06HEROKU\x10\x1c\x12\r\n\tINSTAGRAM\x10\x1d\x12\x0c\n\x08INTERCOM\x10\x1e\x12\t\n\x05KAKAO\x10\x1f\x12\n\n\x06LASTFM\x10 \x12\x0c\n\x08LINKEDIN\x10!\x12\x08\n\x04LINE\x10\"\x12\x0c\n\x08ONEDRIVE\x10#\x12\x0b\n\x07\x41ZUREAD\x10$\x12\x13\n\x0fMICROSOFTONLINE\x10%\x12\r\n\tBATTLENET\x10&\x12\n\n\x06PAYPAL\x10\'\x12\x0b\n\x07TWITTER\x10(\x12\x0e\n\nSALESFORCE\x10)\x12\x0c\n\x08TYPETALK\x10*\x12\t\n\x05SLACK\x10+\x12\n\n\x06MEETUP\x10,\x12\x08\n\x04XERO\x10.\x12\x06\n\x02VK\x10/\x12\t\n\x05NAVER\x10\x30\x12\n\n\x06YANDEX\x10\x31\x12\r\n\tNEXTCLOUD\x10\x32\x12\t\n\x05\x41PPLE\x10\x34\x12\n\n\x06STRAVA\x10\x35\"=\n\x05Types\x12\t\n\x05UNSET\x10\x00\x12\x0c\n\x08PASSWORD\x10\x01\x12\x07\n\x03OTP\x10\x02\x12\x08\n\x04TOTP\x10\x03\x12\x08\n\x04OIDC\x10\x04\".\n\x18\x43reateConnectionResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x17\n\x15GetConnectionsRequest\"\xc5\x0b\n\x16GetConnectionsResponse\x12j\n\x0b\x63onnections\x18\x01 
\x03(\x0b\x32H.depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.ConnectionR\x0b\x63onnections\x1a\x94\x04\n\nConnection\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1b\n\tclient_id\x18\x02 \x01(\tR\x08\x63lientId\x12#\n\rclient_secret\x18\x03 \x01(\tR\x0c\x63lientSecret\x12(\n\x10\x62utton_image_url\x18\x04 \x01(\tR\x0e\x62uttonImageUrl\x12\x63\n\x08provider\x18\x06 \x01(\x0e\x32G.depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.ProvidersR\x08provider\x12,\n\x12oidc_discovery_url\x18\x07 \x01(\tR\x10oidcDiscoveryUrl\x12\x10\n\x03mfa\x18\x08 \x03(\tR\x03mfa\x12W\n\x04type\x18\t \x01(\x0e\x32\x43.depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.TypesR\x04type\x12\x0e\n\x02id\x18\n \x01(\tR\x02id\x12\x16\n\x06scopes\x18\x0b \x01(\tR\x06scopes\x12`\n\x07purpose\x18\x0c \x01(\x0e\x32\x46.depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.PurposesR\x07purpose\"\x93\x05\n\tProviders\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06OPENID\x10\x01\x12\n\n\x06\x41MAZON\x10\x02\x12\r\n\tBITBUCKET\x10\x03\x12\x07\n\x03\x42OX\x10\x04\x12\x0f\n\x0b\x44\x41ILYMOTION\x10\x05\x12\n\n\x06\x44\x45\x45ZER\x10\x06\x12\x10\n\x0c\x44IGITALOCEAN\x10\x07\x12\x0b\n\x07\x44ISCORD\x10\x08\x12\x0b\n\x07\x44ROPBOX\x10\t\x12\r\n\tEVEONLINE\x10\n\x12\x0c\n\x08\x46\x41\x43\x45\x42OOK\x10\x0b\x12\n\n\x06\x46ITBIT\x10\x0c\x12\t\n\x05GITEA\x10\r\x12\n\n\x06GITHUB\x10\x0e\x12\n\n\x06GITLAB\x10\x0f\x12\n\n\x06GOOGLE\x10\x10\x12\x0b\n\x07SHOPIFY\x10\x12\x12\x0e\n\nSOUNDCLOUD\x10\x13\x12\x0b\n\x07SPOTIFY\x10\x14\x12\t\n\x05STEAM\x10\x15\x12\n\n\x06STRIPE\x10\x16\x12\n\n\x06TWITCH\x10\x17\x12\x08\n\x04UBER\x10\x18\x12\t\n\x05WEPAY\x10\x19\x12\t\n\x05YAHOO\x10\x1a\x12\n\n\x06YAMMER\x10\x1b\x12\n\n\x06HEROKU\x10\x1c\x12\r\n\tINSTAGRAM\x10\x1d\x12\x0c\n\x08INTERCOM\x10\x1e\x12\t\n\x05KAKAO\x10\x1f\x12\n\n\x06LASTFM\x10 
\x12\x0c\n\x08LINKEDIN\x10!\x12\x08\n\x04LINE\x10\"\x12\x0c\n\x08ONEDRIVE\x10#\x12\x0b\n\x07\x41ZUREAD\x10$\x12\x13\n\x0fMICROSOFTONLINE\x10%\x12\r\n\tBATTLENET\x10&\x12\n\n\x06PAYPAL\x10\'\x12\x0b\n\x07TWITTER\x10(\x12\x0e\n\nSALESFORCE\x10)\x12\x0c\n\x08TYPETALK\x10*\x12\t\n\x05SLACK\x10+\x12\n\n\x06MEETUP\x10,\x12\x08\n\x04XERO\x10.\x12\x06\n\x02VK\x10/\x12\t\n\x05NAVER\x10\x30\x12\n\n\x06YANDEX\x10\x31\x12\r\n\tNEXTCLOUD\x10\x32\x12\t\n\x05\x41PPLE\x10\x34\x12\n\n\x06STRAVA\x10\x35\"=\n\x05Types\x12\t\n\x05UNSET\x10\x00\x12\x0c\n\x08PASSWORD\x10\x01\x12\x07\n\x03OTP\x10\x02\x12\x08\n\x04TOTP\x10\x03\x12\x08\n\x04OIDC\x10\x04\"S\n\x08Purposes\x12\x13\n\x0fPURPOSE_UNKNOWN\x10\x00\x12\x17\n\x13PURPOSE_MAIN_FACTOR\x10\x01\x12\x19\n\x15PURPOSE_SECOND_FACTOR\x10\x02\"\x96\t\n\x17UpdateConnectionRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1b\n\tclient_id\x18\x02 \x01(\tR\x08\x63lientId\x12#\n\rclient_secret\x18\x03 \x01(\tR\x0c\x63lientSecret\x12(\n\x10\x62utton_image_url\x18\x04 \x01(\tR\x0e\x62uttonImageUrl\x12\x64\n\x08provider\x18\x06 \x01(\x0e\x32H.depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.ProvidersR\x08provider\x12,\n\x12oidc_discovery_url\x18\x07 \x01(\tR\x10oidcDiscoveryUrl\x12\x10\n\x03mfa\x18\x08 \x03(\tR\x03mfa\x12X\n\x04type\x18\t \x01(\x0e\x32\x44.depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.TypesR\x04type\x12\x0e\n\x02id\x18\n \x01(\tR\x02id\x12\x16\n\x06scopes\x18\x0b 
\x01(\tR\x06scopes\"\x93\x05\n\tProviders\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06OPENID\x10\x01\x12\n\n\x06\x41MAZON\x10\x02\x12\r\n\tBITBUCKET\x10\x03\x12\x07\n\x03\x42OX\x10\x04\x12\x0f\n\x0b\x44\x41ILYMOTION\x10\x05\x12\n\n\x06\x44\x45\x45ZER\x10\x06\x12\x10\n\x0c\x44IGITALOCEAN\x10\x07\x12\x0b\n\x07\x44ISCORD\x10\x08\x12\x0b\n\x07\x44ROPBOX\x10\t\x12\r\n\tEVEONLINE\x10\n\x12\x0c\n\x08\x46\x41\x43\x45\x42OOK\x10\x0b\x12\n\n\x06\x46ITBIT\x10\x0c\x12\t\n\x05GITEA\x10\r\x12\n\n\x06GITHUB\x10\x0e\x12\n\n\x06GITLAB\x10\x0f\x12\n\n\x06GOOGLE\x10\x10\x12\x0b\n\x07SHOPIFY\x10\x12\x12\x0e\n\nSOUNDCLOUD\x10\x13\x12\x0b\n\x07SPOTIFY\x10\x14\x12\t\n\x05STEAM\x10\x15\x12\n\n\x06STRIPE\x10\x16\x12\n\n\x06TWITCH\x10\x17\x12\x08\n\x04UBER\x10\x18\x12\t\n\x05WEPAY\x10\x19\x12\t\n\x05YAHOO\x10\x1a\x12\n\n\x06YAMMER\x10\x1b\x12\n\n\x06HEROKU\x10\x1c\x12\r\n\tINSTAGRAM\x10\x1d\x12\x0c\n\x08INTERCOM\x10\x1e\x12\t\n\x05KAKAO\x10\x1f\x12\n\n\x06LASTFM\x10 \x12\x0c\n\x08LINKEDIN\x10!\x12\x08\n\x04LINE\x10\"\x12\x0c\n\x08ONEDRIVE\x10#\x12\x0b\n\x07\x41ZUREAD\x10$\x12\x13\n\x0fMICROSOFTONLINE\x10%\x12\r\n\tBATTLENET\x10&\x12\n\n\x06PAYPAL\x10\'\x12\x0b\n\x07TWITTER\x10(\x12\x0e\n\nSALESFORCE\x10)\x12\x0c\n\x08TYPETALK\x10*\x12\t\n\x05SLACK\x10+\x12\n\n\x06MEETUP\x10,\x12\x08\n\x04XERO\x10.\x12\x06\n\x02VK\x10/\x12\t\n\x05NAVER\x10\x30\x12\n\n\x06YANDEX\x10\x31\x12\r\n\tNEXTCLOUD\x10\x32\x12\t\n\x05\x41PPLE\x10\x34\x12\n\n\x06STRAVA\x10\x35\"=\n\x05Types\x12\t\n\x05UNSET\x10\x00\x12\x0c\n\x08PASSWORD\x10\x01\x12\x07\n\x03OTP\x10\x02\x12\x08\n\x04TOTP\x10\x03\x12\x08\n\x04OIDC\x10\x04\"\x1a\n\x18UpdateConnectionResponse\")\n\x17\x44\x65leteConnectionRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"\x1a\n\x18\x44\x65leteConnectionResponse\"\x19\n\x17GetOAuth2ClientsRequest\"\xa1\x10\n\x18GetOAuth2ClientsResponse\x12`\n\x07\x63lients\x18\x01 
\x03(\x0b\x32\x46.depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.ClientR\x07\x63lients\x1a\xa2\x0f\n\x06\x43lient\x12\x30\n\x14\x61llowed_cors_origins\x18\x01 \x03(\tR\x12\x61llowedCorsOrigins\x12\x1a\n\x08\x61udience\x18\x02 \x03(\tR\x08\x61udience\x12M\n#backchannel_logout_session_required\x18\x03 \x01(\x08R backchannelLogoutSessionRequired\x12\x34\n\x16\x62\x61\x63kchannel_logout_uri\x18\x04 \x01(\tR\x14\x62\x61\x63kchannelLogoutUri\x12\x1b\n\tclient_id\x18\x05 \x01(\tR\x08\x63lientId\x12\x1f\n\x0b\x63lient_name\x18\x06 \x01(\tR\nclientName\x12\x37\n\x18\x63lient_secret_expires_at\x18\x07 \x01(\x03R\x15\x63lientSecretExpiresAt\x12\x1d\n\nclient_uri\x18\x08 \x01(\tR\tclientUri\x12\x1a\n\x08\x63ontacts\x18\t \x03(\tR\x08\x63ontacts\x12\x39\n\ncreated_at\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12O\n$frontchannel_logout_session_required\x18\x0b \x01(\x08R!frontchannelLogoutSessionRequired\x12\x36\n\x17\x66rontchannel_logout_uri\x18\x0c \x01(\tR\x15\x66rontchannelLogoutUri\x12Q\n\x0bgrant_types\x18\r \x03(\x0e\x32\x30.depot.devtools.auth.v0.identity.admin.GrantTypeR\ngrantTypes\x12+\n\x04jwks\x18\x0e \x01(\x0b\x32\x17.google.protobuf.StructR\x04jwks\x12\x19\n\x08jwks_uri\x18\x0f \x01(\tR\x07jwksUri\x12\x19\n\x08logo_uri\x18\x10 \x01(\tR\x07logoUri\x12\x33\n\x08metadata\x18\x11 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x14\n\x05owner\x18\x12 \x01(\tR\x05owner\x12\x1d\n\npolicy_uri\x18\x13 \x01(\tR\tpolicyUri\x12\x39\n\x19post_logout_redirect_uris\x18\x14 \x03(\tR\x16postLogoutRedirectUris\x12#\n\rredirect_uris\x18\x15 \x03(\tR\x0credirectUris\x12{\n\x1arequest_object_signing_alg\x18\x16 \x01(\x0e\x32>.depot.devtools.auth.v0.identity.admin.RequestObjectSigningAlgR\x17requestObjectSigningAlg\x12!\n\x0crequest_uris\x18\x17 \x03(\tR\x0brequestUris\x12Z\n\x0eresponse_types\x18\x18 \x03(\x0e\x32\x33.depot.devtools.auth.v0.identity.admin.ResponseTypeR\rresponseTypes\x12\x14\n\x05scope\x18\x19 
\x01(\tR\x05scope\x12\x32\n\x15sector_identifier_uri\x18\x1a \x01(\tR\x13sectorIdentifierUri\x12U\n\x0csubject_type\x18\x1b \x01(\x0e\x32\x32.depot.devtools.auth.v0.identity.admin.SubjectTypeR\x0bsubjectType\x12{\n\x1atoken_endpoint_auth_method\x18\x1c \x01(\x0e\x32>.depot.devtools.auth.v0.identity.admin.TokenEndpointAuthMethodR\x17tokenEndpointAuthMethod\x12\x88\x01\n\x1ftoken_endpoint_auth_signing_alg\x18\x1d \x01(\x0e\x32\x42.depot.devtools.auth.v0.identity.admin.TokenEndpointAuthSigningAlgR\x1btokenEndpointAuthSigningAlg\x12\x17\n\x07tos_uri\x18\x1e \x01(\tR\x06tosUri\x12\x39\n\nupdated_at\x18\x1f \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x81\x01\n\x1cuserinfo_signed_response_alg\x18 \x01(\x0e\x32@.depot.devtools.auth.v0.identity.admin.UserinfoSignedResponseAlgR\x19userinfoSignedResponseAlg\x12R\n\x0b\x63lient_type\x18! \x01(\x0e\x32\x31.depot.devtools.auth.v0.identity.admin.ClientTypeR\nclientType\"\x8e\x0e\n\x19\x43reateOAuth2ClientRequest\x12\x30\n\x14\x61llowed_cors_origins\x18\x01 \x03(\tR\x12\x61llowedCorsOrigins\x12\x1a\n\x08\x61udience\x18\x02 \x03(\tR\x08\x61udience\x12M\n#backchannel_logout_session_required\x18\x03 \x01(\x08R backchannelLogoutSessionRequired\x12\x34\n\x16\x62\x61\x63kchannel_logout_uri\x18\x04 \x01(\tR\x14\x62\x61\x63kchannelLogoutUri\x12\x1f\n\x0b\x63lient_name\x18\x06 \x01(\tR\nclientName\x12\x1d\n\nclient_uri\x18\x08 \x01(\tR\tclientUri\x12\x1a\n\x08\x63ontacts\x18\t \x03(\tR\x08\x63ontacts\x12O\n$frontchannel_logout_session_required\x18\x0b \x01(\x08R!frontchannelLogoutSessionRequired\x12\x36\n\x17\x66rontchannel_logout_uri\x18\x0c \x01(\tR\x15\x66rontchannelLogoutUri\x12Q\n\x0bgrant_types\x18\r \x03(\x0e\x32\x30.depot.devtools.auth.v0.identity.admin.GrantTypeR\ngrantTypes\x12+\n\x04jwks\x18\x0e \x01(\x0b\x32\x17.google.protobuf.StructR\x04jwks\x12\x19\n\x08jwks_uri\x18\x0f \x01(\tR\x07jwksUri\x12\x19\n\x08logo_uri\x18\x10 \x01(\tR\x07logoUri\x12\x33\n\x08metadata\x18\x11 
\x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x14\n\x05owner\x18\x12 \x01(\tR\x05owner\x12\x1d\n\npolicy_uri\x18\x13 \x01(\tR\tpolicyUri\x12\x39\n\x19post_logout_redirect_uris\x18\x14 \x03(\tR\x16postLogoutRedirectUris\x12#\n\rredirect_uris\x18\x15 \x03(\tR\x0credirectUris\x12{\n\x1arequest_object_signing_alg\x18\x16 \x01(\x0e\x32>.depot.devtools.auth.v0.identity.admin.RequestObjectSigningAlgR\x17requestObjectSigningAlg\x12!\n\x0crequest_uris\x18\x17 \x03(\tR\x0brequestUris\x12Z\n\x0eresponse_types\x18\x18 \x03(\x0e\x32\x33.depot.devtools.auth.v0.identity.admin.ResponseTypeR\rresponseTypes\x12\x14\n\x05scope\x18\x19 \x01(\tR\x05scope\x12\x32\n\x15sector_identifier_uri\x18\x1a \x01(\tR\x13sectorIdentifierUri\x12U\n\x0csubject_type\x18\x1b \x01(\x0e\x32\x32.depot.devtools.auth.v0.identity.admin.SubjectTypeR\x0bsubjectType\x12{\n\x1atoken_endpoint_auth_method\x18\x1c \x01(\x0e\x32>.depot.devtools.auth.v0.identity.admin.TokenEndpointAuthMethodR\x17tokenEndpointAuthMethod\x12\x88\x01\n\x1ftoken_endpoint_auth_signing_alg\x18\x1d \x01(\x0e\x32\x42.depot.devtools.auth.v0.identity.admin.TokenEndpointAuthSigningAlgR\x1btokenEndpointAuthSigningAlg\x12\x17\n\x07tos_uri\x18\x1e \x01(\tR\x06tosUri\x12\x81\x01\n\x1cuserinfo_signed_response_alg\x18\x1f \x01(\x0e\x32@.depot.devtools.auth.v0.identity.admin.UserinfoSignedResponseAlgR\x19userinfoSignedResponseAlg\x12#\n\rclient_secret\x18 \x01(\tR\x0c\x63lientSecret\x12R\n\x0b\x63lient_type\x18! 
\x01(\x0e\x32\x31.depot.devtools.auth.v0.identity.admin.ClientTypeR\nclientType\"^\n\x1a\x43reateOAuth2ClientResponse\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12#\n\rclient_secret\x18\x02 \x01(\tR\x0c\x63lientSecret\"\xd7\r\n\x19UpdateOAuth2ClientRequest\x12\x30\n\x14\x61llowed_cors_origins\x18\x01 \x03(\tR\x12\x61llowedCorsOrigins\x12\x1a\n\x08\x61udience\x18\x02 \x03(\tR\x08\x61udience\x12M\n#backchannel_logout_session_required\x18\x03 \x01(\x08R backchannelLogoutSessionRequired\x12\x34\n\x16\x62\x61\x63kchannel_logout_uri\x18\x04 \x01(\tR\x14\x62\x61\x63kchannelLogoutUri\x12\x1f\n\x0b\x63lient_name\x18\x06 \x01(\tR\nclientName\x12\x1d\n\nclient_uri\x18\x08 \x01(\tR\tclientUri\x12\x1a\n\x08\x63ontacts\x18\t \x03(\tR\x08\x63ontacts\x12O\n$frontchannel_logout_session_required\x18\x0b \x01(\x08R!frontchannelLogoutSessionRequired\x12\x36\n\x17\x66rontchannel_logout_uri\x18\x0c \x01(\tR\x15\x66rontchannelLogoutUri\x12Q\n\x0bgrant_types\x18\r \x03(\x0e\x32\x30.depot.devtools.auth.v0.identity.admin.GrantTypeR\ngrantTypes\x12+\n\x04jwks\x18\x0e \x01(\x0b\x32\x17.google.protobuf.StructR\x04jwks\x12\x19\n\x08jwks_uri\x18\x0f \x01(\tR\x07jwksUri\x12\x19\n\x08logo_uri\x18\x10 \x01(\tR\x07logoUri\x12\x33\n\x08metadata\x18\x11 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x14\n\x05owner\x18\x12 \x01(\tR\x05owner\x12\x1d\n\npolicy_uri\x18\x13 \x01(\tR\tpolicyUri\x12\x39\n\x19post_logout_redirect_uris\x18\x14 \x03(\tR\x16postLogoutRedirectUris\x12#\n\rredirect_uris\x18\x15 \x03(\tR\x0credirectUris\x12{\n\x1arequest_object_signing_alg\x18\x16 \x01(\x0e\x32>.depot.devtools.auth.v0.identity.admin.RequestObjectSigningAlgR\x17requestObjectSigningAlg\x12!\n\x0crequest_uris\x18\x17 \x03(\tR\x0brequestUris\x12Z\n\x0eresponse_types\x18\x18 \x03(\x0e\x32\x33.depot.devtools.auth.v0.identity.admin.ResponseTypeR\rresponseTypes\x12\x14\n\x05scope\x18\x19 \x01(\tR\x05scope\x12\x32\n\x15sector_identifier_uri\x18\x1a 
\x01(\tR\x13sectorIdentifierUri\x12U\n\x0csubject_type\x18\x1b \x01(\x0e\x32\x32.depot.devtools.auth.v0.identity.admin.SubjectTypeR\x0bsubjectType\x12{\n\x1atoken_endpoint_auth_method\x18\x1c \x01(\x0e\x32>.depot.devtools.auth.v0.identity.admin.TokenEndpointAuthMethodR\x17tokenEndpointAuthMethod\x12\x88\x01\n\x1ftoken_endpoint_auth_signing_alg\x18\x1d \x01(\x0e\x32\x42.depot.devtools.auth.v0.identity.admin.TokenEndpointAuthSigningAlgR\x1btokenEndpointAuthSigningAlg\x12\x17\n\x07tos_uri\x18\x1e \x01(\tR\x06tosUri\x12\x81\x01\n\x1cuserinfo_signed_response_alg\x18\x1f \x01(\x0e\x32@.depot.devtools.auth.v0.identity.admin.UserinfoSignedResponseAlgR\x19userinfoSignedResponseAlg\x12#\n\rclient_secret\x18 \x01(\tR\x0c\x63lientSecret\x12\x1b\n\tclient_id\x18! \x01(\tR\x08\x63lientId\"^\n\x1aUpdateOAuth2ClientResponse\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\x12#\n\rclient_secret\x18\x02 \x01(\tR\x0c\x63lientSecret\"8\n\x19\x44\x65leteOAuth2ClientRequest\x12\x1b\n\tclient_id\x18\x01 \x01(\tR\x08\x63lientId\"\x1c\n\x1a\x44\x65leteOAuth2ClientResponse\"6\n\x13GetAddressesRequest\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\"\xa4\x02\n\x14GetAddressesResponse\x12\x61\n\taddresses\x18\x01 \x03(\x0b\x32\x43.depot.devtools.auth.v0.identity.admin.GetAddressesResponse.AddressR\taddresses\x1a\xa8\x01\n\x07\x41\x64\x64ress\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bidentity_id\x18\x02 \x01(\tR\nidentityId\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\x18\n\x07\x61\x64\x64ress\x18\x04 \x01(\tR\x07\x61\x64\x64ress\x12\x1a\n\x08verified\x18\x05 \x01(\x08R\x08verified\x12\"\n\rid_schema_key\x18\x06 \x01(\tR\x0bidSchemaKey\"T\n\x11GetAddressRequest\x12\x10\n\x02id\x18\x01 \x01(\tH\x00R\x02id\x12$\n\x0cverification\x18\x02 \x01(\tH\x00R\x0cverificationB\x07\n\x05match\"\xb3\x01\n\x12GetAddressResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bidentity_id\x18\x02 \x01(\tR\nidentityId\x12\x12\n\x04name\x18\x03 
\x01(\tR\x04name\x12\x18\n\x07\x61\x64\x64ress\x18\x04 \x01(\tR\x07\x61\x64\x64ress\x12\x1a\n\x08verified\x18\x05 \x01(\x08R\x08verified\x12\"\n\rid_schema_key\x18\x06 \x01(\tR\x0bidSchemaKey\"m\n\x14UpdateAddressRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1c\n\x08verified\x18\x02 \x01(\x08H\x00R\x08verified\x12\x1a\n\x07\x61\x64\x64ress\x18\x03 \x01(\tH\x00R\x07\x61\x64\x64ressB\x0b\n\tattribute\"\x17\n\x15UpdateAddressResponse\"3\n\x10GetTraitsRequest\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\"+\n\x11GetTraitsResponse\x12\x16\n\x06traits\x18\x01 \x01(\tR\x06traits\"N\n\x13UpdateTraitsRequest\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\x12\x16\n\x06traits\x18\x02 \x01(\tR\x06traits\"\x16\n\x14UpdateTraitsResponse\"B\n\x1fGetIdentityLoginAttemptsRequest\x12\x1f\n\x0bidentity_id\x18\x01 \x01(\tR\nidentityId\"\x91\x03\n GetIdentityLoginAttemptsResponse\x12k\n\x08\x61ttempts\x18\x01 \x03(\x0b\x32O.depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.AttemptR\x08\x61ttempts\x1a\xff\x01\n\x07\x41ttempt\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n\x0bidentity_id\x18\x02 \x01(\tR\nidentityId\x12\x16\n\x06status\x18\x03 \x01(\tR\x06status\x12\x35\n\x16\x61uthentication_methods\x18\x04 \x03(\tR\x15\x61uthenticationMethods\x12\x39\n\ncreated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nexpires_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\texpiresAt\"^\n\x15\x43reateIdSchemaRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x31\n\x07\x63ontent\x18\x02 \x01(\x0b\x32\x17.google.protobuf.StructR\x07\x63ontent\"(\n\x16\x43reateIdSchemaResponse\x12\x0e\n\x02id\x18\x02 \x01(\tR\x02id\"\x15\n\x13GetIdSchemasRequest\"\xfb\x02\n\x14GetIdSchemasResponse\x12h\n\x0bjsonschemas\x18\x01 \x03(\x0b\x32\x46.depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchemaR\x0bjsonschemas\x1a\xf8\x01\n\nJsonSchema\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 
\x01(\tR\x04name\x12\x31\n\x07\x63ontent\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x07\x63ontent\x12\x39\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x1d\n\nis_default\x18\x06 \x01(\x08R\tisDefault\"$\n\x12GetIdSchemaRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"\x81\x02\n\x13GetIdSchemaResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x31\n\x07\x63ontent\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x07\x63ontent\x12\x39\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x1d\n\nis_default\x18\x06 \x01(\x08R\tisDefault\"\x1b\n\x19GetDefaultIdSchemaRequest\"\x88\x02\n\x1aGetDefaultIdSchemaResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x31\n\x07\x63ontent\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x07\x63ontent\x12\x39\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x1d\n\nis_default\x18\x06 \x01(\x08R\tisDefault\"n\n\x15UpdateIdSchemaRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x31\n\x07\x63ontent\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x07\x63ontent\"\x18\n\x16UpdateIdSchemaResponse\",\n\x1aMarkDefaultIdSchemaRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"\x1d\n\x1bMarkDefaultIdSchemaResponse\"\'\n\x15\x44\x65leteIdSchemaRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\"\x18\n\x16\x44\x65leteIdSchemaResponse\"\x17\n\x15GetEmailsSetupRequest\"\xcc\x07\n\x16GetEmailsSetupResponse\x12\x65\n\x07welcome\x18\x01 
\x01(\x0b\x32K.depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplateR\x07welcome\x12~\n\x14\x61\x63\x63ount_verification\x18\x02 \x01(\x0b\x32K.depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplateR\x13\x61\x63\x63ountVerification\x12v\n\x10\x61\x63\x63ount_recovery\x18\x03 \x01(\x0b\x32K.depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplateR\x0f\x61\x63\x63ountRecovery\x12x\n\x11\x61\x63\x63ount_recovered\x18\x04 \x01(\x0b\x32K.depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplateR\x10\x61\x63\x63ountRecovered\x12]\n\x03otp\x18\x05 \x01(\x0b\x32K.depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplateR\x03otp\x12]\n\x04smtp\x18\x06 \x01(\x0b\x32I.depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSenderR\x04smtp\x12\x1d\n\nproject_id\x18\x07 \x01(\tR\tprojectId\x1a\x43\n\rEmailTemplate\x12\x18\n\x07\x63ontent\x18\x03 \x01(\tR\x07\x63ontent\x12\x18\n\x07subject\x18\x04 \x01(\tR\x07subject\x1a\xb6\x01\n\x0b\x45mailSender\x12#\n\remail_address\x18\x01 \x01(\tR\x0c\x65mailAddress\x12\x1b\n\tsmtp_host\x18\x02 \x01(\tR\x08smtpHost\x12\x1b\n\tsmtp_port\x18\x03 \x01(\rR\x08smtpPort\x12#\n\rsmtp_username\x18\x04 \x01(\tR\x0csmtpUsername\x12#\n\rsmtp_password\x18\x05 \x01(\tR\x0csmtpPassword\"\xbc\x07\n\x18UpdateEmailsSetupRequest\x12g\n\x07welcome\x18\x01 \x01(\x0b\x32M.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplateR\x07welcome\x12\x80\x01\n\x14\x61\x63\x63ount_verification\x18\x02 \x01(\x0b\x32M.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplateR\x13\x61\x63\x63ountVerification\x12x\n\x10\x61\x63\x63ount_recovery\x18\x03 \x01(\x0b\x32M.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplateR\x0f\x61\x63\x63ountRecovery\x12z\n\x11\x61\x63\x63ount_recovered\x18\x04 
\x01(\x0b\x32M.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplateR\x10\x61\x63\x63ountRecovered\x12_\n\x03otp\x18\x05 \x01(\x0b\x32M.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplateR\x03otp\x12_\n\x04smtp\x18\x06 \x01(\x0b\x32K.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSenderR\x04smtp\x1a\x43\n\rEmailTemplate\x12\x18\n\x07\x63ontent\x18\x01 \x01(\tR\x07\x63ontent\x12\x18\n\x07subject\x18\x02 \x01(\tR\x07subject\x1a\xb6\x01\n\x0b\x45mailSender\x12#\n\remail_address\x18\x01 \x01(\tR\x0c\x65mailAddress\x12\x1b\n\tsmtp_host\x18\x02 \x01(\tR\x08smtpHost\x12\x1b\n\tsmtp_port\x18\x03 \x01(\rR\x08smtpPort\x12#\n\rsmtp_username\x18\x04 \x01(\tR\x0csmtpUsername\x12#\n\rsmtp_password\x18\x05 \x01(\tR\x0csmtpPassword\"\x1b\n\x19UpdateEmailsSetupResponse\"\xcb\x01\n\x1cGetUserBaseStatisticsRequest\x12\x1f\n\x0b\x64\x61ys_before\x18\x01 \x01(\rR\ndaysBefore\x12\x46\n\x11lifetime_start_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0flifetimeStartAt\x12\x42\n\x0flifetime_end_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\rlifetimeEndAt\"\xa1\x04\n\x1dGetUserBaseStatisticsResponse\x12\x1f\n\x0btotal_users\x18\x01 \x01(\x05R\ntotalUsers\x12y\n\rusers_per_day\x18\x02 \x03(\x0b\x32U.depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.UsersPerDayEntryR\x0busersPerDay\x12\x82\x01\n\x10sessions_per_day\x18\x03 \x03(\x0b\x32X.depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.SessionsPerDayEntryR\x0esessionsPerDay\x12,\n\x12total_active_users\x18\x04 \x01(\x05R\x10totalActiveUsers\x12.\n\x13total_lifetime_used\x18\x05 \x01(\x01R\x11totalLifetimeUsed\x1a>\n\x10UsersPerDayEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\x05R\x05value:\x02\x38\x01\x1a\x41\n\x13SessionsPerDayEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 
\x01(\x05R\x05value:\x02\x38\x01*l\n\nClientType\x12\x15\n\x11\x43LIENT_TYPE_UNSET\x10\x00\x12\x16\n\x12\x43LIENT_FIRST_PARTY\x10\x01\x12\x16\n\x12\x43LIENT_THIRD_PARTY\x10\x02\x12\x17\n\x13\x43LIENT_SYSTEM_PARTY\x10\x03*A\n\x0bSubjectType\x12\x17\n\x13SUBJECT_TYPE_PUBLIC\x10\x00\x12\x19\n\x15SUBJECT_TYPE_PAIRWISE\x10\x01*\xd5\x01\n\x17TokenEndpointAuthMethod\x12\x32\n.TOKEN_ENDPOINT_AUTH_METHOD_CLIENT_SECRET_BASIC\x10\x00\x12\x31\n-TOKEN_ENDPOINT_AUTH_METHOD_CLIENT_SECRET_POST\x10\x01\x12.\n*TOKEN_ENDPOINT_AUTH_METHOD_PRIVATE_KEY_JWT\x10\x02\x12#\n\x1fTOKEN_ENDPOINT_AUTH_METHOD_NONE\x10\x03*\xcb\x03\n\x1bTokenEndpointAuthSigningAlg\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_UNSET\x10\x00\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS256\x10\x01\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS384\x10\x02\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS512\x10\x03\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS256\x10\x04\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS384\x10\x05\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS512\x10\x06\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES256\x10\x07\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES384\x10\x08\x12)\n%TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES512\x10\t*\xba\x01\n\tGrantType\x12\x14\n\x10GRANT_TYPE_UNSET\x10\x00\x12!\n\x1dGRANT_TYPE_CLIENT_CREDENTIALS\x10\x01\x12!\n\x1dGRANT_TYPE_AUTHORIZATION_CODE\x10\x02\x12\x17\n\x13GRANT_TYPE_IMPLICIT\x10\x03\x12\x1c\n\x18GRANT_TYPE_REFRESH_TOKEN\x10\x04\x12\x1a\n\x16GRANT_TYPE_DEVICE_CODE\x10\x05*\x94\x02\n\x0cResponseType\x12\x17\n\x13RESPONSE_TYPE_UNSET\x10\x00\x12\x16\n\x12RESPONSE_TYPE_NONE\x10\x01\x12\x1a\n\x16RESPONSE_TYPE_ID_TOKEN\x10\x02\x12\x17\n\x13RESPONSE_TYPE_TOKEN\x10\x03\x12\x16\n\x12RESPONSE_TYPE_CODE\x10\x04\x12 \n\x1cRESPONSE_TYPE_ID_TOKEN_TOKEN\x10\x05\x12\x1f\n\x1bRESPONSE_TYPE_CODE_ID_TOKEN\x10\x06\x12\x1c\n\x18RESPONSE_TYPE_CODE_TOKEN\x10\x07\x12%\n!RESPONSE_TYPE_CODE_ID_TOKEN_TOKEN\x10\x08*\x8a\x01\n\x17RequestObjectSigningAlg\x12$\n 
REQUEST_OBJECT_SIGNING_ALG_UNSET\x10\x00\x12#\n\x1fREQUEST_OBJECT_SIGNING_ALG_NONE\x10\x01\x12$\n REQUEST_OBJECT_SIGNING_ALG_RS256\x10\x02*\x92\x01\n\x19UserinfoSignedResponseAlg\x12&\n\"USERINFO_SIGNED_RESPONSE_ALG_UNSET\x10\x00\x12%\n!USERINFO_SIGNED_RESPONSE_ALG_NONE\x10\x01\x12&\n\"USERINFO_SIGNED_RESPONSE_ALG_RS256\x10\x02\x32\xf8$\n\x05\x41\x64min\x12\x8d\x01\n\x0e\x43reateIdentity\x12<.depot.devtools.auth.v0.identity.admin.CreateIdentityRequest\x1a=.depot.devtools.auth.v0.identity.admin.CreateIdentityResponse\x12\x84\x01\n\x0bGetIdentity\x12\x39.depot.devtools.auth.v0.identity.admin.GetIdentityRequest\x1a:.depot.devtools.auth.v0.identity.admin.GetIdentityResponse\x12\xab\x01\n\x18GetIdentitiesByAttribute\x12\x46.depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeRequest\x1aG.depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse\x12\x8a\x01\n\rGetIdentities\x12;.depot.devtools.auth.v0.identity.admin.GetIdentitiesRequest\x1a<.depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse\x12\x8d\x01\n\x0eUpdateIdentity\x12<.depot.devtools.auth.v0.identity.admin.UpdateIdentityRequest\x1a=.depot.devtools.auth.v0.identity.admin.UpdateIdentityResponse\x12\x8d\x01\n\x0e\x44\x65leteIdentity\x12<.depot.devtools.auth.v0.identity.admin.DeleteIdentityRequest\x1a=.depot.devtools.auth.v0.identity.admin.DeleteIdentityResponse\x12\x87\x01\n\x0cGetAddresses\x12:.depot.devtools.auth.v0.identity.admin.GetAddressesRequest\x1a;.depot.devtools.auth.v0.identity.admin.GetAddressesResponse\x12\x81\x01\n\nGetAddress\x12\x38.depot.devtools.auth.v0.identity.admin.GetAddressRequest\x1a\x39.depot.devtools.auth.v0.identity.admin.GetAddressResponse\x12\x8a\x01\n\rUpdateAddress\x12;.depot.devtools.auth.v0.identity.admin.UpdateAddressRequest\x1a<.depot.devtools.auth.v0.identity.admin.UpdateAddressResponse\x12~\n\tGetTraits\x12\x37.depot.devtools.auth.v0.identity.admin.GetTraitsRequest\x1a\x38.depot.devtools.auth.v0.identity.admin.GetTraitsResponse\x12\x87\x01\n\x0cUpd
ateTraits\x12:.depot.devtools.auth.v0.identity.admin.UpdateTraitsRequest\x1a;.depot.devtools.auth.v0.identity.admin.UpdateTraitsResponse\x12\x8d\x01\n\x0eGetCredentials\x12<.depot.devtools.auth.v0.identity.admin.GetCredentialsRequest\x1a=.depot.devtools.auth.v0.identity.admin.GetCredentialsResponse\x12\x93\x01\n\x10UpdateCredential\x12>.depot.devtools.auth.v0.identity.admin.UpdateCredentialRequest\x1a?.depot.devtools.auth.v0.identity.admin.UpdateCredentialResponse\x12\xab\x01\n\x18GetIdentityLoginAttempts\x12\x46.depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsRequest\x1aG.depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse\x12\x93\x01\n\x10\x43reateConnection\x12>.depot.devtools.auth.v0.identity.admin.CreateConnectionRequest\x1a?.depot.devtools.auth.v0.identity.admin.CreateConnectionResponse\x12\x8d\x01\n\x0eGetConnections\x12<.depot.devtools.auth.v0.identity.admin.GetConnectionsRequest\x1a=.depot.devtools.auth.v0.identity.admin.GetConnectionsResponse\x12\x93\x01\n\x10UpdateConnection\x12>.depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest\x1a?.depot.devtools.auth.v0.identity.admin.UpdateConnectionResponse\x12\x93\x01\n\x10\x44\x65leteConnection\x12>.depot.devtools.auth.v0.identity.admin.DeleteConnectionRequest\x1a?.depot.devtools.auth.v0.identity.admin.DeleteConnectionResponse\x12\x8d\x01\n\x0e\x43reateIdSchema\x12<.depot.devtools.auth.v0.identity.admin.CreateIdSchemaRequest\x1a=.depot.devtools.auth.v0.identity.admin.CreateIdSchemaResponse\x12\x87\x01\n\x0cGetIdSchemas\x12:.depot.devtools.auth.v0.identity.admin.GetIdSchemasRequest\x1a;.depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse\x12\x84\x01\n\x0bGetIdSchema\x12\x39.depot.devtools.auth.v0.identity.admin.GetIdSchemaRequest\x1a:.depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse\x12\x99\x01\n\x12GetDefaultIdSchema\x12@.depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaRequest\x1a\x41.depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaRespons
e\x12\x8d\x01\n\x0eUpdateIdSchema\x12<.depot.devtools.auth.v0.identity.admin.UpdateIdSchemaRequest\x1a=.depot.devtools.auth.v0.identity.admin.UpdateIdSchemaResponse\x12\x9c\x01\n\x13MarkDefaultIdSchema\x12\x41.depot.devtools.auth.v0.identity.admin.MarkDefaultIdSchemaRequest\x1a\x42.depot.devtools.auth.v0.identity.admin.MarkDefaultIdSchemaResponse\x12\x8d\x01\n\x0e\x44\x65leteIdSchema\x12<.depot.devtools.auth.v0.identity.admin.DeleteIdSchemaRequest\x1a=.depot.devtools.auth.v0.identity.admin.DeleteIdSchemaResponse\x12\x99\x01\n\x12\x43reateOAuth2Client\x12@.depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest\x1a\x41.depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientResponse\x12\x93\x01\n\x10GetOAuth2Clients\x12>.depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsRequest\x1a?.depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse\x12\x99\x01\n\x12UpdateOAuth2Client\x12@.depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest\x1a\x41.depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientResponse\x12\x99\x01\n\x12\x44\x65leteOAuth2Client\x12@.depot.devtools.auth.v0.identity.admin.DeleteOAuth2ClientRequest\x1a\x41.depot.devtools.auth.v0.identity.admin.DeleteOAuth2ClientResponse\x12\x8d\x01\n\x0eGetEmailsSetup\x12<.depot.devtools.auth.v0.identity.admin.GetEmailsSetupRequest\x1a=.depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse\x12\x96\x01\n\x11UpdateEmailsSetup\x12?.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest\x1a@.depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupResponse\x12\xa2\x01\n\x15GetUserBaseStatistics\x12\x43.depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsRequest\x1a\x44.depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponseB\xa2\x01\n\x18\x64\x65v.auth3.identity.adminB\nAdminProtoP\x01Z\'github.com/auth3-dev/go-sdk/admin;admin\xf8\x01\x01\xa2\x02\x04\x41\x33IA\xaa\x02\x14\x41uth3.Identity.Admin\xca\x02\x14\x41uth3\\Identity\\Admin\xea\x02\x16\x41uth3::Identity::Adm
inb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
# File-level ClientType enum descriptor (generated by protoc — do not edit by hand).
# Distinguishes first-/third-/system-party OAuth2 clients; registered in the symbol
# database and exposed to callers via the ClientType wrapper below.
_CLIENTTYPE = _descriptor.EnumDescriptor(
name='ClientType',
full_name='depot.devtools.auth.v0.identity.admin.ClientType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CLIENT_TYPE_UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CLIENT_FIRST_PARTY', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CLIENT_THIRD_PARTY', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CLIENT_SYSTEM_PARTY', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=19054,
serialized_end=19162,
)
_sym_db.RegisterEnumDescriptor(_CLIENTTYPE)
# Public enum wrapper: provides Value()/Name() lookups for generated code users.
ClientType = enum_type_wrapper.EnumTypeWrapper(_CLIENTTYPE)
# File-level SubjectType enum descriptor (generated by protoc — do not edit by hand).
# OIDC subject identifier type: public vs. pairwise.
_SUBJECTTYPE = _descriptor.EnumDescriptor(
name='SubjectType',
full_name='depot.devtools.auth.v0.identity.admin.SubjectType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='SUBJECT_TYPE_PUBLIC', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SUBJECT_TYPE_PAIRWISE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=19164,
serialized_end=19229,
)
_sym_db.RegisterEnumDescriptor(_SUBJECTTYPE)
# Public enum wrapper for generated code users.
SubjectType = enum_type_wrapper.EnumTypeWrapper(_SUBJECTTYPE)
# File-level TokenEndpointAuthMethod enum descriptor (generated by protoc — do not edit).
# OAuth2 token-endpoint client authentication methods. NOTE(review): unlike the other
# enums in this file there is no *_UNSET zero value — value 0 is CLIENT_SECRET_BASIC,
# so an unset field deserializes as that method.
_TOKENENDPOINTAUTHMETHOD = _descriptor.EnumDescriptor(
name='TokenEndpointAuthMethod',
full_name='depot.devtools.auth.v0.identity.admin.TokenEndpointAuthMethod',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_METHOD_CLIENT_SECRET_BASIC', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_METHOD_CLIENT_SECRET_POST', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_METHOD_PRIVATE_KEY_JWT', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_METHOD_NONE', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=19232,
serialized_end=19445,
)
_sym_db.RegisterEnumDescriptor(_TOKENENDPOINTAUTHMETHOD)
# Public enum wrapper for generated code users.
TokenEndpointAuthMethod = enum_type_wrapper.EnumTypeWrapper(_TOKENENDPOINTAUTHMETHOD)
# File-level TokenEndpointAuthSigningAlg enum descriptor (generated by protoc — do not edit).
# JWS signing algorithms for token-endpoint authentication: UNSET plus the
# RS/PS/ES families at 256/384/512-bit strengths.
_TOKENENDPOINTAUTHSIGNINGALG = _descriptor.EnumDescriptor(
name='TokenEndpointAuthSigningAlg',
full_name='depot.devtools.auth.v0.identity.admin.TokenEndpointAuthSigningAlg',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS256', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS384', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS512', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS256', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS384', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS512', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES256', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES384', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES512', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=19448,
serialized_end=19907,
)
_sym_db.RegisterEnumDescriptor(_TOKENENDPOINTAUTHSIGNINGALG)
# Public enum wrapper for generated code users.
TokenEndpointAuthSigningAlg = enum_type_wrapper.EnumTypeWrapper(_TOKENENDPOINTAUTHSIGNINGALG)
# File-level GrantType enum descriptor (generated by protoc — do not edit by hand).
# OAuth2 grant types supported by clients (client credentials, auth code,
# implicit, refresh token, device code), with UNSET as the zero value.
_GRANTTYPE = _descriptor.EnumDescriptor(
name='GrantType',
full_name='depot.devtools.auth.v0.identity.admin.GrantType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='GRANT_TYPE_UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GRANT_TYPE_CLIENT_CREDENTIALS', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GRANT_TYPE_AUTHORIZATION_CODE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GRANT_TYPE_IMPLICIT', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GRANT_TYPE_REFRESH_TOKEN', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GRANT_TYPE_DEVICE_CODE', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=19910,
serialized_end=20096,
)
_sym_db.RegisterEnumDescriptor(_GRANTTYPE)
# Public enum wrapper for generated code users.
GrantType = enum_type_wrapper.EnumTypeWrapper(_GRANTTYPE)
# File-level ResponseType enum descriptor (generated by protoc — do not edit by hand).
# OAuth2/OIDC response_type combinations (code, token, id_token and their
# composites), with UNSET as the zero value.
_RESPONSETYPE = _descriptor.EnumDescriptor(
name='ResponseType',
full_name='depot.devtools.auth.v0.identity.admin.ResponseType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_NONE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_ID_TOKEN', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_TOKEN', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_CODE', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_ID_TOKEN_TOKEN', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_CODE_ID_TOKEN', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_CODE_TOKEN', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_TYPE_CODE_ID_TOKEN_TOKEN', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=20099,
serialized_end=20375,
)
_sym_db.RegisterEnumDescriptor(_RESPONSETYPE)
# Public enum wrapper for generated code users.
ResponseType = enum_type_wrapper.EnumTypeWrapper(_RESPONSETYPE)
# File-level RequestObjectSigningAlg enum descriptor (generated by protoc — do not edit).
# JWS algorithm for OIDC request objects: UNSET, NONE, or RS256.
_REQUESTOBJECTSIGNINGALG = _descriptor.EnumDescriptor(
name='RequestObjectSigningAlg',
full_name='depot.devtools.auth.v0.identity.admin.RequestObjectSigningAlg',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='REQUEST_OBJECT_SIGNING_ALG_UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='REQUEST_OBJECT_SIGNING_ALG_NONE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='REQUEST_OBJECT_SIGNING_ALG_RS256', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=20378,
serialized_end=20516,
)
_sym_db.RegisterEnumDescriptor(_REQUESTOBJECTSIGNINGALG)
# Public enum wrapper for generated code users.
RequestObjectSigningAlg = enum_type_wrapper.EnumTypeWrapper(_REQUESTOBJECTSIGNINGALG)
# File-level UserinfoSignedResponseAlg enum descriptor (generated by protoc — do not edit).
# JWS algorithm for signed OIDC userinfo responses: UNSET, NONE, or RS256.
_USERINFOSIGNEDRESPONSEALG = _descriptor.EnumDescriptor(
name='UserinfoSignedResponseAlg',
full_name='depot.devtools.auth.v0.identity.admin.UserinfoSignedResponseAlg',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='USERINFO_SIGNED_RESPONSE_ALG_UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='USERINFO_SIGNED_RESPONSE_ALG_NONE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='USERINFO_SIGNED_RESPONSE_ALG_RS256', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=20519,
serialized_end=20665,
)
_sym_db.RegisterEnumDescriptor(_USERINFOSIGNEDRESPONSEALG)
# Public enum wrapper for generated code users.
UserinfoSignedResponseAlg = enum_type_wrapper.EnumTypeWrapper(_USERINFOSIGNEDRESPONSEALG)
# Module-level integer constants mirroring the file-level enum values above
# (generated by protoc). They let callers write e.g. `admin_pb2.GRANT_TYPE_IMPLICIT`
# instead of going through the EnumTypeWrapper objects. The integers must match
# the `number=` fields of the corresponding EnumValueDescriptors exactly.
CLIENT_TYPE_UNSET = 0
CLIENT_FIRST_PARTY = 1
CLIENT_THIRD_PARTY = 2
CLIENT_SYSTEM_PARTY = 3
SUBJECT_TYPE_PUBLIC = 0
SUBJECT_TYPE_PAIRWISE = 1
TOKEN_ENDPOINT_AUTH_METHOD_CLIENT_SECRET_BASIC = 0
TOKEN_ENDPOINT_AUTH_METHOD_CLIENT_SECRET_POST = 1
TOKEN_ENDPOINT_AUTH_METHOD_PRIVATE_KEY_JWT = 2
TOKEN_ENDPOINT_AUTH_METHOD_NONE = 3
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_UNSET = 0
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS256 = 1
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS384 = 2
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_RS512 = 3
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS256 = 4
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS384 = 5
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_PS512 = 6
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES256 = 7
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES384 = 8
TOKEN_ENDPOINT_AUTH_SIGNING_ALG_ES512 = 9
GRANT_TYPE_UNSET = 0
GRANT_TYPE_CLIENT_CREDENTIALS = 1
GRANT_TYPE_AUTHORIZATION_CODE = 2
GRANT_TYPE_IMPLICIT = 3
GRANT_TYPE_REFRESH_TOKEN = 4
GRANT_TYPE_DEVICE_CODE = 5
RESPONSE_TYPE_UNSET = 0
RESPONSE_TYPE_NONE = 1
RESPONSE_TYPE_ID_TOKEN = 2
RESPONSE_TYPE_TOKEN = 3
RESPONSE_TYPE_CODE = 4
RESPONSE_TYPE_ID_TOKEN_TOKEN = 5
RESPONSE_TYPE_CODE_ID_TOKEN = 6
RESPONSE_TYPE_CODE_TOKEN = 7
RESPONSE_TYPE_CODE_ID_TOKEN_TOKEN = 8
REQUEST_OBJECT_SIGNING_ALG_UNSET = 0
REQUEST_OBJECT_SIGNING_ALG_NONE = 1
REQUEST_OBJECT_SIGNING_ALG_RS256 = 2
USERINFO_SIGNED_RESPONSE_ALG_UNSET = 0
USERINFO_SIGNED_RESPONSE_ALG_NONE = 1
USERINFO_SIGNED_RESPONSE_ALG_RS256 = 2
# Nested Lock enum of GetIdentitiesResponse (generated by protoc — do not edit).
# UNLOCKED / ADMIN_LOCKED account-lock states. NOTE(review): the same 825-863
# offsets appear on several nested Lock descriptors in this file — presumably
# they all point at one shared serialized definition; verify against protoc output.
_GETIDENTITIESRESPONSE_LOCK = _descriptor.EnumDescriptor(
name='Lock',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Lock',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNLOCKED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ADMIN_LOCKED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=825,
serialized_end=863,
)
_sym_db.RegisterEnumDescriptor(_GETIDENTITIESRESPONSE_LOCK)
# Nested Lock enum of GetIdentityResponse (generated by protoc — do not edit).
# Same UNLOCKED / ADMIN_LOCKED shape as the other nested Lock enums in this file.
_GETIDENTITYRESPONSE_LOCK = _descriptor.EnumDescriptor(
name='Lock',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.Lock',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNLOCKED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ADMIN_LOCKED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=825,
serialized_end=863,
)
_sym_db.RegisterEnumDescriptor(_GETIDENTITYRESPONSE_LOCK)
# Nested Lock enum of GetIdentitiesByAttributeResponse.Identity (generated by
# protoc — do not edit). Same UNLOCKED / ADMIN_LOCKED shape as the other nested
# Lock enums in this file.
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_LOCK = _descriptor.EnumDescriptor(
name='Lock',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.Lock',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNLOCKED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ADMIN_LOCKED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=825,
serialized_end=863,
)
_sym_db.RegisterEnumDescriptor(_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_LOCK)
# Nested Lock enum of UpdateIdentityRequest (generated by protoc — do not edit).
# Same UNLOCKED / ADMIN_LOCKED shape as the other nested Lock enums in this file.
_UPDATEIDENTITYREQUEST_LOCK = _descriptor.EnumDescriptor(
name='Lock',
full_name='depot.devtools.auth.v0.identity.admin.UpdateIdentityRequest.Lock',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNLOCKED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ADMIN_LOCKED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=825,
serialized_end=863,
)
_sym_db.RegisterEnumDescriptor(_UPDATEIDENTITYREQUEST_LOCK)
# Nested Providers enum of CreateConnectionRequest (generated by protoc — do not edit).
# Catalogue of supported external identity providers. Note the wire numbers are
# NOT contiguous: 17 (after GOOGLE=16), 45 (after MEETUP=44) and 51 (after
# NEXTCLOUD=50) are skipped, so index and number diverge from SHOPIFY onward.
_CREATECONNECTIONREQUEST_PROVIDERS = _descriptor.EnumDescriptor(
name='Providers',
full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.Providers',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OPENID', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AMAZON', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BITBUCKET', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BOX', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DAILYMOTION', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEEZER', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DIGITALOCEAN', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISCORD', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DROPBOX', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EVEONLINE', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FACEBOOK', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FITBIT', index=12, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GITEA', index=13, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GITHUB', index=14, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GITLAB', index=15, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GOOGLE', index=16, number=16,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
# Wire number 17 is skipped here: index and number diverge from this point on.
_descriptor.EnumValueDescriptor(
name='SHOPIFY', index=17, number=18,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SOUNDCLOUD', index=18, number=19,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPOTIFY', index=19, number=20,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STEAM', index=20, number=21,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STRIPE', index=21, number=22,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TWITCH', index=22, number=23,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UBER', index=23, number=24,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WEPAY', index=24, number=25,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='YAHOO', index=25, number=26,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='YAMMER', index=26, number=27,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HEROKU', index=27, number=28,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INSTAGRAM', index=28, number=29,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INTERCOM', index=29, number=30,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='KAKAO', index=30, number=31,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LASTFM', index=31, number=32,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LINKEDIN', index=32, number=33,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LINE', index=33, number=34,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ONEDRIVE', index=34, number=35,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AZUREAD', index=35, number=36,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MICROSOFTONLINE', index=36, number=37,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BATTLENET', index=37, number=38,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PAYPAL', index=38, number=39,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TWITTER', index=39, number=40,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SALESFORCE', index=40, number=41,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TYPETALK', index=41, number=42,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SLACK', index=42, number=43,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MEETUP', index=43, number=44,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
# Wire number 45 is skipped here.
_descriptor.EnumValueDescriptor(
name='XERO', index=44, number=46,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='VK', index=45, number=47,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NAVER', index=46, number=48,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='YANDEX', index=47, number=49,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NEXTCLOUD', index=48, number=50,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
# Wire number 51 is skipped here.
_descriptor.EnumValueDescriptor(
name='APPLE', index=49, number=52,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STRAVA', index=50, number=53,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=3957,
serialized_end=4616,
)
_sym_db.RegisterEnumDescriptor(_CREATECONNECTIONREQUEST_PROVIDERS)
# Nested Types enum of CreateConnectionRequest (generated by protoc — do not edit).
# Connection/credential kind: UNSET, PASSWORD, OTP, TOTP or OIDC.
_CREATECONNECTIONREQUEST_TYPES = _descriptor.EnumDescriptor(
name='Types',
full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.Types',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PASSWORD', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OTP', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TOTP', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OIDC', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
# Byte offsets into DESCRIPTOR.serialized_pb — must stay in sync with it.
serialized_start=4618,
serialized_end=4679,
)
_sym_db.RegisterEnumDescriptor(_CREATECONNECTIONREQUEST_TYPES)
# Generated descriptor for the nested enum
# depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Providers:
# the catalogue of external auth providers. NOTE: this value list (and its
# serialized_start/end span 3957-4616) is repeated verbatim for the other
# messages' nested Providers enums in this generated file.
_GETCONNECTIONSRESPONSE_PROVIDERS = _descriptor.EnumDescriptor(
  name='Providers',
  full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Providers',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NONE', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='OPENID', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AMAZON', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BITBUCKET', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BOX', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DAILYMOTION', index=5, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DEEZER', index=6, number=6,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DIGITALOCEAN', index=7, number=7,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DISCORD', index=8, number=8,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DROPBOX', index=9, number=9,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='EVEONLINE', index=10, number=10,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='FACEBOOK', index=11, number=11,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='FITBIT', index=12, number=12,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GITEA', index=13, number=13,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GITHUB', index=14, number=14,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GITLAB', index=15, number=15,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GOOGLE', index=16, number=16,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    # Wire numbers 17, 45 and 51 are absent from the .proto enum, so from
    # here on index (list position) and number (wire value) diverge.
    _descriptor.EnumValueDescriptor(
      name='SHOPIFY', index=17, number=18,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SOUNDCLOUD', index=18, number=19,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SPOTIFY', index=19, number=20,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='STEAM', index=20, number=21,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='STRIPE', index=21, number=22,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TWITCH', index=22, number=23,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='UBER', index=23, number=24,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='WEPAY', index=24, number=25,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='YAHOO', index=25, number=26,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='YAMMER', index=26, number=27,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='HEROKU', index=27, number=28,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='INSTAGRAM', index=28, number=29,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='INTERCOM', index=29, number=30,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='KAKAO', index=30, number=31,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LASTFM', index=31, number=32,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LINKEDIN', index=32, number=33,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LINE', index=33, number=34,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ONEDRIVE', index=34, number=35,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AZUREAD', index=35, number=36,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='MICROSOFTONLINE', index=36, number=37,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BATTLENET', index=37, number=38,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PAYPAL', index=38, number=39,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TWITTER', index=39, number=40,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SALESFORCE', index=40, number=41,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TYPETALK', index=41, number=42,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SLACK', index=42, number=43,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='MEETUP', index=43, number=44,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='XERO', index=44, number=46,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='VK', index=45, number=47,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NAVER', index=46, number=48,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='YANDEX', index=47, number=49,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NEXTCLOUD', index=48, number=50,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='APPLE', index=49, number=52,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='STRAVA', index=50, number=53,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=3957,
  serialized_end=4616,
)
# Register with the default symbol database so enum lookup by full name works.
_sym_db.RegisterEnumDescriptor(_GETCONNECTIONSRESPONSE_PROVIDERS)
# Generated descriptor for the nested enum
# depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Types:
# the credential/connection kind (password, OTP, TOTP, OIDC), with UNSET
# as the proto3 zero value.
_GETCONNECTIONSRESPONSE_TYPES = _descriptor.EnumDescriptor(
  name='Types',
  full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Types',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNSET', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PASSWORD', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='OTP', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TOTP', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='OIDC', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=4618,
  serialized_end=4679,
)
# Register with the default symbol database so enum lookup by full name works.
_sym_db.RegisterEnumDescriptor(_GETCONNECTIONSRESPONSE_TYPES)
# Generated descriptor for the nested enum
# depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Purposes:
# whether a connection serves as a main or a second authentication factor.
_GETCONNECTIONSRESPONSE_PURPOSES = _descriptor.EnumDescriptor(
  name='Purposes',
  full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Purposes',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='PURPOSE_UNKNOWN', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PURPOSE_MAIN_FACTOR', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PURPOSE_SECOND_FACTOR', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=6149,
  serialized_end=6232,
)
# Register with the default symbol database so enum lookup by full name works.
_sym_db.RegisterEnumDescriptor(_GETCONNECTIONSRESPONSE_PURPOSES)
# Generated descriptor for the nested enum
# depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.Providers.
# Value-for-value identical to GetConnectionsResponse.Providers (protoc emits
# one descriptor per nested enum, even when the .proto definitions repeat).
_UPDATECONNECTIONREQUEST_PROVIDERS = _descriptor.EnumDescriptor(
  name='Providers',
  full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.Providers',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NONE', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='OPENID', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AMAZON', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BITBUCKET', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BOX', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DAILYMOTION', index=5, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DEEZER', index=6, number=6,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DIGITALOCEAN', index=7, number=7,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DISCORD', index=8, number=8,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DROPBOX', index=9, number=9,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='EVEONLINE', index=10, number=10,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='FACEBOOK', index=11, number=11,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='FITBIT', index=12, number=12,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GITEA', index=13, number=13,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GITHUB', index=14, number=14,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GITLAB', index=15, number=15,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GOOGLE', index=16, number=16,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    # Wire numbers 17, 45 and 51 are absent from the .proto enum, so from
    # here on index (list position) and number (wire value) diverge.
    _descriptor.EnumValueDescriptor(
      name='SHOPIFY', index=17, number=18,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SOUNDCLOUD', index=18, number=19,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SPOTIFY', index=19, number=20,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='STEAM', index=20, number=21,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='STRIPE', index=21, number=22,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TWITCH', index=22, number=23,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='UBER', index=23, number=24,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='WEPAY', index=24, number=25,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='YAHOO', index=25, number=26,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='YAMMER', index=26, number=27,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='HEROKU', index=27, number=28,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='INSTAGRAM', index=28, number=29,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='INTERCOM', index=29, number=30,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='KAKAO', index=30, number=31,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LASTFM', index=31, number=32,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LINKEDIN', index=32, number=33,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LINE', index=33, number=34,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ONEDRIVE', index=34, number=35,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AZUREAD', index=35, number=36,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='MICROSOFTONLINE', index=36, number=37,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BATTLENET', index=37, number=38,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PAYPAL', index=38, number=39,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TWITTER', index=39, number=40,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SALESFORCE', index=40, number=41,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TYPETALK', index=41, number=42,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SLACK', index=42, number=43,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='MEETUP', index=43, number=44,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='XERO', index=44, number=46,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='VK', index=45, number=47,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NAVER', index=46, number=48,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='YANDEX', index=47, number=49,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NEXTCLOUD', index=48, number=50,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='APPLE', index=49, number=52,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='STRAVA', index=50, number=53,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=3957,
  serialized_end=4616,
)
# Register with the default symbol database so enum lookup by full name works.
_sym_db.RegisterEnumDescriptor(_UPDATECONNECTIONREQUEST_PROVIDERS)
# Generated descriptor for the nested enum
# depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.Types.
# Same value set as GetConnectionsResponse.Types (UNSET/PASSWORD/OTP/TOTP/OIDC).
_UPDATECONNECTIONREQUEST_TYPES = _descriptor.EnumDescriptor(
  name='Types',
  full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.Types',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNSET', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PASSWORD', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='OTP', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TOTP', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='OIDC', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=4618,
  serialized_end=4679,
)
# Register with the default symbol database so enum lookup by full name works.
_sym_db.RegisterEnumDescriptor(_UPDATECONNECTIONREQUEST_TYPES)
# Generated message descriptor for CreateIdentityRequest:
# data (field 1, message), connection_id (field 2, string),
# schema_id (field 3, string).
_CREATEIDENTITYREQUEST = _descriptor.Descriptor(
  name='CreateIdentityRequest',
  full_name='depot.devtools.auth.v0.identity.admin.CreateIdentityRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # type=11/cpp_type=10 is a message-typed field; its concrete message type
    # is linked up elsewhere in this generated file.
    _descriptor.FieldDescriptor(
      name='data', full_name='depot.devtools.auth.v0.identity.admin.CreateIdentityRequest.data', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='data', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='connection_id', full_name='depot.devtools.auth.v0.identity.admin.CreateIdentityRequest.connection_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='connectionId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='schema_id', full_name='depot.devtools.auth.v0.identity.admin.CreateIdentityRequest.schema_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='schemaId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=156,
  serialized_end=290,
)
# Generated message descriptor for CreateIdentityResponse:
# a single string field identity_id (field 1).
_CREATEIDENTITYRESPONSE = _descriptor.Descriptor(
  name='CreateIdentityResponse',
  full_name='depot.devtools.auth.v0.identity.admin.CreateIdentityResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.CreateIdentityResponse.identity_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=292,
  serialized_end=349,
)
# Generated message descriptor for GetIdentitiesRequest: an empty request
# message (no fields) — the RPC lists all identities.
_GETIDENTITIESREQUEST = _descriptor.Descriptor(
  name='GetIdentitiesRequest',
  full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=351,
  serialized_end=373,
)
# Generated descriptor for the nested message GetIdentitiesResponse.Identity:
# one identity summary row (id, timestamps, main identifier, schema id, lock).
_GETIDENTITIESRESPONSE_IDENTITY = _descriptor.Descriptor(
  name='Identity',
  full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity.identity_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # created_at/updated_at are message-typed (type=11); presumably
    # google.protobuf.Timestamp — confirm against the .proto source.
    _descriptor.FieldDescriptor(
      name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity.created_at', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity.updated_at', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='main_identifier', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity.main_identifier', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='mainIdentifier', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='schema_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity.schema_id', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='schemaId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # lock is enum-typed (type=14) with default 0; the enum descriptor is
    # linked up elsewhere in this generated file.
    _descriptor.FieldDescriptor(
      name='lock', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity.lock', index=5,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='lock', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=505,
  serialized_end=823,
)
# Generated message descriptor for GetIdentitiesResponse: a repeated
# (label=3) message field 'identities' of the nested Identity type above.
_GETIDENTITIESRESPONSE = _descriptor.Descriptor(
  name='GetIdentitiesResponse',
  full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='identities', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.identities', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identities', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_GETIDENTITIESRESPONSE_IDENTITY, ],
  enum_types=[
    # _GETIDENTITIESRESPONSE_LOCK is defined elsewhere in this generated file.
    _GETIDENTITIESRESPONSE_LOCK,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=376,
  serialized_end=863,
)
# Generated message descriptor for GetIdentityRequest: a single string
# field identity_id. NOTE(review): its wire number is 2, not 1 — field
# number 1 appears unused/removed in the .proto; confirm against the source.
_GETIDENTITYREQUEST = _descriptor.Descriptor(
  name='GetIdentityRequest',
  full_name='depot.devtools.auth.v0.identity.admin.GetIdentityRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityRequest.identity_id', index=0,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=865,
  serialized_end=918,
)
# Generated descriptor for the synthetic key/value entry message backing the
# GetIdentityResponse.credentials_ids map field (string key, string value).
_GETIDENTITYRESPONSE_CREDENTIALSIDSENTRY = _descriptor.Descriptor(
  name='CredentialsIdsEntry',
  full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.CredentialsIdsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.CredentialsIdsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='key', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.CredentialsIdsEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  # NOTE(review): b'8\001' looks like the serialized map_entry=true
  # MessageOptions emitted by protoc for map entries — confirm if editing.
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1437,
  serialized_end=1502,
)
# Generated message descriptor for GetIdentityResponse: a full identity view
# (id, timestamps, main identifier, traits id, repeated address ids, the
# credentials_ids map, schema id, lock enum).
_GETIDENTITYRESPONSE = _descriptor.Descriptor(
  name='GetIdentityResponse',
  full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.identity_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # created_at/updated_at are message-typed (type=11); presumably
    # google.protobuf.Timestamp — confirm against the .proto source.
    _descriptor.FieldDescriptor(
      name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.created_at', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.updated_at', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='main_identifier', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.main_identifier', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='mainIdentifier', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='traits_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.traits_id', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='traitsId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # Repeated (label=3) string field.
    _descriptor.FieldDescriptor(
      name='addresses_ids', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.addresses_ids', index=5,
      number=6, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='addressesIds', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # Map field, represented as a repeated CredentialsIdsEntry message.
    _descriptor.FieldDescriptor(
      name='credentials_ids', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.credentials_ids', index=6,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='credentialsIds', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='schema_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.schema_id', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='schemaId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # lock is enum-typed (type=14) with default 0.
    _descriptor.FieldDescriptor(
      name='lock', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityResponse.lock', index=8,
      number=9, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='lock', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_GETIDENTITYRESPONSE_CREDENTIALSIDSENTRY, ],
  enum_types=[
    # _GETIDENTITYRESPONSE_LOCK is defined elsewhere in this generated file.
    _GETIDENTITYRESPONSE_LOCK,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=921,
  serialized_end=1542,
)
# Generated message descriptor for GetIdentitiesByAttributeRequest:
# attribute (field 1, string) and value (field 2, string) — an
# attribute/value lookup pair.
_GETIDENTITIESBYATTRIBUTEREQUEST = _descriptor.Descriptor(
  name='GetIdentitiesByAttributeRequest',
  full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='attribute', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeRequest.attribute', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='attribute', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeRequest.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1544,
  serialized_end=1629,
)
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_CREDENTIALSIDSENTRY = _descriptor.Descriptor(
name='CredentialsIdsEntry',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.CredentialsIdsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.CredentialsIdsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='key', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.CredentialsIdsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1437,
serialized_end=1502,
)
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY = _descriptor.Descriptor(
name='Identity',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.identity_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.created_at', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.updated_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='main_identifier', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.main_identifier', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='mainIdentifier', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='traits_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.traits_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='traitsId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='addresses_ids', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.addresses_ids', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='addressesIds', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='credentials_ids', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.credentials_ids', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='credentialsIds', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='schema_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.schema_id', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='schemaId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lock', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.lock', index=8,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='lock', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_CREDENTIALSIDSENTRY, ],
enum_types=[
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_LOCK,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1783,
serialized_end=2438,
)
_GETIDENTITIESBYATTRIBUTERESPONSE = _descriptor.Descriptor(
name='GetIdentitiesByAttributeResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identities', full_name='depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.identities', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identities', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1632,
serialized_end=2438,
)
_UPDATEIDENTITYREQUEST = _descriptor.Descriptor(
name='UpdateIdentityRequest',
full_name='depot.devtools.auth.v0.identity.admin.UpdateIdentityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.UpdateIdentityRequest.identity_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lock', full_name='depot.devtools.auth.v0.identity.admin.UpdateIdentityRequest.lock', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='lock', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_UPDATEIDENTITYREQUEST_LOCK,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2441,
serialized_end=2624,
)
_UPDATEIDENTITYRESPONSE = _descriptor.Descriptor(
name='UpdateIdentityResponse',
full_name='depot.devtools.auth.v0.identity.admin.UpdateIdentityResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2626,
serialized_end=2650,
)
_DELETEIDENTITYREQUEST = _descriptor.Descriptor(
name='DeleteIdentityRequest',
full_name='depot.devtools.auth.v0.identity.admin.DeleteIdentityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.DeleteIdentityRequest.identity_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2652,
serialized_end=2708,
)
_DELETEIDENTITYRESPONSE = _descriptor.Descriptor(
name='DeleteIdentityResponse',
full_name='depot.devtools.auth.v0.identity.admin.DeleteIdentityResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2710,
serialized_end=2734,
)
_GETCREDENTIALSREQUEST = _descriptor.Descriptor(
name='GetCredentialsRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsRequest.identity_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2736,
serialized_end=2792,
)
_GETCREDENTIALSRESPONSE_CREDENTIAL = _descriptor.Descriptor(
name='Credential',
full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='credential_id', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential.credential_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='credentialId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential.created_at', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential.updated_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential.identity_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential.type', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='type', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential.name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='configured', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential.configured', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='configured', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2936,
serialized_end=3208,
)
_GETCREDENTIALSRESPONSE_CREDENTIALSENTRY = _descriptor.Descriptor(
name='CredentialsEntry',
full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.CredentialsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.CredentialsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='key', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.CredentialsEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3211,
serialized_end=3347,
)
_GETCREDENTIALSRESPONSE = _descriptor.Descriptor(
name='GetCredentialsResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='credentials', full_name='depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.credentials', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='credentials', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETCREDENTIALSRESPONSE_CREDENTIAL, _GETCREDENTIALSRESPONSE_CREDENTIALSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2795,
serialized_end=3347,
)
_UPDATECREDENTIALREQUEST = _descriptor.Descriptor(
name='UpdateCredentialRequest',
full_name='depot.devtools.auth.v0.identity.admin.UpdateCredentialRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data', full_name='depot.devtools.auth.v0.identity.admin.UpdateCredentialRequest.data', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='data', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection_id', full_name='depot.devtools.auth.v0.identity.admin.UpdateCredentialRequest.connection_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='connectionId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.UpdateCredentialRequest.identity_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3350,
serialized_end=3490,
)
_UPDATECREDENTIALRESPONSE = _descriptor.Descriptor(
name='UpdateCredentialResponse',
full_name='depot.devtools.auth.v0.identity.admin.UpdateCredentialResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3492,
serialized_end=3518,
)
_CREATECONNECTIONREQUEST = _descriptor.Descriptor(
name='CreateConnectionRequest',
full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_secret', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.client_secret', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientSecret', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='button_image_url', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.button_image_url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='buttonImageUrl', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='provider', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.provider', index=4,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='provider', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oidc_discovery_url', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.oidc_discovery_url', index=5,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='oidcDiscoveryUrl', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mfa', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.mfa', index=6,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='mfa', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.type', index=7,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='type', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scopes', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionRequest.scopes', index=8,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='scopes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_CREATECONNECTIONREQUEST_PROVIDERS,
_CREATECONNECTIONREQUEST_TYPES,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3521,
serialized_end=4679,
)
_CREATECONNECTIONRESPONSE = _descriptor.Descriptor(
name='CreateConnectionResponse',
full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.CreateConnectionResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4681,
serialized_end=4727,
)
_GETCONNECTIONSREQUEST = _descriptor.Descriptor(
name='GetConnectionsRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4729,
serialized_end=4752,
)
_GETCONNECTIONSRESPONSE_CONNECTION = _descriptor.Descriptor(
name='Connection',
full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_secret', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.client_secret', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientSecret', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='button_image_url', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.button_image_url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='buttonImageUrl', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='provider', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.provider', index=4,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='provider', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oidc_discovery_url', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.oidc_discovery_url', index=5,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='oidcDiscoveryUrl', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mfa', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.mfa', index=6,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='mfa', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.type', index=7,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='type', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.id', index=8,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scopes', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.scopes', index=9,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='scopes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='purpose', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection.purpose', index=10,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='purpose', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4890,
serialized_end=5422,
)
# Descriptor for the GetConnectionsResponse message.
# NOTE(review): protoc-generated code — regenerate from the .proto source instead of
# hand-editing; the serialized_start/serialized_end byte offsets must match the
# serialized file descriptor exactly.
# Single field: 'connections' (number=1, type=11 message, label=3 repeated) — a repeated
# list of the nested Connection message registered below in nested_types.
_GETCONNECTIONSRESPONSE = _descriptor.Descriptor(
name='GetConnectionsResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='connections', full_name='depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.connections', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='connections', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
# Nested Connection message plus the Providers/Types/Purposes enums, all declared
# earlier in this generated module.
nested_types=[_GETCONNECTIONSRESPONSE_CONNECTION, ],
enum_types=[
_GETCONNECTIONSRESPONSE_PROVIDERS,
_GETCONNECTIONSRESPONSE_TYPES,
_GETCONNECTIONSRESPONSE_PURPOSES,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4755,
serialized_end=6232,
)
# Descriptor for the UpdateConnectionRequest message.
# NOTE(review): protoc-generated code — do not hand-edit; regenerate from the .proto.
# Field numbers are sparse (1-4, 6-11; 5 unused — presumably reserved/removed in the
# .proto, TODO confirm). type=9 is a string field, type=14 an enum field; label=3 on
# 'mfa' makes it a repeated string.
_UPDATECONNECTIONREQUEST = _descriptor.Descriptor(
name='UpdateConnectionRequest',
full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_secret', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.client_secret', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientSecret', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='button_image_url', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.button_image_url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='buttonImageUrl', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='provider', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.provider', index=4,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='provider', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oidc_discovery_url', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.oidc_discovery_url', index=5,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='oidcDiscoveryUrl', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mfa', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.mfa', index=6,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='mfa', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.type', index=7,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='type', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.id', index=8,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scopes', full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest.scopes', index=9,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='scopes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
# Providers/Types enum descriptors declared earlier in this generated module.
enum_types=[
_UPDATECONNECTIONREQUEST_PROVIDERS,
_UPDATECONNECTIONREQUEST_TYPES,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6235,
serialized_end=7409,
)
# Descriptor for the UpdateConnectionResponse message (no fields — an empty
# acknowledgement response). Protoc-generated; do not hand-edit.
_UPDATECONNECTIONRESPONSE = _descriptor.Descriptor(
name='UpdateConnectionResponse',
full_name='depot.devtools.auth.v0.identity.admin.UpdateConnectionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7411,
serialized_end=7437,
)
# Descriptor for the DeleteConnectionRequest message: a single string field
# 'id' (number=1) identifying the connection to delete.
# Protoc-generated; do not hand-edit.
_DELETECONNECTIONREQUEST = _descriptor.Descriptor(
name='DeleteConnectionRequest',
full_name='depot.devtools.auth.v0.identity.admin.DeleteConnectionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.DeleteConnectionRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7439,
serialized_end=7480,
)
# Descriptor for the DeleteConnectionResponse message (no fields — an empty
# acknowledgement response). Protoc-generated; do not hand-edit.
_DELETECONNECTIONRESPONSE = _descriptor.Descriptor(
name='DeleteConnectionResponse',
full_name='depot.devtools.auth.v0.identity.admin.DeleteConnectionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7482,
serialized_end=7508,
)
# Descriptor for the GetOAuth2ClientsRequest message (no fields — a parameterless
# list request). Protoc-generated; do not hand-edit.
_GETOAUTH2CLIENTSREQUEST = _descriptor.Descriptor(
name='GetOAuth2ClientsRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7510,
serialized_end=7535,
)
# Descriptor for the nested message GetOAuth2ClientsResponse.Client — the OAuth2
# client record returned by GetOAuth2Clients. Field numbers 1-33 are contiguous.
# Field type legend as used below: type=9 string, type=8 bool, type=3 int64,
# type=11 embedded message (e.g. created_at/updated_at/jwks/metadata), type=14 enum;
# label=3 marks repeated fields.
# NOTE(review): protoc-generated code — regenerate from the .proto source instead of
# hand-editing; serialized_start/serialized_end must match the serialized descriptor.
_GETOAUTH2CLIENTSRESPONSE_CLIENT = _descriptor.Descriptor(
name='Client',
full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='allowed_cors_origins', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.allowed_cors_origins', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='allowedCorsOrigins', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='audience', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.audience', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='audience', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backchannel_logout_session_required', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.backchannel_logout_session_required', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='backchannelLogoutSessionRequired', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backchannel_logout_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.backchannel_logout_uri', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='backchannelLogoutUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.client_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_name', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.client_name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_secret_expires_at', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.client_secret_expires_at', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientSecretExpiresAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.client_uri', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='contacts', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.contacts', index=8,
number=9, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='contacts', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.created_at', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='frontchannel_logout_session_required', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.frontchannel_logout_session_required', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='frontchannelLogoutSessionRequired', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='frontchannel_logout_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.frontchannel_logout_uri', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='frontchannelLogoutUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='grant_types', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.grant_types', index=12,
number=13, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='grantTypes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='jwks', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.jwks', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='jwks', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='jwks_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.jwks_uri', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='jwksUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='logo_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.logo_uri', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='logoUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.metadata', index=16,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='metadata', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='owner', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.owner', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='owner', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='policy_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.policy_uri', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='policyUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='post_logout_redirect_uris', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.post_logout_redirect_uris', index=19,
number=20, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='postLogoutRedirectUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='redirect_uris', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.redirect_uris', index=20,
number=21, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='redirectUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_object_signing_alg', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.request_object_signing_alg', index=21,
number=22, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='requestObjectSigningAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_uris', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.request_uris', index=22,
number=23, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='requestUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='response_types', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.response_types', index=23,
number=24, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='responseTypes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scope', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.scope', index=24,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='scope', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sector_identifier_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.sector_identifier_uri', index=25,
number=26, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='sectorIdentifierUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subject_type', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.subject_type', index=26,
number=27, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='subjectType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_endpoint_auth_method', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.token_endpoint_auth_method', index=27,
number=28, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='tokenEndpointAuthMethod', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_endpoint_auth_signing_alg', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.token_endpoint_auth_signing_alg', index=28,
number=29, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='tokenEndpointAuthSigningAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tos_uri', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.tos_uri', index=29,
number=30, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='tosUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.updated_at', index=30,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='userinfo_signed_response_alg', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.userinfo_signed_response_alg', index=31,
number=32, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='userinfoSignedResponseAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_type', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client.client_type', index=32,
number=33, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7665,
serialized_end=9619,
)
# Descriptor for the GetOAuth2ClientsResponse message: a single repeated message
# field 'clients' (number=1) of the nested Client message registered in
# nested_types. Protoc-generated; do not hand-edit.
_GETOAUTH2CLIENTSRESPONSE = _descriptor.Descriptor(
name='GetOAuth2ClientsResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='clients', full_name='depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.clients', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clients', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETOAUTH2CLIENTSRESPONSE_CLIENT, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7538,
serialized_end=9619,
)
_CREATEOAUTH2CLIENTREQUEST = _descriptor.Descriptor(
name='CreateOAuth2ClientRequest',
full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='allowed_cors_origins', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.allowed_cors_origins', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='allowedCorsOrigins', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='audience', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.audience', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='audience', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backchannel_logout_session_required', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.backchannel_logout_session_required', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='backchannelLogoutSessionRequired', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backchannel_logout_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.backchannel_logout_uri', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='backchannelLogoutUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_name', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.client_name', index=4,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.client_uri', index=5,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='contacts', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.contacts', index=6,
number=9, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='contacts', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='frontchannel_logout_session_required', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.frontchannel_logout_session_required', index=7,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='frontchannelLogoutSessionRequired', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='frontchannel_logout_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.frontchannel_logout_uri', index=8,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='frontchannelLogoutUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='grant_types', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.grant_types', index=9,
number=13, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='grantTypes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='jwks', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.jwks', index=10,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='jwks', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='jwks_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.jwks_uri', index=11,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='jwksUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='logo_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.logo_uri', index=12,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='logoUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.metadata', index=13,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='metadata', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='owner', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.owner', index=14,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='owner', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='policy_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.policy_uri', index=15,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='policyUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='post_logout_redirect_uris', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.post_logout_redirect_uris', index=16,
number=20, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='postLogoutRedirectUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='redirect_uris', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.redirect_uris', index=17,
number=21, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='redirectUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_object_signing_alg', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.request_object_signing_alg', index=18,
number=22, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='requestObjectSigningAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_uris', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.request_uris', index=19,
number=23, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='requestUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='response_types', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.response_types', index=20,
number=24, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='responseTypes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scope', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.scope', index=21,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='scope', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sector_identifier_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.sector_identifier_uri', index=22,
number=26, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='sectorIdentifierUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subject_type', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.subject_type', index=23,
number=27, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='subjectType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_endpoint_auth_method', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.token_endpoint_auth_method', index=24,
number=28, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='tokenEndpointAuthMethod', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_endpoint_auth_signing_alg', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.token_endpoint_auth_signing_alg', index=25,
number=29, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='tokenEndpointAuthSigningAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tos_uri', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.tos_uri', index=26,
number=30, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='tosUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='userinfo_signed_response_alg', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.userinfo_signed_response_alg', index=27,
number=31, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='userinfoSignedResponseAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_secret', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.client_secret', index=28,
number=32, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientSecret', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_type', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest.client_type', index=29,
number=33, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='clientType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9622,
serialized_end=11428,
)
# Machine-generated protoc descriptor for the CreateOAuth2ClientResponse
# message (depot.devtools.auth.v0.identity.admin). Do not edit by hand;
# regenerate from the .proto definition instead.
# Fields: client_id (string, field 1), client_secret (string, field 2).
_CREATEOAUTH2CLIENTRESPONSE = _descriptor.Descriptor(
  name='CreateOAuth2ClientResponse',
  full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='client_id', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientResponse.client_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_secret', full_name='depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientResponse.client_secret', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientSecret', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition within the file's serialized
  # FileDescriptorProto; kept in sync by protoc.
  serialized_start=11430,
  serialized_end=11524,
)
# Machine-generated protoc descriptor for the UpdateOAuth2ClientRequest
# message (depot.devtools.auth.v0.identity.admin). Do not edit by hand;
# regenerate from the .proto definition instead.
# The field set mirrors CreateOAuth2ClientRequest, except the final field
# (number 33) is client_id (string) here rather than client_type (enum).
# Field numbers 5, 7 and 10 are absent — presumably reserved/removed in the
# .proto; confirm against the proto source.
_UPDATEOAUTH2CLIENTREQUEST = _descriptor.Descriptor(
  name='UpdateOAuth2ClientRequest',
  full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='allowed_cors_origins', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.allowed_cors_origins', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='allowedCorsOrigins', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='audience', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.audience', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='audience', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='backchannel_logout_session_required', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.backchannel_logout_session_required', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='backchannelLogoutSessionRequired', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='backchannel_logout_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.backchannel_logout_uri', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='backchannelLogoutUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_name', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.client_name', index=4,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientName', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.client_uri', index=5,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='contacts', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.contacts', index=6,
      number=9, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='contacts', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='frontchannel_logout_session_required', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.frontchannel_logout_session_required', index=7,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='frontchannelLogoutSessionRequired', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='frontchannel_logout_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.frontchannel_logout_uri', index=8,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='frontchannelLogoutUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='grant_types', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.grant_types', index=9,
      number=13, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='grantTypes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='jwks', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.jwks', index=10,
      number=14, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='jwks', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='jwks_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.jwks_uri', index=11,
      number=15, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='jwksUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='logo_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.logo_uri', index=12,
      number=16, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='logoUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='metadata', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.metadata', index=13,
      number=17, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='metadata', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='owner', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.owner', index=14,
      number=18, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='owner', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='policy_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.policy_uri', index=15,
      number=19, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='policyUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='post_logout_redirect_uris', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.post_logout_redirect_uris', index=16,
      number=20, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='postLogoutRedirectUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='redirect_uris', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.redirect_uris', index=17,
      number=21, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='redirectUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='request_object_signing_alg', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.request_object_signing_alg', index=18,
      number=22, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='requestObjectSigningAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='request_uris', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.request_uris', index=19,
      number=23, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='requestUris', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='response_types', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.response_types', index=20,
      number=24, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='responseTypes', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='scope', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.scope', index=21,
      number=25, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='scope', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sector_identifier_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.sector_identifier_uri', index=22,
      number=26, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sectorIdentifierUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='subject_type', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.subject_type', index=23,
      number=27, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='subjectType', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='token_endpoint_auth_method', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.token_endpoint_auth_method', index=24,
      number=28, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='tokenEndpointAuthMethod', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='token_endpoint_auth_signing_alg', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.token_endpoint_auth_signing_alg', index=25,
      number=29, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='tokenEndpointAuthSigningAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tos_uri', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.tos_uri', index=26,
      number=30, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='tosUri', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='userinfo_signed_response_alg', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.userinfo_signed_response_alg', index=27,
      number=31, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='userinfoSignedResponseAlg', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_secret', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.client_secret', index=28,
      number=32, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientSecret', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_id', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest.client_id', index=29,
      number=33, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition within the file's serialized
  # FileDescriptorProto; kept in sync by protoc.
  serialized_start=11527,
  serialized_end=13278,
)
# Machine-generated protoc descriptor for the UpdateOAuth2ClientResponse
# message (depot.devtools.auth.v0.identity.admin). Do not edit by hand;
# regenerate from the .proto definition instead.
# Fields: client_id (string, field 1), client_secret (string, field 2) —
# same shape as CreateOAuth2ClientResponse.
_UPDATEOAUTH2CLIENTRESPONSE = _descriptor.Descriptor(
  name='UpdateOAuth2ClientResponse',
  full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='client_id', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientResponse.client_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_secret', full_name='depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientResponse.client_secret', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientSecret', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13280,
  serialized_end=13374,
)
# Machine-generated protoc descriptor for the DeleteOAuth2ClientRequest
# message (depot.devtools.auth.v0.identity.admin). Do not edit by hand;
# regenerate from the .proto definition instead.
# Single field: client_id (string, field 1) identifying the client to delete.
_DELETEOAUTH2CLIENTREQUEST = _descriptor.Descriptor(
  name='DeleteOAuth2ClientRequest',
  full_name='depot.devtools.auth.v0.identity.admin.DeleteOAuth2ClientRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='client_id', full_name='depot.devtools.auth.v0.identity.admin.DeleteOAuth2ClientRequest.client_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='clientId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13376,
  serialized_end=13432,
)
# Machine-generated protoc descriptor for the DeleteOAuth2ClientResponse
# message (depot.devtools.auth.v0.identity.admin) — an empty message (no
# fields). Do not edit by hand; regenerate from the .proto definition instead.
_DELETEOAUTH2CLIENTRESPONSE = _descriptor.Descriptor(
  name='DeleteOAuth2ClientResponse',
  full_name='depot.devtools.auth.v0.identity.admin.DeleteOAuth2ClientResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13434,
  serialized_end=13462,
)
# Machine-generated protoc descriptor for the GetAddressesRequest message
# (depot.devtools.auth.v0.identity.admin). Do not edit by hand; regenerate
# from the .proto definition instead.
# Single field: identity_id (string, field 1).
_GETADDRESSESREQUEST = _descriptor.Descriptor(
  name='GetAddressesRequest',
  full_name='depot.devtools.auth.v0.identity.admin.GetAddressesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesRequest.identity_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13464,
  serialized_end=13518,
)
# Machine-generated protoc descriptor for the nested message
# GetAddressesResponse.Address (it is listed in _GETADDRESSESRESPONSE's
# nested_types). Do not edit by hand; regenerate from the .proto instead.
# Fields: id (1), identity_id (2), name (3), address (4) — strings;
# verified (5) — bool; id_schema_key (6) — string.
_GETADDRESSESRESPONSE_ADDRESS = _descriptor.Descriptor(
  name='Address',
  full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address.identity_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address.name', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='address', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address.address', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='address', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='verified', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address.verified', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='verified', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='id_schema_key', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address.id_schema_key', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='idSchemaKey', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Nested-type offsets fall inside the enclosing GetAddressesResponse span
  # (13521-13813).
  serialized_start=13645,
  serialized_end=13813,
)
# Machine-generated protoc descriptor for the GetAddressesResponse message
# (depot.devtools.auth.v0.identity.admin). Do not edit by hand; regenerate
# from the .proto definition instead.
# Single field: addresses (field 1) — a repeated message (label=3, type=11);
# the element type is the nested Address descriptor registered below in
# nested_types.
_GETADDRESSESRESPONSE = _descriptor.Descriptor(
  name='GetAddressesResponse',
  full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='addresses', full_name='depot.devtools.auth.v0.identity.admin.GetAddressesResponse.addresses', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='addresses', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_GETADDRESSESRESPONSE_ADDRESS, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13521,
  serialized_end=13813,
)
# Machine-generated protoc descriptor for the GetAddressRequest message
# (depot.devtools.auth.v0.identity.admin). Do not edit by hand; regenerate
# from the .proto definition instead.
# Declares a oneof named 'match'; fields id (string, 1) and
# verification (string, 2) — presumably the oneof's members, which protoc
# wires up when the message classes are built. TODO confirm against the
# .proto source.
_GETADDRESSREQUEST = _descriptor.Descriptor(
  name='GetAddressRequest',
  full_name='depot.devtools.auth.v0.identity.admin.GetAddressRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='depot.devtools.auth.v0.identity.admin.GetAddressRequest.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='verification', full_name='depot.devtools.auth.v0.identity.admin.GetAddressRequest.verification', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='verification', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='match', full_name='depot.devtools.auth.v0.identity.admin.GetAddressRequest.match',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=13815,
  serialized_end=13899,
)
_GETADDRESSRESPONSE = _descriptor.Descriptor(
name='GetAddressResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetAddressResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.GetAddressResponse.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetAddressResponse.identity_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.GetAddressResponse.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='address', full_name='depot.devtools.auth.v0.identity.admin.GetAddressResponse.address', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='address', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='verified', full_name='depot.devtools.auth.v0.identity.admin.GetAddressResponse.verified', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='verified', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='id_schema_key', full_name='depot.devtools.auth.v0.identity.admin.GetAddressResponse.id_schema_key', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='idSchemaKey', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13902,
serialized_end=14081,
)
# UpdateAddressRequest: mutates one attribute of the address identified by
# 'id'. 'verified' and 'address' are members of the 'attribute' oneof below,
# so a single request updates exactly one of them.
_UPDATEADDRESSREQUEST = _descriptor.Descriptor(
name='UpdateAddressRequest',
full_name='depot.devtools.auth.v0.identity.admin.UpdateAddressRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.UpdateAddressRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='verified', full_name='depot.devtools.auth.v0.identity.admin.UpdateAddressRequest.verified', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='verified', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='address', full_name='depot.devtools.auth.v0.identity.admin.UpdateAddressRequest.address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='address', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
# 'attribute' oneof; member list is populated by linking code elsewhere.
_descriptor.OneofDescriptor(
name='attribute', full_name='depot.devtools.auth.v0.identity.admin.UpdateAddressRequest.attribute',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=14083,
serialized_end=14192,
)
# UpdateAddressResponse: empty acknowledgement message (no fields).
_UPDATEADDRESSRESPONSE = _descriptor.Descriptor(
name='UpdateAddressResponse',
full_name='depot.devtools.auth.v0.identity.admin.UpdateAddressResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14194,
serialized_end=14217,
)
# GetTraitsRequest: fetch the traits blob for one identity by identity_id.
_GETTRAITSREQUEST = _descriptor.Descriptor(
name='GetTraitsRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetTraitsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetTraitsRequest.identity_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14219,
serialized_end=14270,
)
# GetTraitsResponse: single string field carrying the traits payload
# (serialization format not visible here — presumably JSON; confirm in .proto).
_GETTRAITSRESPONSE = _descriptor.Descriptor(
name='GetTraitsResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetTraitsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='traits', full_name='depot.devtools.auth.v0.identity.admin.GetTraitsResponse.traits', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='traits', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14272,
serialized_end=14315,
)
# UpdateTraitsRequest: replace the traits payload of identity 'identity_id'.
_UPDATETRAITSREQUEST = _descriptor.Descriptor(
name='UpdateTraitsRequest',
full_name='depot.devtools.auth.v0.identity.admin.UpdateTraitsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.UpdateTraitsRequest.identity_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='traits', full_name='depot.devtools.auth.v0.identity.admin.UpdateTraitsRequest.traits', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='traits', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14317,
serialized_end=14395,
)
# UpdateTraitsResponse: empty acknowledgement message (no fields).
_UPDATETRAITSRESPONSE = _descriptor.Descriptor(
name='UpdateTraitsResponse',
full_name='depot.devtools.auth.v0.identity.admin.UpdateTraitsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14397,
serialized_end=14419,
)
# GetIdentityLoginAttemptsRequest: list login attempts for one identity.
_GETIDENTITYLOGINATTEMPTSREQUEST = _descriptor.Descriptor(
name='GetIdentityLoginAttemptsRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsRequest.identity_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14421,
serialized_end=14487,
)
# Nested message GetIdentityLoginAttemptsResponse.Attempt: one login attempt.
# 'authentication_methods' is a repeated string (label=3); 'created_at' /
# 'expires_at' are message-typed (type=11) — their message_type is resolved
# by linking code elsewhere (presumably timestamps; confirm in .proto).
_GETIDENTITYLOGINATTEMPTSRESPONSE_ATTEMPT = _descriptor.Descriptor(
name='Attempt',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='identity_id', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt.identity_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identityId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt.status', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='status', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='authentication_methods', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt.authentication_methods', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='authenticationMethods', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt.created_at', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expires_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt.expires_at', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='expiresAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
# Nested message's byte span lies inside its parent's span (14490–14891).
serialized_start=14636,
serialized_end=14891,
)
# GetIdentityLoginAttemptsResponse: repeated Attempt records (nested above).
_GETIDENTITYLOGINATTEMPTSRESPONSE = _descriptor.Descriptor(
name='GetIdentityLoginAttemptsResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='attempts', full_name='depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.attempts', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='attempts', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETIDENTITYLOGINATTEMPTSRESPONSE_ATTEMPT, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14490,
serialized_end=14891,
)
# CreateIdSchemaRequest: create a named id-schema; 'content' is a
# message-typed field (type=11) resolved during descriptor linking.
_CREATEIDSCHEMAREQUEST = _descriptor.Descriptor(
name='CreateIdSchemaRequest',
full_name='depot.devtools.auth.v0.identity.admin.CreateIdSchemaRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.CreateIdSchemaRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='depot.devtools.auth.v0.identity.admin.CreateIdSchemaRequest.content', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='content', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14893,
serialized_end=14987,
)
# CreateIdSchemaResponse: returns the new schema's id.
# NOTE(review): this sole field uses number=2 while sibling responses start
# at number=1 — this mirrors the .proto definition (generated code), but it
# looks like a leftover from a removed field; confirm against the .proto.
_CREATEIDSCHEMARESPONSE = _descriptor.Descriptor(
name='CreateIdSchemaResponse',
full_name='depot.devtools.auth.v0.identity.admin.CreateIdSchemaResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.CreateIdSchemaResponse.id', index=0,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14989,
serialized_end=15029,
)
# GetIdSchemasRequest: empty request — lists all id-schemas.
_GETIDSCHEMASREQUEST = _descriptor.Descriptor(
name='GetIdSchemasRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15031,
serialized_end=15052,
)
# Nested message GetIdSchemasResponse.JsonSchema: one schema record — id,
# name, message-typed content/timestamps (resolved at link time), and a
# bool flag marking the default schema.
_GETIDSCHEMASRESPONSE_JSONSCHEMA = _descriptor.Descriptor(
name='JsonSchema',
full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema.content', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='content', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema.created_at', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema.updated_at', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_default', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema.is_default', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='isDefault', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
# Nested span lies inside the parent's span (15055–15434).
serialized_start=15186,
serialized_end=15434,
)
# GetIdSchemasResponse: repeated JsonSchema records (nested above).
_GETIDSCHEMASRESPONSE = _descriptor.Descriptor(
name='GetIdSchemasResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='jsonschemas', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.jsonschemas', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='jsonschemas', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETIDSCHEMASRESPONSE_JSONSCHEMA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15055,
serialized_end=15434,
)
# GetIdSchemaRequest: fetch one id-schema by id.
_GETIDSCHEMAREQUEST = _descriptor.Descriptor(
name='GetIdSchemaRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15436,
serialized_end=15472,
)
# GetIdSchemaResponse: one schema record — same field layout as the nested
# GetIdSchemasResponse.JsonSchema message.
_GETIDSCHEMARESPONSE = _descriptor.Descriptor(
name='GetIdSchemaResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse.content', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='content', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse.created_at', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse.updated_at', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_default', full_name='depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse.is_default', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='isDefault', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15475,
serialized_end=15732,
)
# GetDefaultIdSchemaRequest: empty request — fetches the default id-schema.
_GETDEFAULTIDSCHEMAREQUEST = _descriptor.Descriptor(
name='GetDefaultIdSchemaRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15734,
serialized_end=15761,
)
# GetDefaultIdSchemaResponse: same schema-record layout as GetIdSchemaResponse.
_GETDEFAULTIDSCHEMARESPONSE = _descriptor.Descriptor(
name='GetDefaultIdSchemaResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse.content', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='content', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse.created_at', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='createdAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse.updated_at', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='updatedAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_default', full_name='depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse.is_default', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='isDefault', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15764,
serialized_end=16028,
)
# UpdateIdSchemaRequest: update a schema's name and/or content by id.
_UPDATEIDSCHEMAREQUEST = _descriptor.Descriptor(
name='UpdateIdSchemaRequest',
full_name='depot.devtools.auth.v0.identity.admin.UpdateIdSchemaRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.UpdateIdSchemaRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='depot.devtools.auth.v0.identity.admin.UpdateIdSchemaRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='name', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='depot.devtools.auth.v0.identity.admin.UpdateIdSchemaRequest.content', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='content', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16030,
serialized_end=16140,
)
# UpdateIdSchemaResponse: empty acknowledgement message (no fields).
_UPDATEIDSCHEMARESPONSE = _descriptor.Descriptor(
name='UpdateIdSchemaResponse',
full_name='depot.devtools.auth.v0.identity.admin.UpdateIdSchemaResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16142,
serialized_end=16166,
)
# MarkDefaultIdSchemaRequest: promote the schema with the given id to default.
_MARKDEFAULTIDSCHEMAREQUEST = _descriptor.Descriptor(
name='MarkDefaultIdSchemaRequest',
full_name='depot.devtools.auth.v0.identity.admin.MarkDefaultIdSchemaRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.MarkDefaultIdSchemaRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16168,
serialized_end=16212,
)
# MarkDefaultIdSchemaResponse: empty acknowledgement message (no fields).
_MARKDEFAULTIDSCHEMARESPONSE = _descriptor.Descriptor(
name='MarkDefaultIdSchemaResponse',
full_name='depot.devtools.auth.v0.identity.admin.MarkDefaultIdSchemaResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16214,
serialized_end=16243,
)
_DELETEIDSCHEMAREQUEST = _descriptor.Descriptor(
name='DeleteIdSchemaRequest',
full_name='depot.devtools.auth.v0.identity.admin.DeleteIdSchemaRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='depot.devtools.auth.v0.identity.admin.DeleteIdSchemaRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='id', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16245,
serialized_end=16284,
)
_DELETEIDSCHEMARESPONSE = _descriptor.Descriptor(
name='DeleteIdSchemaResponse',
full_name='depot.devtools.auth.v0.identity.admin.DeleteIdSchemaResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16286,
serialized_end=16310,
)
# Empty request message for the GetEmailsSetup admin RPC (no fields).
_GETEMAILSSETUPREQUEST = _descriptor.Descriptor(
name='GetEmailsSetupRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16312,
serialized_end=16335,
)
# Nested message GetEmailsSetupResponse.EmailTemplate:
# content (field 3, string) and subject (field 4, string).
_GETEMAILSSETUPRESPONSE_EMAILTEMPLATE = _descriptor.Descriptor(
name='EmailTemplate',
full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='content', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplate.content', index=0,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='content', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subject', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplate.subject', index=1,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='subject', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17058,
serialized_end=17125,
)
# Nested message GetEmailsSetupResponse.EmailSender: SMTP sender settings —
# email_address (1), smtp_host (2), smtp_port (3, uint32),
# smtp_username (4), smtp_password (5); all strings except the port.
_GETEMAILSSETUPRESPONSE_EMAILSENDER = _descriptor.Descriptor(
name='EmailSender',
full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSender',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='email_address', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSender.email_address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='emailAddress', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_host', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSender.smtp_host', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpHost', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_port', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSender.smtp_port', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpPort', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_username', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSender.smtp_username', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpUsername', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_password', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSender.smtp_password', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpPassword', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17128,
serialized_end=17310,
)
# GetEmailsSetupResponse: one EmailTemplate per email kind (welcome,
# account_verification, account_recovery, account_recovered, otp; fields 1-5),
# the EmailSender smtp config (field 6) and project_id (field 7, string).
# Message-typed fields are wired to their descriptors later in this module.
_GETEMAILSSETUPRESPONSE = _descriptor.Descriptor(
name='GetEmailsSetupResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='welcome', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.welcome', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='welcome', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='account_verification', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.account_verification', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='accountVerification', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='account_recovery', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.account_recovery', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='accountRecovery', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='account_recovered', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.account_recovered', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='accountRecovered', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='otp', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.otp', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='otp', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.smtp', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtp', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.project_id', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='projectId', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETEMAILSSETUPRESPONSE_EMAILTEMPLATE, _GETEMAILSSETUPRESPONSE_EMAILSENDER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16338,
serialized_end=17310,
)
# Nested message UpdateEmailsSetupRequest.EmailTemplate:
# content (field 1, string) and subject (field 2, string).
_UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE = _descriptor.Descriptor(
name='EmailTemplate',
full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='content', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplate.content', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='content', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subject', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplate.subject', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='subject', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18017,
serialized_end=18084,
)
# Nested message UpdateEmailsSetupRequest.EmailSender: same SMTP sender shape
# as GetEmailsSetupResponse.EmailSender (fields 1-5).
# NOTE(review): serialized_start/serialized_end (17128-17310) here are
# identical to _GETEMAILSSETUPRESPONSE_EMAILSENDER's and fall outside the
# parent _UPDATEEMAILSSETUPREQUEST range (17313-18269). This may be a
# regeneration artifact — confirm by re-running protoc; do not hand-edit.
_UPDATEEMAILSSETUPREQUEST_EMAILSENDER = _descriptor.Descriptor(
name='EmailSender',
full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSender',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='email_address', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSender.email_address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='emailAddress', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_host', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSender.smtp_host', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpHost', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_port', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSender.smtp_port', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpPort', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_username', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSender.smtp_username', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpUsername', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp_password', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSender.smtp_password', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtpPassword', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17128,
serialized_end=17310,
)
# UpdateEmailsSetupRequest: mirrors GetEmailsSetupResponse minus project_id —
# one EmailTemplate per email kind (fields 1-5) plus the smtp sender (field 6).
# Message-typed fields are wired to their descriptors later in this module.
_UPDATEEMAILSSETUPREQUEST = _descriptor.Descriptor(
name='UpdateEmailsSetupRequest',
full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='welcome', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.welcome', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='welcome', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='account_verification', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.account_verification', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='accountVerification', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='account_recovery', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.account_recovery', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='accountRecovery', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='account_recovered', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.account_recovered', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='accountRecovered', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='otp', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.otp', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='otp', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='smtp', full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.smtp', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='smtp', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE, _UPDATEEMAILSSETUPREQUEST_EMAILSENDER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17313,
serialized_end=18269,
)
# Empty response message for the UpdateEmailsSetup admin RPC (no fields).
_UPDATEEMAILSSETUPRESPONSE = _descriptor.Descriptor(
name='UpdateEmailsSetupResponse',
full_name='depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18271,
serialized_end=18298,
)
# GetUserBaseStatisticsRequest: days_before (field 1, uint32) plus
# lifetime_start_at / lifetime_end_at (fields 2-3, message-typed; wired to
# google.protobuf.Timestamp presumably — wiring lines for these two fields are
# outside this chunk, so confirm there).
_GETUSERBASESTATISTICSREQUEST = _descriptor.Descriptor(
name='GetUserBaseStatisticsRequest',
full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='days_before', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsRequest.days_before', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='daysBefore', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lifetime_start_at', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsRequest.lifetime_start_at', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='lifetimeStartAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lifetime_end_at', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsRequest.lifetime_end_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='lifetimeEndAt', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18301,
serialized_end=18504,
)
# Synthesized entry message for the users_per_day map<string, int32>;
# serialized_options=b'8\001' encodes the map_entry=true message option.
_GETUSERBASESTATISTICSRESPONSE_USERSPERDAYENTRY = _descriptor.Descriptor(
name='UsersPerDayEntry',
full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.UsersPerDayEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.UsersPerDayEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='key', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.UsersPerDayEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18923,
serialized_end=18985,
)
# Synthesized entry message for the sessions_per_day map<string, int32>;
# serialized_options=b'8\001' encodes the map_entry=true message option.
_GETUSERBASESTATISTICSRESPONSE_SESSIONSPERDAYENTRY = _descriptor.Descriptor(
name='SessionsPerDayEntry',
full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.SessionsPerDayEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.SessionsPerDayEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='key', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.SessionsPerDayEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='value', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18987,
serialized_end=19052,
)
# GetUserBaseStatisticsResponse: total_users (1, int32), repeated map entries
# users_per_day (2) and sessions_per_day (3), total_active_users (4, int32)
# and total_lifetime_used (5, double).
_GETUSERBASESTATISTICSRESPONSE = _descriptor.Descriptor(
name='GetUserBaseStatisticsResponse',
full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='total_users', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.total_users', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='totalUsers', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='users_per_day', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.users_per_day', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='usersPerDay', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sessions_per_day', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.sessions_per_day', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='sessionsPerDay', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_active_users', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.total_active_users', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='totalActiveUsers', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_lifetime_used', full_name='depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.total_lifetime_used', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='totalLifetimeUsed', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_GETUSERBASESTATISTICSRESPONSE_USERSPERDAYENTRY, _GETUSERBASESTATISTICSRESPONSE_SESSIONSPERDAYENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18507,
serialized_end=19052,
)
# --- Post-construction wiring (generated). These statements connect the
# --- Descriptor objects built above: message-typed fields get their
# --- message_type, enum fields their enum_type, nested/enum types their
# --- containing_type, and oneof members their containing_oneof.
# Identity messages: Struct payloads, Timestamp audit fields, Lock enums.
_CREATEIDENTITYREQUEST.fields_by_name['data'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETIDENTITIESRESPONSE_IDENTITY.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITIESRESPONSE_IDENTITY.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITIESRESPONSE_IDENTITY.fields_by_name['lock'].enum_type = _GETIDENTITIESRESPONSE_LOCK
_GETIDENTITIESRESPONSE_IDENTITY.containing_type = _GETIDENTITIESRESPONSE
_GETIDENTITIESRESPONSE.fields_by_name['identities'].message_type = _GETIDENTITIESRESPONSE_IDENTITY
_GETIDENTITIESRESPONSE_LOCK.containing_type = _GETIDENTITIESRESPONSE
_GETIDENTITYRESPONSE_CREDENTIALSIDSENTRY.containing_type = _GETIDENTITYRESPONSE
_GETIDENTITYRESPONSE.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITYRESPONSE.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITYRESPONSE.fields_by_name['credentials_ids'].message_type = _GETIDENTITYRESPONSE_CREDENTIALSIDSENTRY
_GETIDENTITYRESPONSE.fields_by_name['lock'].enum_type = _GETIDENTITYRESPONSE_LOCK
_GETIDENTITYRESPONSE_LOCK.containing_type = _GETIDENTITYRESPONSE
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_CREDENTIALSIDSENTRY.containing_type = _GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY.fields_by_name['credentials_ids'].message_type = _GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_CREDENTIALSIDSENTRY
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY.fields_by_name['lock'].enum_type = _GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_LOCK
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY.containing_type = _GETIDENTITIESBYATTRIBUTERESPONSE
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_LOCK.containing_type = _GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY
_GETIDENTITIESBYATTRIBUTERESPONSE.fields_by_name['identities'].message_type = _GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY
_UPDATEIDENTITYREQUEST.fields_by_name['lock'].enum_type = _UPDATEIDENTITYREQUEST_LOCK
_UPDATEIDENTITYREQUEST_LOCK.containing_type = _UPDATEIDENTITYREQUEST
# Credential messages: Timestamp audit fields, credentials map, Struct data.
_GETCREDENTIALSRESPONSE_CREDENTIAL.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETCREDENTIALSRESPONSE_CREDENTIAL.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETCREDENTIALSRESPONSE_CREDENTIAL.containing_type = _GETCREDENTIALSRESPONSE
_GETCREDENTIALSRESPONSE_CREDENTIALSENTRY.fields_by_name['value'].message_type = _GETCREDENTIALSRESPONSE_CREDENTIAL
_GETCREDENTIALSRESPONSE_CREDENTIALSENTRY.containing_type = _GETCREDENTIALSRESPONSE
_GETCREDENTIALSRESPONSE.fields_by_name['credentials'].message_type = _GETCREDENTIALSRESPONSE_CREDENTIALSENTRY
_UPDATECREDENTIALREQUEST.fields_by_name['data'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
# Connection messages: Providers/Types/Purposes enums.
_CREATECONNECTIONREQUEST.fields_by_name['provider'].enum_type = _CREATECONNECTIONREQUEST_PROVIDERS
_CREATECONNECTIONREQUEST.fields_by_name['type'].enum_type = _CREATECONNECTIONREQUEST_TYPES
_CREATECONNECTIONREQUEST_PROVIDERS.containing_type = _CREATECONNECTIONREQUEST
_CREATECONNECTIONREQUEST_TYPES.containing_type = _CREATECONNECTIONREQUEST
_GETCONNECTIONSRESPONSE_CONNECTION.fields_by_name['provider'].enum_type = _GETCONNECTIONSRESPONSE_PROVIDERS
_GETCONNECTIONSRESPONSE_CONNECTION.fields_by_name['type'].enum_type = _GETCONNECTIONSRESPONSE_TYPES
_GETCONNECTIONSRESPONSE_CONNECTION.fields_by_name['purpose'].enum_type = _GETCONNECTIONSRESPONSE_PURPOSES
_GETCONNECTIONSRESPONSE_CONNECTION.containing_type = _GETCONNECTIONSRESPONSE
_GETCONNECTIONSRESPONSE.fields_by_name['connections'].message_type = _GETCONNECTIONSRESPONSE_CONNECTION
_GETCONNECTIONSRESPONSE_PROVIDERS.containing_type = _GETCONNECTIONSRESPONSE
_GETCONNECTIONSRESPONSE_TYPES.containing_type = _GETCONNECTIONSRESPONSE
_GETCONNECTIONSRESPONSE_PURPOSES.containing_type = _GETCONNECTIONSRESPONSE
_UPDATECONNECTIONREQUEST.fields_by_name['provider'].enum_type = _UPDATECONNECTIONREQUEST_PROVIDERS
_UPDATECONNECTIONREQUEST.fields_by_name['type'].enum_type = _UPDATECONNECTIONREQUEST_TYPES
_UPDATECONNECTIONREQUEST_PROVIDERS.containing_type = _UPDATECONNECTIONREQUEST
_UPDATECONNECTIONREQUEST_TYPES.containing_type = _UPDATECONNECTIONREQUEST
# OAuth2 client messages: shared file-level enums (grant types, signing algs,
# response/subject/client types) plus Struct jwks/metadata and Timestamps.
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['grant_types'].enum_type = _GRANTTYPE
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['jwks'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['request_object_signing_alg'].enum_type = _REQUESTOBJECTSIGNINGALG
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['response_types'].enum_type = _RESPONSETYPE
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['subject_type'].enum_type = _SUBJECTTYPE
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['token_endpoint_auth_method'].enum_type = _TOKENENDPOINTAUTHMETHOD
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['token_endpoint_auth_signing_alg'].enum_type = _TOKENENDPOINTAUTHSIGNINGALG
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['userinfo_signed_response_alg'].enum_type = _USERINFOSIGNEDRESPONSEALG
_GETOAUTH2CLIENTSRESPONSE_CLIENT.fields_by_name['client_type'].enum_type = _CLIENTTYPE
_GETOAUTH2CLIENTSRESPONSE_CLIENT.containing_type = _GETOAUTH2CLIENTSRESPONSE
_GETOAUTH2CLIENTSRESPONSE.fields_by_name['clients'].message_type = _GETOAUTH2CLIENTSRESPONSE_CLIENT
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['grant_types'].enum_type = _GRANTTYPE
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['jwks'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['request_object_signing_alg'].enum_type = _REQUESTOBJECTSIGNINGALG
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['response_types'].enum_type = _RESPONSETYPE
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['subject_type'].enum_type = _SUBJECTTYPE
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['token_endpoint_auth_method'].enum_type = _TOKENENDPOINTAUTHMETHOD
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['token_endpoint_auth_signing_alg'].enum_type = _TOKENENDPOINTAUTHSIGNINGALG
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['userinfo_signed_response_alg'].enum_type = _USERINFOSIGNEDRESPONSEALG
_CREATEOAUTH2CLIENTREQUEST.fields_by_name['client_type'].enum_type = _CLIENTTYPE
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['grant_types'].enum_type = _GRANTTYPE
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['jwks'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['request_object_signing_alg'].enum_type = _REQUESTOBJECTSIGNINGALG
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['response_types'].enum_type = _RESPONSETYPE
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['subject_type'].enum_type = _SUBJECTTYPE
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['token_endpoint_auth_method'].enum_type = _TOKENENDPOINTAUTHMETHOD
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['token_endpoint_auth_signing_alg'].enum_type = _TOKENENDPOINTAUTHSIGNINGALG
_UPDATEOAUTH2CLIENTREQUEST.fields_by_name['userinfo_signed_response_alg'].enum_type = _USERINFOSIGNEDRESPONSEALG
# Address messages: nested Address list and oneof membership registration
# (each oneof field is appended to its oneof and back-linked via
# containing_oneof).
_GETADDRESSESRESPONSE_ADDRESS.containing_type = _GETADDRESSESRESPONSE
_GETADDRESSESRESPONSE.fields_by_name['addresses'].message_type = _GETADDRESSESRESPONSE_ADDRESS
_GETADDRESSREQUEST.oneofs_by_name['match'].fields.append(
_GETADDRESSREQUEST.fields_by_name['id'])
_GETADDRESSREQUEST.fields_by_name['id'].containing_oneof = _GETADDRESSREQUEST.oneofs_by_name['match']
_GETADDRESSREQUEST.oneofs_by_name['match'].fields.append(
_GETADDRESSREQUEST.fields_by_name['verification'])
_GETADDRESSREQUEST.fields_by_name['verification'].containing_oneof = _GETADDRESSREQUEST.oneofs_by_name['match']
_UPDATEADDRESSREQUEST.oneofs_by_name['attribute'].fields.append(
_UPDATEADDRESSREQUEST.fields_by_name['verified'])
_UPDATEADDRESSREQUEST.fields_by_name['verified'].containing_oneof = _UPDATEADDRESSREQUEST.oneofs_by_name['attribute']
_UPDATEADDRESSREQUEST.oneofs_by_name['attribute'].fields.append(
_UPDATEADDRESSREQUEST.fields_by_name['address'])
_UPDATEADDRESSREQUEST.fields_by_name['address'].containing_oneof = _UPDATEADDRESSREQUEST.oneofs_by_name['attribute']
_GETIDENTITYLOGINATTEMPTSRESPONSE_ATTEMPT.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITYLOGINATTEMPTSRESPONSE_ATTEMPT.fields_by_name['expires_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDENTITYLOGINATTEMPTSRESPONSE_ATTEMPT.containing_type = _GETIDENTITYLOGINATTEMPTSRESPONSE
_GETIDENTITYLOGINATTEMPTSRESPONSE.fields_by_name['attempts'].message_type = _GETIDENTITYLOGINATTEMPTSRESPONSE_ATTEMPT
_CREATEIDSCHEMAREQUEST.fields_by_name['content'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETIDSCHEMASRESPONSE_JSONSCHEMA.fields_by_name['content'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETIDSCHEMASRESPONSE_JSONSCHEMA.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDSCHEMASRESPONSE_JSONSCHEMA.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDSCHEMASRESPONSE_JSONSCHEMA.containing_type = _GETIDSCHEMASRESPONSE
_GETIDSCHEMASRESPONSE.fields_by_name['jsonschemas'].message_type = _GETIDSCHEMASRESPONSE_JSONSCHEMA
_GETIDSCHEMARESPONSE.fields_by_name['content'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETIDSCHEMARESPONSE.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETIDSCHEMARESPONSE.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETDEFAULTIDSCHEMARESPONSE.fields_by_name['content'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETDEFAULTIDSCHEMARESPONSE.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETDEFAULTIDSCHEMARESPONSE.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_UPDATEIDSCHEMAREQUEST.fields_by_name['content'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_GETEMAILSSETUPRESPONSE_EMAILTEMPLATE.containing_type = _GETEMAILSSETUPRESPONSE
_GETEMAILSSETUPRESPONSE_EMAILSENDER.containing_type = _GETEMAILSSETUPRESPONSE
_GETEMAILSSETUPRESPONSE.fields_by_name['welcome'].message_type = _GETEMAILSSETUPRESPONSE_EMAILTEMPLATE
_GETEMAILSSETUPRESPONSE.fields_by_name['account_verification'].message_type = _GETEMAILSSETUPRESPONSE_EMAILTEMPLATE
_GETEMAILSSETUPRESPONSE.fields_by_name['account_recovery'].message_type = _GETEMAILSSETUPRESPONSE_EMAILTEMPLATE
_GETEMAILSSETUPRESPONSE.fields_by_name['account_recovered'].message_type = _GETEMAILSSETUPRESPONSE_EMAILTEMPLATE
_GETEMAILSSETUPRESPONSE.fields_by_name['otp'].message_type = _GETEMAILSSETUPRESPONSE_EMAILTEMPLATE
_GETEMAILSSETUPRESPONSE.fields_by_name['smtp'].message_type = _GETEMAILSSETUPRESPONSE_EMAILSENDER
_UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE.containing_type = _UPDATEEMAILSSETUPREQUEST
_UPDATEEMAILSSETUPREQUEST_EMAILSENDER.containing_type = _UPDATEEMAILSSETUPREQUEST
_UPDATEEMAILSSETUPREQUEST.fields_by_name['welcome'].message_type = _UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE
_UPDATEEMAILSSETUPREQUEST.fields_by_name['account_verification'].message_type = _UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE
_UPDATEEMAILSSETUPREQUEST.fields_by_name['account_recovery'].message_type = _UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE
_UPDATEEMAILSSETUPREQUEST.fields_by_name['account_recovered'].message_type = _UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE
_UPDATEEMAILSSETUPREQUEST.fields_by_name['otp'].message_type = _UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE
_UPDATEEMAILSSETUPREQUEST.fields_by_name['smtp'].message_type = _UPDATEEMAILSSETUPREQUEST_EMAILSENDER
_GETUSERBASESTATISTICSREQUEST.fields_by_name['lifetime_start_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETUSERBASESTATISTICSREQUEST.fields_by_name['lifetime_end_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_GETUSERBASESTATISTICSRESPONSE_USERSPERDAYENTRY.containing_type = _GETUSERBASESTATISTICSRESPONSE
_GETUSERBASESTATISTICSRESPONSE_SESSIONSPERDAYENTRY.containing_type = _GETUSERBASESTATISTICSRESPONSE
_GETUSERBASESTATISTICSRESPONSE.fields_by_name['users_per_day'].message_type = _GETUSERBASESTATISTICSRESPONSE_USERSPERDAYENTRY
_GETUSERBASESTATISTICSRESPONSE.fields_by_name['sessions_per_day'].message_type = _GETUSERBASESTATISTICSRESPONSE_SESSIONSPERDAYENTRY
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Register every top-level message and enum type on the file DESCRIPTOR by
# name, then publish the fully-wired descriptor to the default symbol
# database so the reflection machinery below can build message classes.
# ---------------------------------------------------------------------------
DESCRIPTOR.message_types_by_name['CreateIdentityRequest'] = _CREATEIDENTITYREQUEST
DESCRIPTOR.message_types_by_name['CreateIdentityResponse'] = _CREATEIDENTITYRESPONSE
DESCRIPTOR.message_types_by_name['GetIdentitiesRequest'] = _GETIDENTITIESREQUEST
DESCRIPTOR.message_types_by_name['GetIdentitiesResponse'] = _GETIDENTITIESRESPONSE
DESCRIPTOR.message_types_by_name['GetIdentityRequest'] = _GETIDENTITYREQUEST
DESCRIPTOR.message_types_by_name['GetIdentityResponse'] = _GETIDENTITYRESPONSE
DESCRIPTOR.message_types_by_name['GetIdentitiesByAttributeRequest'] = _GETIDENTITIESBYATTRIBUTEREQUEST
DESCRIPTOR.message_types_by_name['GetIdentitiesByAttributeResponse'] = _GETIDENTITIESBYATTRIBUTERESPONSE
DESCRIPTOR.message_types_by_name['UpdateIdentityRequest'] = _UPDATEIDENTITYREQUEST
DESCRIPTOR.message_types_by_name['UpdateIdentityResponse'] = _UPDATEIDENTITYRESPONSE
DESCRIPTOR.message_types_by_name['DeleteIdentityRequest'] = _DELETEIDENTITYREQUEST
DESCRIPTOR.message_types_by_name['DeleteIdentityResponse'] = _DELETEIDENTITYRESPONSE
DESCRIPTOR.message_types_by_name['GetCredentialsRequest'] = _GETCREDENTIALSREQUEST
DESCRIPTOR.message_types_by_name['GetCredentialsResponse'] = _GETCREDENTIALSRESPONSE
DESCRIPTOR.message_types_by_name['UpdateCredentialRequest'] = _UPDATECREDENTIALREQUEST
DESCRIPTOR.message_types_by_name['UpdateCredentialResponse'] = _UPDATECREDENTIALRESPONSE
DESCRIPTOR.message_types_by_name['CreateConnectionRequest'] = _CREATECONNECTIONREQUEST
DESCRIPTOR.message_types_by_name['CreateConnectionResponse'] = _CREATECONNECTIONRESPONSE
DESCRIPTOR.message_types_by_name['GetConnectionsRequest'] = _GETCONNECTIONSREQUEST
DESCRIPTOR.message_types_by_name['GetConnectionsResponse'] = _GETCONNECTIONSRESPONSE
DESCRIPTOR.message_types_by_name['UpdateConnectionRequest'] = _UPDATECONNECTIONREQUEST
DESCRIPTOR.message_types_by_name['UpdateConnectionResponse'] = _UPDATECONNECTIONRESPONSE
DESCRIPTOR.message_types_by_name['DeleteConnectionRequest'] = _DELETECONNECTIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteConnectionResponse'] = _DELETECONNECTIONRESPONSE
DESCRIPTOR.message_types_by_name['GetOAuth2ClientsRequest'] = _GETOAUTH2CLIENTSREQUEST
DESCRIPTOR.message_types_by_name['GetOAuth2ClientsResponse'] = _GETOAUTH2CLIENTSRESPONSE
DESCRIPTOR.message_types_by_name['CreateOAuth2ClientRequest'] = _CREATEOAUTH2CLIENTREQUEST
DESCRIPTOR.message_types_by_name['CreateOAuth2ClientResponse'] = _CREATEOAUTH2CLIENTRESPONSE
DESCRIPTOR.message_types_by_name['UpdateOAuth2ClientRequest'] = _UPDATEOAUTH2CLIENTREQUEST
DESCRIPTOR.message_types_by_name['UpdateOAuth2ClientResponse'] = _UPDATEOAUTH2CLIENTRESPONSE
DESCRIPTOR.message_types_by_name['DeleteOAuth2ClientRequest'] = _DELETEOAUTH2CLIENTREQUEST
DESCRIPTOR.message_types_by_name['DeleteOAuth2ClientResponse'] = _DELETEOAUTH2CLIENTRESPONSE
DESCRIPTOR.message_types_by_name['GetAddressesRequest'] = _GETADDRESSESREQUEST
DESCRIPTOR.message_types_by_name['GetAddressesResponse'] = _GETADDRESSESRESPONSE
DESCRIPTOR.message_types_by_name['GetAddressRequest'] = _GETADDRESSREQUEST
DESCRIPTOR.message_types_by_name['GetAddressResponse'] = _GETADDRESSRESPONSE
DESCRIPTOR.message_types_by_name['UpdateAddressRequest'] = _UPDATEADDRESSREQUEST
DESCRIPTOR.message_types_by_name['UpdateAddressResponse'] = _UPDATEADDRESSRESPONSE
DESCRIPTOR.message_types_by_name['GetTraitsRequest'] = _GETTRAITSREQUEST
DESCRIPTOR.message_types_by_name['GetTraitsResponse'] = _GETTRAITSRESPONSE
DESCRIPTOR.message_types_by_name['UpdateTraitsRequest'] = _UPDATETRAITSREQUEST
DESCRIPTOR.message_types_by_name['UpdateTraitsResponse'] = _UPDATETRAITSRESPONSE
DESCRIPTOR.message_types_by_name['GetIdentityLoginAttemptsRequest'] = _GETIDENTITYLOGINATTEMPTSREQUEST
DESCRIPTOR.message_types_by_name['GetIdentityLoginAttemptsResponse'] = _GETIDENTITYLOGINATTEMPTSRESPONSE
DESCRIPTOR.message_types_by_name['CreateIdSchemaRequest'] = _CREATEIDSCHEMAREQUEST
DESCRIPTOR.message_types_by_name['CreateIdSchemaResponse'] = _CREATEIDSCHEMARESPONSE
DESCRIPTOR.message_types_by_name['GetIdSchemasRequest'] = _GETIDSCHEMASREQUEST
DESCRIPTOR.message_types_by_name['GetIdSchemasResponse'] = _GETIDSCHEMASRESPONSE
DESCRIPTOR.message_types_by_name['GetIdSchemaRequest'] = _GETIDSCHEMAREQUEST
DESCRIPTOR.message_types_by_name['GetIdSchemaResponse'] = _GETIDSCHEMARESPONSE
DESCRIPTOR.message_types_by_name['GetDefaultIdSchemaRequest'] = _GETDEFAULTIDSCHEMAREQUEST
DESCRIPTOR.message_types_by_name['GetDefaultIdSchemaResponse'] = _GETDEFAULTIDSCHEMARESPONSE
DESCRIPTOR.message_types_by_name['UpdateIdSchemaRequest'] = _UPDATEIDSCHEMAREQUEST
DESCRIPTOR.message_types_by_name['UpdateIdSchemaResponse'] = _UPDATEIDSCHEMARESPONSE
DESCRIPTOR.message_types_by_name['MarkDefaultIdSchemaRequest'] = _MARKDEFAULTIDSCHEMAREQUEST
DESCRIPTOR.message_types_by_name['MarkDefaultIdSchemaResponse'] = _MARKDEFAULTIDSCHEMARESPONSE
DESCRIPTOR.message_types_by_name['DeleteIdSchemaRequest'] = _DELETEIDSCHEMAREQUEST
DESCRIPTOR.message_types_by_name['DeleteIdSchemaResponse'] = _DELETEIDSCHEMARESPONSE
DESCRIPTOR.message_types_by_name['GetEmailsSetupRequest'] = _GETEMAILSSETUPREQUEST
DESCRIPTOR.message_types_by_name['GetEmailsSetupResponse'] = _GETEMAILSSETUPRESPONSE
DESCRIPTOR.message_types_by_name['UpdateEmailsSetupRequest'] = _UPDATEEMAILSSETUPREQUEST
DESCRIPTOR.message_types_by_name['UpdateEmailsSetupResponse'] = _UPDATEEMAILSSETUPRESPONSE
DESCRIPTOR.message_types_by_name['GetUserBaseStatisticsRequest'] = _GETUSERBASESTATISTICSREQUEST
DESCRIPTOR.message_types_by_name['GetUserBaseStatisticsResponse'] = _GETUSERBASESTATISTICSRESPONSE
# File-level enum types (OAuth2 client configuration enums).
DESCRIPTOR.enum_types_by_name['ClientType'] = _CLIENTTYPE
DESCRIPTOR.enum_types_by_name['SubjectType'] = _SUBJECTTYPE
DESCRIPTOR.enum_types_by_name['TokenEndpointAuthMethod'] = _TOKENENDPOINTAUTHMETHOD
DESCRIPTOR.enum_types_by_name['TokenEndpointAuthSigningAlg'] = _TOKENENDPOINTAUTHSIGNINGALG
DESCRIPTOR.enum_types_by_name['GrantType'] = _GRANTTYPE
DESCRIPTOR.enum_types_by_name['ResponseType'] = _RESPONSETYPE
DESCRIPTOR.enum_types_by_name['RequestObjectSigningAlg'] = _REQUESTOBJECTSIGNINGALG
DESCRIPTOR.enum_types_by_name['UserinfoSignedResponseAlg'] = _USERINFOSIGNEDRESPONSEALG
# Publish the descriptor; must happen after all wiring above is complete.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Build the concrete message classes for the identity CRUD RPCs via the
# protobuf reflection metaclass, then register each (including nested
# messages and map entries) with the default symbol database.
# ---------------------------------------------------------------------------
CreateIdentityRequest = _reflection.GeneratedProtocolMessageType('CreateIdentityRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEIDENTITYREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateIdentityRequest)
})
_sym_db.RegisterMessage(CreateIdentityRequest)
CreateIdentityResponse = _reflection.GeneratedProtocolMessageType('CreateIdentityResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEIDENTITYRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateIdentityResponse)
})
_sym_db.RegisterMessage(CreateIdentityResponse)
GetIdentitiesRequest = _reflection.GeneratedProtocolMessageType('GetIdentitiesRequest', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITIESREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentitiesRequest)
})
_sym_db.RegisterMessage(GetIdentitiesRequest)
# GetIdentitiesResponse carries a nested Identity message class.
GetIdentitiesResponse = _reflection.GeneratedProtocolMessageType('GetIdentitiesResponse', (_message.Message,), {
'Identity' : _reflection.GeneratedProtocolMessageType('Identity', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITIESRESPONSE_IDENTITY,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse.Identity)
})
,
'DESCRIPTOR' : _GETIDENTITIESRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentitiesResponse)
})
_sym_db.RegisterMessage(GetIdentitiesResponse)
_sym_db.RegisterMessage(GetIdentitiesResponse.Identity)
GetIdentityRequest = _reflection.GeneratedProtocolMessageType('GetIdentityRequest', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITYREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentityRequest)
})
_sym_db.RegisterMessage(GetIdentityRequest)
# GetIdentityResponse carries a nested map-entry class for credentials_ids.
GetIdentityResponse = _reflection.GeneratedProtocolMessageType('GetIdentityResponse', (_message.Message,), {
'CredentialsIdsEntry' : _reflection.GeneratedProtocolMessageType('CredentialsIdsEntry', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITYRESPONSE_CREDENTIALSIDSENTRY,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentityResponse.CredentialsIdsEntry)
})
,
'DESCRIPTOR' : _GETIDENTITYRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentityResponse)
})
_sym_db.RegisterMessage(GetIdentityResponse)
_sym_db.RegisterMessage(GetIdentityResponse.CredentialsIdsEntry)
GetIdentitiesByAttributeRequest = _reflection.GeneratedProtocolMessageType('GetIdentitiesByAttributeRequest', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITIESBYATTRIBUTEREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeRequest)
})
_sym_db.RegisterMessage(GetIdentitiesByAttributeRequest)
# Doubly-nested: Identity, which itself holds a CredentialsIdsEntry map entry.
GetIdentitiesByAttributeResponse = _reflection.GeneratedProtocolMessageType('GetIdentitiesByAttributeResponse', (_message.Message,), {
'Identity' : _reflection.GeneratedProtocolMessageType('Identity', (_message.Message,), {
'CredentialsIdsEntry' : _reflection.GeneratedProtocolMessageType('CredentialsIdsEntry', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_CREDENTIALSIDSENTRY,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity.CredentialsIdsEntry)
})
,
'DESCRIPTOR' : _GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse.Identity)
})
,
'DESCRIPTOR' : _GETIDENTITIESBYATTRIBUTERESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentitiesByAttributeResponse)
})
_sym_db.RegisterMessage(GetIdentitiesByAttributeResponse)
_sym_db.RegisterMessage(GetIdentitiesByAttributeResponse.Identity)
_sym_db.RegisterMessage(GetIdentitiesByAttributeResponse.Identity.CredentialsIdsEntry)
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Message classes for identity update/delete and credential RPCs.
# ---------------------------------------------------------------------------
UpdateIdentityRequest = _reflection.GeneratedProtocolMessageType('UpdateIdentityRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEIDENTITYREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateIdentityRequest)
})
_sym_db.RegisterMessage(UpdateIdentityRequest)
UpdateIdentityResponse = _reflection.GeneratedProtocolMessageType('UpdateIdentityResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEIDENTITYRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateIdentityResponse)
})
_sym_db.RegisterMessage(UpdateIdentityResponse)
DeleteIdentityRequest = _reflection.GeneratedProtocolMessageType('DeleteIdentityRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEIDENTITYREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteIdentityRequest)
})
_sym_db.RegisterMessage(DeleteIdentityRequest)
DeleteIdentityResponse = _reflection.GeneratedProtocolMessageType('DeleteIdentityResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEIDENTITYRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteIdentityResponse)
})
_sym_db.RegisterMessage(DeleteIdentityResponse)
GetCredentialsRequest = _reflection.GeneratedProtocolMessageType('GetCredentialsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETCREDENTIALSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetCredentialsRequest)
})
_sym_db.RegisterMessage(GetCredentialsRequest)
# GetCredentialsResponse nests a Credential message and a CredentialsEntry
# map-entry class.
GetCredentialsResponse = _reflection.GeneratedProtocolMessageType('GetCredentialsResponse', (_message.Message,), {
'Credential' : _reflection.GeneratedProtocolMessageType('Credential', (_message.Message,), {
'DESCRIPTOR' : _GETCREDENTIALSRESPONSE_CREDENTIAL,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.Credential)
})
,
'CredentialsEntry' : _reflection.GeneratedProtocolMessageType('CredentialsEntry', (_message.Message,), {
'DESCRIPTOR' : _GETCREDENTIALSRESPONSE_CREDENTIALSENTRY,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetCredentialsResponse.CredentialsEntry)
})
,
'DESCRIPTOR' : _GETCREDENTIALSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetCredentialsResponse)
})
_sym_db.RegisterMessage(GetCredentialsResponse)
_sym_db.RegisterMessage(GetCredentialsResponse.Credential)
_sym_db.RegisterMessage(GetCredentialsResponse.CredentialsEntry)
UpdateCredentialRequest = _reflection.GeneratedProtocolMessageType('UpdateCredentialRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATECREDENTIALREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateCredentialRequest)
})
_sym_db.RegisterMessage(UpdateCredentialRequest)
UpdateCredentialResponse = _reflection.GeneratedProtocolMessageType('UpdateCredentialResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATECREDENTIALRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateCredentialResponse)
})
_sym_db.RegisterMessage(UpdateCredentialResponse)
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Message classes for connection CRUD and OAuth2 client listing RPCs.
# ---------------------------------------------------------------------------
CreateConnectionRequest = _reflection.GeneratedProtocolMessageType('CreateConnectionRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATECONNECTIONREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateConnectionRequest)
})
_sym_db.RegisterMessage(CreateConnectionRequest)
CreateConnectionResponse = _reflection.GeneratedProtocolMessageType('CreateConnectionResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATECONNECTIONRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateConnectionResponse)
})
_sym_db.RegisterMessage(CreateConnectionResponse)
GetConnectionsRequest = _reflection.GeneratedProtocolMessageType('GetConnectionsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETCONNECTIONSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetConnectionsRequest)
})
_sym_db.RegisterMessage(GetConnectionsRequest)
# GetConnectionsResponse nests a Connection message class.
GetConnectionsResponse = _reflection.GeneratedProtocolMessageType('GetConnectionsResponse', (_message.Message,), {
'Connection' : _reflection.GeneratedProtocolMessageType('Connection', (_message.Message,), {
'DESCRIPTOR' : _GETCONNECTIONSRESPONSE_CONNECTION,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetConnectionsResponse.Connection)
})
,
'DESCRIPTOR' : _GETCONNECTIONSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetConnectionsResponse)
})
_sym_db.RegisterMessage(GetConnectionsResponse)
_sym_db.RegisterMessage(GetConnectionsResponse.Connection)
UpdateConnectionRequest = _reflection.GeneratedProtocolMessageType('UpdateConnectionRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATECONNECTIONREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateConnectionRequest)
})
_sym_db.RegisterMessage(UpdateConnectionRequest)
UpdateConnectionResponse = _reflection.GeneratedProtocolMessageType('UpdateConnectionResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATECONNECTIONRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateConnectionResponse)
})
_sym_db.RegisterMessage(UpdateConnectionResponse)
DeleteConnectionRequest = _reflection.GeneratedProtocolMessageType('DeleteConnectionRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETECONNECTIONREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteConnectionRequest)
})
_sym_db.RegisterMessage(DeleteConnectionRequest)
DeleteConnectionResponse = _reflection.GeneratedProtocolMessageType('DeleteConnectionResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETECONNECTIONRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteConnectionResponse)
})
_sym_db.RegisterMessage(DeleteConnectionResponse)
GetOAuth2ClientsRequest = _reflection.GeneratedProtocolMessageType('GetOAuth2ClientsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETOAUTH2CLIENTSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsRequest)
})
_sym_db.RegisterMessage(GetOAuth2ClientsRequest)
# GetOAuth2ClientsResponse nests a Client message class.
GetOAuth2ClientsResponse = _reflection.GeneratedProtocolMessageType('GetOAuth2ClientsResponse', (_message.Message,), {
'Client' : _reflection.GeneratedProtocolMessageType('Client', (_message.Message,), {
'DESCRIPTOR' : _GETOAUTH2CLIENTSRESPONSE_CLIENT,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse.Client)
})
,
'DESCRIPTOR' : _GETOAUTH2CLIENTSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetOAuth2ClientsResponse)
})
_sym_db.RegisterMessage(GetOAuth2ClientsResponse)
_sym_db.RegisterMessage(GetOAuth2ClientsResponse.Client)
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Message classes for OAuth2 client create/update/delete and address RPCs.
# ---------------------------------------------------------------------------
CreateOAuth2ClientRequest = _reflection.GeneratedProtocolMessageType('CreateOAuth2ClientRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEOAUTH2CLIENTREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientRequest)
})
_sym_db.RegisterMessage(CreateOAuth2ClientRequest)
CreateOAuth2ClientResponse = _reflection.GeneratedProtocolMessageType('CreateOAuth2ClientResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEOAUTH2CLIENTRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateOAuth2ClientResponse)
})
_sym_db.RegisterMessage(CreateOAuth2ClientResponse)
UpdateOAuth2ClientRequest = _reflection.GeneratedProtocolMessageType('UpdateOAuth2ClientRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEOAUTH2CLIENTREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientRequest)
})
_sym_db.RegisterMessage(UpdateOAuth2ClientRequest)
UpdateOAuth2ClientResponse = _reflection.GeneratedProtocolMessageType('UpdateOAuth2ClientResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEOAUTH2CLIENTRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateOAuth2ClientResponse)
})
_sym_db.RegisterMessage(UpdateOAuth2ClientResponse)
DeleteOAuth2ClientRequest = _reflection.GeneratedProtocolMessageType('DeleteOAuth2ClientRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEOAUTH2CLIENTREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteOAuth2ClientRequest)
})
_sym_db.RegisterMessage(DeleteOAuth2ClientRequest)
DeleteOAuth2ClientResponse = _reflection.GeneratedProtocolMessageType('DeleteOAuth2ClientResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEOAUTH2CLIENTRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteOAuth2ClientResponse)
})
_sym_db.RegisterMessage(DeleteOAuth2ClientResponse)
GetAddressesRequest = _reflection.GeneratedProtocolMessageType('GetAddressesRequest', (_message.Message,), {
'DESCRIPTOR' : _GETADDRESSESREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetAddressesRequest)
})
_sym_db.RegisterMessage(GetAddressesRequest)
# GetAddressesResponse nests an Address message class.
GetAddressesResponse = _reflection.GeneratedProtocolMessageType('GetAddressesResponse', (_message.Message,), {
'Address' : _reflection.GeneratedProtocolMessageType('Address', (_message.Message,), {
'DESCRIPTOR' : _GETADDRESSESRESPONSE_ADDRESS,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetAddressesResponse.Address)
})
,
'DESCRIPTOR' : _GETADDRESSESRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetAddressesResponse)
})
_sym_db.RegisterMessage(GetAddressesResponse)
_sym_db.RegisterMessage(GetAddressesResponse.Address)
GetAddressRequest = _reflection.GeneratedProtocolMessageType('GetAddressRequest', (_message.Message,), {
'DESCRIPTOR' : _GETADDRESSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetAddressRequest)
})
_sym_db.RegisterMessage(GetAddressRequest)
GetAddressResponse = _reflection.GeneratedProtocolMessageType('GetAddressResponse', (_message.Message,), {
'DESCRIPTOR' : _GETADDRESSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetAddressResponse)
})
_sym_db.RegisterMessage(GetAddressResponse)
UpdateAddressRequest = _reflection.GeneratedProtocolMessageType('UpdateAddressRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEADDRESSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateAddressRequest)
})
_sym_db.RegisterMessage(UpdateAddressRequest)
UpdateAddressResponse = _reflection.GeneratedProtocolMessageType('UpdateAddressResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEADDRESSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateAddressResponse)
})
_sym_db.RegisterMessage(UpdateAddressResponse)
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Message classes for traits, login-attempt, and id-schema RPCs.
# ---------------------------------------------------------------------------
GetTraitsRequest = _reflection.GeneratedProtocolMessageType('GetTraitsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETTRAITSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetTraitsRequest)
})
_sym_db.RegisterMessage(GetTraitsRequest)
GetTraitsResponse = _reflection.GeneratedProtocolMessageType('GetTraitsResponse', (_message.Message,), {
'DESCRIPTOR' : _GETTRAITSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetTraitsResponse)
})
_sym_db.RegisterMessage(GetTraitsResponse)
UpdateTraitsRequest = _reflection.GeneratedProtocolMessageType('UpdateTraitsRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATETRAITSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateTraitsRequest)
})
_sym_db.RegisterMessage(UpdateTraitsRequest)
UpdateTraitsResponse = _reflection.GeneratedProtocolMessageType('UpdateTraitsResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATETRAITSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateTraitsResponse)
})
_sym_db.RegisterMessage(UpdateTraitsResponse)
GetIdentityLoginAttemptsRequest = _reflection.GeneratedProtocolMessageType('GetIdentityLoginAttemptsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITYLOGINATTEMPTSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsRequest)
})
_sym_db.RegisterMessage(GetIdentityLoginAttemptsRequest)
# GetIdentityLoginAttemptsResponse nests an Attempt message class.
GetIdentityLoginAttemptsResponse = _reflection.GeneratedProtocolMessageType('GetIdentityLoginAttemptsResponse', (_message.Message,), {
'Attempt' : _reflection.GeneratedProtocolMessageType('Attempt', (_message.Message,), {
'DESCRIPTOR' : _GETIDENTITYLOGINATTEMPTSRESPONSE_ATTEMPT,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse.Attempt)
})
,
'DESCRIPTOR' : _GETIDENTITYLOGINATTEMPTSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdentityLoginAttemptsResponse)
})
_sym_db.RegisterMessage(GetIdentityLoginAttemptsResponse)
_sym_db.RegisterMessage(GetIdentityLoginAttemptsResponse.Attempt)
CreateIdSchemaRequest = _reflection.GeneratedProtocolMessageType('CreateIdSchemaRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEIDSCHEMAREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateIdSchemaRequest)
})
_sym_db.RegisterMessage(CreateIdSchemaRequest)
CreateIdSchemaResponse = _reflection.GeneratedProtocolMessageType('CreateIdSchemaResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEIDSCHEMARESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.CreateIdSchemaResponse)
})
_sym_db.RegisterMessage(CreateIdSchemaResponse)
GetIdSchemasRequest = _reflection.GeneratedProtocolMessageType('GetIdSchemasRequest', (_message.Message,), {
'DESCRIPTOR' : _GETIDSCHEMASREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdSchemasRequest)
})
_sym_db.RegisterMessage(GetIdSchemasRequest)
GetIdSchemasResponse = _reflection.GeneratedProtocolMessageType('GetIdSchemasResponse', (_message.Message,), {
'JsonSchema' : _reflection.GeneratedProtocolMessageType('JsonSchema', (_message.Message,), {
'DESCRIPTOR' : _GETIDSCHEMASRESPONSE_JSONSCHEMA,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse.JsonSchema)
})
,
'DESCRIPTOR' : _GETIDSCHEMASRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdSchemasResponse)
})
_sym_db.RegisterMessage(GetIdSchemasResponse)
_sym_db.RegisterMessage(GetIdSchemasResponse.JsonSchema)
GetIdSchemaRequest = _reflection.GeneratedProtocolMessageType('GetIdSchemaRequest', (_message.Message,), {
'DESCRIPTOR' : _GETIDSCHEMAREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdSchemaRequest)
})
_sym_db.RegisterMessage(GetIdSchemaRequest)
GetIdSchemaResponse = _reflection.GeneratedProtocolMessageType('GetIdSchemaResponse', (_message.Message,), {
'DESCRIPTOR' : _GETIDSCHEMARESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetIdSchemaResponse)
})
_sym_db.RegisterMessage(GetIdSchemaResponse)
GetDefaultIdSchemaRequest = _reflection.GeneratedProtocolMessageType('GetDefaultIdSchemaRequest', (_message.Message,), {
'DESCRIPTOR' : _GETDEFAULTIDSCHEMAREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaRequest)
})
_sym_db.RegisterMessage(GetDefaultIdSchemaRequest)
GetDefaultIdSchemaResponse = _reflection.GeneratedProtocolMessageType('GetDefaultIdSchemaResponse', (_message.Message,), {
'DESCRIPTOR' : _GETDEFAULTIDSCHEMARESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetDefaultIdSchemaResponse)
})
_sym_db.RegisterMessage(GetDefaultIdSchemaResponse)
UpdateIdSchemaRequest = _reflection.GeneratedProtocolMessageType('UpdateIdSchemaRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEIDSCHEMAREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateIdSchemaRequest)
})
_sym_db.RegisterMessage(UpdateIdSchemaRequest)
UpdateIdSchemaResponse = _reflection.GeneratedProtocolMessageType('UpdateIdSchemaResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEIDSCHEMARESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateIdSchemaResponse)
})
_sym_db.RegisterMessage(UpdateIdSchemaResponse)
MarkDefaultIdSchemaRequest = _reflection.GeneratedProtocolMessageType('MarkDefaultIdSchemaRequest', (_message.Message,), {
'DESCRIPTOR' : _MARKDEFAULTIDSCHEMAREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.MarkDefaultIdSchemaRequest)
})
_sym_db.RegisterMessage(MarkDefaultIdSchemaRequest)
MarkDefaultIdSchemaResponse = _reflection.GeneratedProtocolMessageType('MarkDefaultIdSchemaResponse', (_message.Message,), {
'DESCRIPTOR' : _MARKDEFAULTIDSCHEMARESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.MarkDefaultIdSchemaResponse)
})
_sym_db.RegisterMessage(MarkDefaultIdSchemaResponse)
DeleteIdSchemaRequest = _reflection.GeneratedProtocolMessageType('DeleteIdSchemaRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEIDSCHEMAREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteIdSchemaRequest)
})
_sym_db.RegisterMessage(DeleteIdSchemaRequest)
DeleteIdSchemaResponse = _reflection.GeneratedProtocolMessageType('DeleteIdSchemaResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEIDSCHEMARESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.DeleteIdSchemaResponse)
})
_sym_db.RegisterMessage(DeleteIdSchemaResponse)
GetEmailsSetupRequest = _reflection.GeneratedProtocolMessageType('GetEmailsSetupRequest', (_message.Message,), {
'DESCRIPTOR' : _GETEMAILSSETUPREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetEmailsSetupRequest)
})
_sym_db.RegisterMessage(GetEmailsSetupRequest)
GetEmailsSetupResponse = _reflection.GeneratedProtocolMessageType('GetEmailsSetupResponse', (_message.Message,), {
'EmailTemplate' : _reflection.GeneratedProtocolMessageType('EmailTemplate', (_message.Message,), {
'DESCRIPTOR' : _GETEMAILSSETUPRESPONSE_EMAILTEMPLATE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailTemplate)
})
,
'EmailSender' : _reflection.GeneratedProtocolMessageType('EmailSender', (_message.Message,), {
'DESCRIPTOR' : _GETEMAILSSETUPRESPONSE_EMAILSENDER,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse.EmailSender)
})
,
'DESCRIPTOR' : _GETEMAILSSETUPRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetEmailsSetupResponse)
})
_sym_db.RegisterMessage(GetEmailsSetupResponse)
_sym_db.RegisterMessage(GetEmailsSetupResponse.EmailTemplate)
_sym_db.RegisterMessage(GetEmailsSetupResponse.EmailSender)
UpdateEmailsSetupRequest = _reflection.GeneratedProtocolMessageType('UpdateEmailsSetupRequest', (_message.Message,), {
'EmailTemplate' : _reflection.GeneratedProtocolMessageType('EmailTemplate', (_message.Message,), {
'DESCRIPTOR' : _UPDATEEMAILSSETUPREQUEST_EMAILTEMPLATE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailTemplate)
})
,
'EmailSender' : _reflection.GeneratedProtocolMessageType('EmailSender', (_message.Message,), {
'DESCRIPTOR' : _UPDATEEMAILSSETUPREQUEST_EMAILSENDER,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest.EmailSender)
})
,
'DESCRIPTOR' : _UPDATEEMAILSSETUPREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupRequest)
})
_sym_db.RegisterMessage(UpdateEmailsSetupRequest)
_sym_db.RegisterMessage(UpdateEmailsSetupRequest.EmailTemplate)
_sym_db.RegisterMessage(UpdateEmailsSetupRequest.EmailSender)
UpdateEmailsSetupResponse = _reflection.GeneratedProtocolMessageType('UpdateEmailsSetupResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEEMAILSSETUPRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.UpdateEmailsSetupResponse)
})
_sym_db.RegisterMessage(UpdateEmailsSetupResponse)
GetUserBaseStatisticsRequest = _reflection.GeneratedProtocolMessageType('GetUserBaseStatisticsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETUSERBASESTATISTICSREQUEST,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsRequest)
})
_sym_db.RegisterMessage(GetUserBaseStatisticsRequest)
GetUserBaseStatisticsResponse = _reflection.GeneratedProtocolMessageType('GetUserBaseStatisticsResponse', (_message.Message,), {
'UsersPerDayEntry' : _reflection.GeneratedProtocolMessageType('UsersPerDayEntry', (_message.Message,), {
'DESCRIPTOR' : _GETUSERBASESTATISTICSRESPONSE_USERSPERDAYENTRY,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.UsersPerDayEntry)
})
,
'SessionsPerDayEntry' : _reflection.GeneratedProtocolMessageType('SessionsPerDayEntry', (_message.Message,), {
'DESCRIPTOR' : _GETUSERBASESTATISTICSRESPONSE_SESSIONSPERDAYENTRY,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse.SessionsPerDayEntry)
})
,
'DESCRIPTOR' : _GETUSERBASESTATISTICSRESPONSE,
'__module__' : 'devtools.auth.v0.proto.identity.admin.admin_pb2'
# @@protoc_insertion_point(class_scope:depot.devtools.auth.v0.identity.admin.GetUserBaseStatisticsResponse)
})
_sym_db.RegisterMessage(GetUserBaseStatisticsResponse)
_sym_db.RegisterMessage(GetUserBaseStatisticsResponse.UsersPerDayEntry)
_sym_db.RegisterMessage(GetUserBaseStatisticsResponse.SessionsPerDayEntry)
DESCRIPTOR._options = None
_GETIDENTITYRESPONSE_CREDENTIALSIDSENTRY._options = None
_GETIDENTITIESBYATTRIBUTERESPONSE_IDENTITY_CREDENTIALSIDSENTRY._options = None
_GETCREDENTIALSRESPONSE_CREDENTIALSENTRY._options = None
_GETUSERBASESTATISTICSRESPONSE_USERSPERDAYENTRY._options = None
_GETUSERBASESTATISTICSRESPONSE_SESSIONSPERDAYENTRY._options = None
_ADMIN = _descriptor.ServiceDescriptor(
name='Admin',
full_name='depot.devtools.auth.v0.identity.admin.Admin',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=20668,
serialized_end=25396,
methods=[
_descriptor.MethodDescriptor(
name='CreateIdentity',
full_name='depot.devtools.auth.v0.identity.admin.Admin.CreateIdentity',
index=0,
containing_service=None,
input_type=_CREATEIDENTITYREQUEST,
output_type=_CREATEIDENTITYRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetIdentity',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetIdentity',
index=1,
containing_service=None,
input_type=_GETIDENTITYREQUEST,
output_type=_GETIDENTITYRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetIdentitiesByAttribute',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetIdentitiesByAttribute',
index=2,
containing_service=None,
input_type=_GETIDENTITIESBYATTRIBUTEREQUEST,
output_type=_GETIDENTITIESBYATTRIBUTERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetIdentities',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetIdentities',
index=3,
containing_service=None,
input_type=_GETIDENTITIESREQUEST,
output_type=_GETIDENTITIESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateIdentity',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateIdentity',
index=4,
containing_service=None,
input_type=_UPDATEIDENTITYREQUEST,
output_type=_UPDATEIDENTITYRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteIdentity',
full_name='depot.devtools.auth.v0.identity.admin.Admin.DeleteIdentity',
index=5,
containing_service=None,
input_type=_DELETEIDENTITYREQUEST,
output_type=_DELETEIDENTITYRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetAddresses',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetAddresses',
index=6,
containing_service=None,
input_type=_GETADDRESSESREQUEST,
output_type=_GETADDRESSESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetAddress',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetAddress',
index=7,
containing_service=None,
input_type=_GETADDRESSREQUEST,
output_type=_GETADDRESSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateAddress',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateAddress',
index=8,
containing_service=None,
input_type=_UPDATEADDRESSREQUEST,
output_type=_UPDATEADDRESSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetTraits',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetTraits',
index=9,
containing_service=None,
input_type=_GETTRAITSREQUEST,
output_type=_GETTRAITSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateTraits',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateTraits',
index=10,
containing_service=None,
input_type=_UPDATETRAITSREQUEST,
output_type=_UPDATETRAITSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetCredentials',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetCredentials',
index=11,
containing_service=None,
input_type=_GETCREDENTIALSREQUEST,
output_type=_GETCREDENTIALSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateCredential',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateCredential',
index=12,
containing_service=None,
input_type=_UPDATECREDENTIALREQUEST,
output_type=_UPDATECREDENTIALRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetIdentityLoginAttempts',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetIdentityLoginAttempts',
index=13,
containing_service=None,
input_type=_GETIDENTITYLOGINATTEMPTSREQUEST,
output_type=_GETIDENTITYLOGINATTEMPTSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateConnection',
full_name='depot.devtools.auth.v0.identity.admin.Admin.CreateConnection',
index=14,
containing_service=None,
input_type=_CREATECONNECTIONREQUEST,
output_type=_CREATECONNECTIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetConnections',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetConnections',
index=15,
containing_service=None,
input_type=_GETCONNECTIONSREQUEST,
output_type=_GETCONNECTIONSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateConnection',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateConnection',
index=16,
containing_service=None,
input_type=_UPDATECONNECTIONREQUEST,
output_type=_UPDATECONNECTIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteConnection',
full_name='depot.devtools.auth.v0.identity.admin.Admin.DeleteConnection',
index=17,
containing_service=None,
input_type=_DELETECONNECTIONREQUEST,
output_type=_DELETECONNECTIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateIdSchema',
full_name='depot.devtools.auth.v0.identity.admin.Admin.CreateIdSchema',
index=18,
containing_service=None,
input_type=_CREATEIDSCHEMAREQUEST,
output_type=_CREATEIDSCHEMARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetIdSchemas',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetIdSchemas',
index=19,
containing_service=None,
input_type=_GETIDSCHEMASREQUEST,
output_type=_GETIDSCHEMASRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetIdSchema',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetIdSchema',
index=20,
containing_service=None,
input_type=_GETIDSCHEMAREQUEST,
output_type=_GETIDSCHEMARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetDefaultIdSchema',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetDefaultIdSchema',
index=21,
containing_service=None,
input_type=_GETDEFAULTIDSCHEMAREQUEST,
output_type=_GETDEFAULTIDSCHEMARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateIdSchema',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateIdSchema',
index=22,
containing_service=None,
input_type=_UPDATEIDSCHEMAREQUEST,
output_type=_UPDATEIDSCHEMARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='MarkDefaultIdSchema',
full_name='depot.devtools.auth.v0.identity.admin.Admin.MarkDefaultIdSchema',
index=23,
containing_service=None,
input_type=_MARKDEFAULTIDSCHEMAREQUEST,
output_type=_MARKDEFAULTIDSCHEMARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteIdSchema',
full_name='depot.devtools.auth.v0.identity.admin.Admin.DeleteIdSchema',
index=24,
containing_service=None,
input_type=_DELETEIDSCHEMAREQUEST,
output_type=_DELETEIDSCHEMARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateOAuth2Client',
full_name='depot.devtools.auth.v0.identity.admin.Admin.CreateOAuth2Client',
index=25,
containing_service=None,
input_type=_CREATEOAUTH2CLIENTREQUEST,
output_type=_CREATEOAUTH2CLIENTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetOAuth2Clients',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetOAuth2Clients',
index=26,
containing_service=None,
input_type=_GETOAUTH2CLIENTSREQUEST,
output_type=_GETOAUTH2CLIENTSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateOAuth2Client',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateOAuth2Client',
index=27,
containing_service=None,
input_type=_UPDATEOAUTH2CLIENTREQUEST,
output_type=_UPDATEOAUTH2CLIENTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteOAuth2Client',
full_name='depot.devtools.auth.v0.identity.admin.Admin.DeleteOAuth2Client',
index=28,
containing_service=None,
input_type=_DELETEOAUTH2CLIENTREQUEST,
output_type=_DELETEOAUTH2CLIENTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetEmailsSetup',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetEmailsSetup',
index=29,
containing_service=None,
input_type=_GETEMAILSSETUPREQUEST,
output_type=_GETEMAILSSETUPRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateEmailsSetup',
full_name='depot.devtools.auth.v0.identity.admin.Admin.UpdateEmailsSetup',
index=30,
containing_service=None,
input_type=_UPDATEEMAILSSETUPREQUEST,
output_type=_UPDATEEMAILSSETUPRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetUserBaseStatistics',
full_name='depot.devtools.auth.v0.identity.admin.Admin.GetUserBaseStatistics',
index=31,
containing_service=None,
input_type=_GETUSERBASESTATISTICSREQUEST,
output_type=_GETUSERBASESTATISTICSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_ADMIN)
DESCRIPTOR.services_by_name['Admin'] = _ADMIN
# @@protoc_insertion_point(module_scope)
| 50.680169
| 37,696
| 0.776054
| 40,091
| 335,300
| 6.180689
| 0.029932
| 0.046491
| 0.083809
| 0.069736
| 0.833787
| 0.808576
| 0.802611
| 0.773772
| 0.760801
| 0.725909
| 0
| 0.038864
| 0.107525
| 335,300
| 6,615
| 37,697
| 50.687831
| 0.789181
| 0.025509
| 0
| 0.738278
| 1
| 0.003668
| 0.218713
| 0.181935
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.001276
| 0.001116
| 0
| 0.001116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7e4fb560f0e1186dd2f1381722e5bdaeaa0b49cb
| 9,604
|
py
|
Python
|
signbank/dictionary/migrations/0003_auto_20170331_1546.py
|
gstvob/SignBank-Brasil
|
16a7b2341e56a098de828bda132da2e38815f2ec
|
[
"BSD-3-Clause"
] | 11
|
2018-01-27T17:28:40.000Z
|
2021-11-08T09:02:08.000Z
|
signbank/dictionary/migrations/0003_auto_20170331_1546.py
|
gstvob/SignBank-Brasil
|
16a7b2341e56a098de828bda132da2e38815f2ec
|
[
"BSD-3-Clause"
] | 467
|
2018-01-25T11:23:27.000Z
|
2022-03-31T14:51:22.000Z
|
signbank/dictionary/migrations/0003_auto_20170331_1546.py
|
gstvob/SignBank-Brasil
|
16a7b2341e56a098de828bda132da2e38815f2ec
|
[
"BSD-3-Clause"
] | 9
|
2018-03-02T10:36:29.000Z
|
2021-06-11T15:25:25.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-31 13:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.10.6, 2017-03-31) that widens the
    choice lists of the four Gloss handshape fields to the full handshape set.
    """

    # The four altered fields share one byte-identical choice list; define it
    # once instead of repeating the ~120-entry literal four times. Class-body
    # statements execute in order, so later statements can reference this name
    # directly. Reusing the same list object is safe: Django only reads it.
    _HANDSHAPE_CHOICES = [
        ('0', '-'), ('1', 'N/A'), ('89', '#A'), ('90', '#B'), ('162', '#B > #A'),
        ('91', '#B > #L'), ('92', '#B > #S'), ('157', '#B > middlefinger'),
        ('93', '#C'), ('161', '#C > #I'), ('94', '#D'), ('95', '#E'), ('96', '#F'),
        ('97', '#F > #L'), ('163', '#G'), ('164', '#G'), ('98', '#I'),
        ('110', '#I > #L'), ('158', '#I > #N'), ('100', '#I > #R'), ('99', '#I > #W'),
        ('108', '#I > 1'), ('109', '#I > N'), ('42', '#K'), ('101', '#L'),
        ('102', '#M'), ('103', '#M > #V'), ('104', '#N'), ('105', '#N > #E'),
        ('165', '#O'), ('107', '#O > #K'), ('148', '#P'), ('166', '#Q'),
        ('167', '#R'), ('156', '#S'), ('112', '#S > #I'), ('111', '#S > #W'),
        ('113', '#T'), ('141', '#T > #L'), ('150', '#T > #V'), ('160', '#V'),
        ('115', '#W'), ('168', '#Y'), ('159', '#Y > #R'), ('15', '1'),
        ('21', '1_curved'), ('37', '3'), ('35', '4'), ('3', '5'), ('149', '5 > #V'),
        ('30', '5m'), ('43', '5mx'), ('19', '5r'), ('49', '5rx'), ('48', '9'),
        ('117', '=1'), ('118', '=3'), ('119', '=3 > =0'), ('120', '=4'),
        ('121', '=4 > =0'), ('122', '=5'), ('123', '=9'), ('124', '=A'),
        ('143', '=A > =T'), ('126', '=Baby_O'), ('127', '=F'), ('128', '=L'),
        ('144', '=L > =O'), ('129', '=O'), ('130', '=T'), ('131', '=V'),
        ('133', '=W'), ('134', '=W > =Beak2'), ('169', '=variable'), ('24', 'A'),
        ('153', 'A > 5'), ('6', 'B'), ('138', 'B+1'), ('26', 'B_bent'),
        ('13', 'B_curved'), ('62', 'Baby_C'), ('38', 'Baby_O'), ('29', 'Baby_beak'),
        ('155', 'Baby_beak > A'), ('83', 'Baby_beak_open'), ('22', 'Beak'),
        ('17', 'Beak2'), ('85', 'Beak2_open'), ('152', 'Beak2_open_spread'),
        ('23', 'Beak2_spread'), ('84', 'Beak_open'), ('87', 'Beak_open_spread'),
        ('86', 'Beak_spread'), ('27', 'C'), ('145', 'C > I'), ('136', 'C2_closed'),
        ('75', 'C2_spread'), ('12', 'C_spread'), ('34', 'D'), ('39', 'F'),
        ('59', 'Horns'), ('25', 'I'), ('146', 'I > #L'), ('147', 'I > #N'),
        ('9', 'L'), ('52', 'L2'), ('40', 'M'), ('80', 'Middle finger'),
        ('4', 'Money'), ('14', 'N'), ('142', 'N > B'), ('32', 'O'), ('139', 'O > 5m'),
        ('135', 'Other'), ('18', 'Q'), ('47', 'R'), ('8', 'S'), ('28', 'T'),
        ('154', 'T_open'), ('5', 'V'), ('20', 'V_curved'), ('16', 'W'), ('7', 'Y'),
    ]

    dependencies = [
        ('dictionary', '0002_auto_20170323_1337'),
    ]

    operations = [
        migrations.AlterField(
            model_name='gloss',
            name='domhndsh',
            field=models.CharField(blank=True, choices=_HANDSHAPE_CHOICES, max_length=5, null=True, verbose_name='Strong Hand'),
        ),
        migrations.AlterField(
            model_name='gloss',
            name='final_domhndsh',
            field=models.CharField(blank=True, choices=_HANDSHAPE_CHOICES, max_length=5, null=True, verbose_name='Final Dominant Handshape'),
        ),
        migrations.AlterField(
            model_name='gloss',
            name='final_subhndsh',
            field=models.CharField(blank=True, choices=_HANDSHAPE_CHOICES, max_length=5, null=True, verbose_name='Final Subordinate Handshape'),
        ),
        migrations.AlterField(
            model_name='gloss',
            name='subhndsh',
            field=models.CharField(blank=True, choices=_HANDSHAPE_CHOICES, max_length=5, null=True, verbose_name='Weak Hand'),
        ),
    ]
| 266.777778
| 2,228
| 0.3611
| 1,345
| 9,604
| 2.49368
| 0.159851
| 0.007156
| 0.029815
| 0.034586
| 0.930829
| 0.930829
| 0.919499
| 0.877162
| 0.877162
| 0.877162
| 0
| 0.169215
| 0.162536
| 9,604
| 35
| 2,229
| 274.4
| 0.247793
| 0.00708
| 0
| 0.428571
| 1
| 0
| 0.38351
| 0.002413
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.178571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7e71ef068bb3ac590bf9076882478e9e0492caa4
| 244,838
|
py
|
Python
|
Integrations/python/deephaven/Plot/figure_wrapper.py
|
chrisabidin/deephaven-core
|
ca6609e75dbc84fa4fa7fa89abf68f1e2bc81793
|
[
"MIT"
] | 55
|
2021-05-11T16:01:59.000Z
|
2022-03-30T14:30:33.000Z
|
Integrations/python/deephaven/Plot/figure_wrapper.py
|
chrisabidin/deephaven-core
|
ca6609e75dbc84fa4fa7fa89abf68f1e2bc81793
|
[
"MIT"
] | 943
|
2021-05-10T14:00:02.000Z
|
2022-03-31T21:28:15.000Z
|
Integrations/python/deephaven/Plot/figure_wrapper.py
|
rcaudy/deephaven-core
|
b740bb3367dc8ece0a291668b5b363a6c61d699c
|
[
"MIT"
] | 29
|
2021-05-10T11:33:16.000Z
|
2022-03-30T21:01:54.000Z
|
"""
A figure for creating plots.
"""
#
# Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending
#
######################################################################################################################
# This code is auto generated. DO NOT EDIT FILE!
# Run generatePythonFigureWrapper or "./gradlew :Generators:generatePythonFigureWrapper" to generate
######################################################################################################################
import sys
import logging
import jpy
import numpy
import pandas
import wrapt
from ..conversion_utils import _isJavaType, _isStr, makeJavaArray, _ensureBoxedArray, getJavaClassObject
_plotting_convenience_ = None  # bound to io.deephaven.plot.PlottingConvenience by _defineSymbols(); this module will be useless with no jvm
_figure_widget_ = None  # bound to io.deephaven.plot.FigureWidget by _defineSymbols()
def _defineSymbols():
    """
    Bind the java symbols this module needs, which requires that the jvm has
    already been started through the :class:`jpy` module. Binding is deferred
    to runtime (rather than happening statically at first import) so that
    importing this module before the jvm is initialized does not raise.
    """
    if not jpy.has_jvm():
        raise SystemError("No java functionality can be used until the JVM has been initialized through the jpy module")

    global _plotting_convenience_, _figure_widget_
    if _plotting_convenience_ is not None:
        return  # symbols already bound
    # jpy.get_type raises if the class is not on the jvm classpath
    _plotting_convenience_ = jpy.get_type("io.deephaven.plot.PlottingConvenience")
    _figure_widget_ = jpy.get_type('io.deephaven.plot.FigureWidget')
if sys.version_info[0] > 2:
def _is_basic_type_(obj):
return isinstance(obj, bool) or isinstance(obj, int) or isinstance(obj, float) or isinstance(obj, str)
else:
def _is_basic_type_(obj):
return isinstance(obj, bool) or isinstance(obj, int) or isinstance(obj, long) \
or isinstance(obj, float) or isinstance(obj, basestring)
def _is_widget_(obj):
if obj is None:
return False
cond = False
try:
cond = getJavaClassObject('io.deephaven.plot.FigureWidget').isAssignableFrom(obj)
except Exception:
pass
return cond
def _create_java_object_(obj):
    """Convert *obj* to something the java plotting API can accept, or pass it
    through unchanged when no conversion applies. The order of the checks
    matters: java-side values and scalars short-circuit before container types."""
    if obj is None:
        return None
    if isinstance(obj, FigureWrapper) or _isJavaType(obj):
        # already on the java side
        return obj
    if _is_basic_type_(obj):
        # jpy will (*should*) convert scalars properly itself
        return obj
    if isinstance(obj, (numpy.ndarray, pandas.Series, pandas.Categorical)):
        return makeJavaArray(obj, 'unknown', False)
    if isinstance(obj, dict):
        # no sensible conversion - what would we do?
        return obj
    if isinstance(obj, (list, tuple)):
        # route through a numpy array; maybe it's better to pass it straight through?
        return _create_java_object_(numpy.array(obj))
    # other iterables and anything unrecognized pass straight through
    return obj
def _convert_arguments_(args):
    """Convert each positional argument for the java layer; returns a list."""
    return list(map(_create_java_object_, args))
@wrapt.decorator
def _convertArguments(wrapped, instance, args, kwargs):
    """
    Decorator for FigureWrapper class methods: converts the positional
    arguments to java-friendly values before delegating.

    :param wrapped: the method being decorated
    :param instance: the object the method was bound to when called (unused)
    :param args: the positional argument list for `wrapped`
    :param kwargs: the keyword argument dictionary for `wrapped` (unused)
    :return: the result of `wrapped` applied to the converted arguments
    """
    converted = _convert_arguments_(args)
    return wrapped(*converted)
@wrapt.decorator
def _convertCatPlotArguments(wrapped, instance, args, kwargs):
    """
    Decorator for the FigureWrapper catPlot, catErrorBar and piePlot methods:
    converts the positional arguments and additionally boxes the category data
    (argument index 1), which must extend java Number.

    :param wrapped: the method being decorated
    :param instance: the object the method was bound to when called (unused)
    :param args: the positional argument list for `wrapped`
    :param kwargs: the keyword argument dictionary for `wrapped` (unused)
    :return: the result of `wrapped` applied to the converted arguments
    """
    converted = _convert_arguments_(args)
    converted[1] = _ensureBoxedArray(converted[1])  # categories must be boxed
    return wrapped(*converted)
class FigureWrapper(object):
"""
Class which assembles a variety of plotting convenience methods into a single usable package
"""
def __init__(self, *args, **kwargs):
    """Create a wrapper: reuse a java figure passed as `figure=`, otherwise
    build a new one from the (converted) positional arguments."""
    _defineSymbols()
    fig = kwargs.get('figure', None)
    if fig is None:
        fig = _plotting_convenience_.figure(*_convert_arguments_(args))
    self._figure = fig
    self._valid_groups = None
@property
def figure(self):
    """The underlying java Figure object backing this wrapper."""
    return self._figure
@property
def widget(self):
    """The backing FigureWidget, or `None` when .show() has NOT been called yet."""
    return self.figure if _is_widget_(self.figure.getClass()) else None
@property
def validGroups(self):
    """The collection (actually a java array) of valid user ids, converted for the java side."""
    return _create_java_object_(self._valid_groups)
@validGroups.setter
def validGroups(self, groups):
    """Set the valid users: None, a single user id string, or any iterable of ids.
    A non-iterable input is logged as an error and leaves the value unchanged."""
    if groups is None:
        self._valid_groups = None
        return
    if _isStr(groups):
        self._valid_groups = [groups]
        return
    try:
        # coerce any other iterable to a list
        self._valid_groups = list(groups)
    except Exception as e:
        logging.error("Failed to set validGroups using input {} with exception {}".format(groups, e))
def show(self):
    """
    Wrap the figure in a figure widget for display.

    :return: FigureWrapper whose figure attribute is the applicable widget
    """
    shown = self._figure.show()
    return FigureWrapper(figure=shown)
def getWidget(self):
    """
    Get the figure widget, if applicable. It will be `None` if .show() has NOT been called.

    :return: None or the widget reference
    """
    return self.widget
def getValidGroups(self):
    """
    Get the collection of valid users.

    :return: java array of user id strings
    """
    return self.validGroups
def setValidGroups(self, groups):
    """
    Set the list of user ids which should have access to this figure wrapper object.

    :param groups: None, single user id string, or list of user id strings
    """
    self.validGroups = groups
@_convertArguments
def axes(self, *args):
    """
    Get an axes, selected either by name or by id.

    *Overload 1*
    :param name: (java.lang.String) - axes name.
    *Overload 2*
    :param id: (int) - axes id.
    :return: (io.deephaven.plot.Figure) selected axes.
    """
    selected = self.figure.axes(*args)
    return FigureWrapper(figure=selected)
@_convertArguments
def axesRemoveSeries(self, *names):
    """
    Remove the series with the given names from this Axes.

    :param names: (java.lang.String...) - series names
    :return: (io.deephaven.plot.Figure) this Chart
    """
    updated = self.figure.axesRemoveSeries(*names)
    return FigureWrapper(figure=updated)
@_convertArguments
def axis(self, dim):
    """
    Get the Axis at dimension dim. The x-axis is dimension 0, the y-axis dimension 1.

    :param dim: (int) - dimension of the Axis
    :return: (io.deephaven.plot.Figure) Axis at dimension dim
    """
    selected = self.figure.axis(dim)
    return FigureWrapper(figure=selected)
@_convertArguments
def axisColor(self, color):
    """
    Set the color for this Axis line and tick marks.

    :param color: (java.lang.String or io.deephaven.gui.color.Paint) - color
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.axisColor(color)
    return FigureWrapper(figure=updated)
@_convertArguments
def axisFormat(self, format):
    """
    Set the AxisFormat for this Axis.

    :param format: (io.deephaven.plot.axisformatters.AxisFormat) - axis format
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.axisFormat(format)
    return FigureWrapper(figure=updated)
@_convertArguments
def axisFormatPattern(self, pattern):
    """
    Set the format pattern used for this Axis's labels.

    :param pattern: (java.lang.String) - axis format pattern
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.axisFormatPattern(pattern)
    return FigureWrapper(figure=updated)
@_convertArguments
def axisLabel(self, label):
    """
    Set the label for this Axis.

    :param label: (java.lang.String) - label
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.axisLabel(label)
    return FigureWrapper(figure=updated)
@_convertArguments
def axisLabelFont(self, *args):
    """
    Set the font for this Axis's label, either as a Font object or from components.

    *Overload 1*
    :param font: (io.deephaven.plot.Font) - font
    *Overload 2*
    :param family: (java.lang.String) - font family; if null, set to Arial
    :param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
    :param size: (int) - the point size of the Font
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.axisLabelFont(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def businessTime(self, *args):
    """
    Set this Axis's AxisTransform to an AxisTransformBusinessCalendar.

    *Overload 1* - no arguments: use the default business calendar.
    *Overload 2*
    :param calendar: (io.deephaven.time.calendar.BusinessCalendar) - business
        calendar of the AxisTransformBusinessCalendar
    *Overload 3*
    :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data
        set (e.g. OneClick filterable table) containing the business calendar.
    :param valueColumn: (java.lang.String) - name of a column containing String
        values, where each value is the name of a BusinessCalendar. The calendar
        from row 0 of the filtered sds is used; if no value is found, no
        transform will be applied.
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.businessTime(*args)
    return FigureWrapper(figure=updated)
@_convertCatPlotArguments
def catErrorBar(self, *args):
    """
    Create a category error bar plot with whiskers in the y direction.

    Array/list overloads (1-16):
    :param seriesName: (java.lang.Comparable) - name of the created dataset
    :param categories: (T0[] or java.util.List<T0>, with T0 extends
        java.lang.Comparable) - discrete data
    :param values: numeric data, as one of T1[], double[], float[], int[],
        long[], short[], io.deephaven.time.DateTime[], java.util.Date[], or
        java.util.List<T1>
    :param yLow: low value in y dimension (same family of types as values)
    :param yHigh: high value in y dimension (same family of types as values)
    :return: (io.deephaven.plot.Figure) dataset created by the plot

    Table overloads (17-18):
    :param seriesName: (java.lang.Comparable) - name of the created dataset
    :param t: (io.deephaven.engine.table.Table) - table, or
    :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable
        dataset (e.g. OneClick filterable table)
    :param categories: (java.lang.String) - column that holds the discrete data
    :param values: (java.lang.String) - column that holds the numeric data
    :param yLow: (java.lang.String) - column that holds the low value in the y dimension
    :param yHigh: (java.lang.String) - column that holds the high value in the y dimension
    :return: (io.deephaven.plot.Figure) dataset created for plot
    """
    created = self.figure.catErrorBar(*args)
    return FigureWrapper(figure=created)
@_convertArguments
def catErrorBarBy(self, *args):
    """
    Create a catErrorBar plot for each distinct grouping value specified in byColumns.

    :param seriesName: (java.lang.Comparable) - name of the created dataset
    :param t: (io.deephaven.engine.table.Table) - table, or
    :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable
        dataset (e.g. OneClick filterable table)
    :param categories: (java.lang.String) - column that holds the discrete data
    :param values: (java.lang.String) - column that holds the numeric data
    :param yLow: (java.lang.String) - column that holds the low value in the y dimension
    :param yHigh: (java.lang.String) - column that holds the high value in the y dimension
    :param byColumns: (java.lang.String...) - column(s) that hold the grouping data
    :return: (io.deephaven.plot.Figure) dataset created for plot
    """
    created = self.figure.catErrorBarBy(*args)
    return FigureWrapper(figure=created)
@_convertArguments
def catHistPlot(self, *args):
    """
    Create a histogram with a discrete axis, charting the frequency of each
    unique element in the input data.

    Array/list overloads (1-6):
    :param seriesName: (java.lang.Comparable) - name of the created dataset
    :param x: data, as one of T[] (T extends java.lang.Comparable), double[],
        float[], int[], long[], or java.util.List<T>

    Table overloads (7-8):
    :param seriesName: (java.lang.Comparable) - name of the created dataset
    :param t: (io.deephaven.engine.table.Table) - table, or
    :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data
        set (e.g. OneClick filterable table)
    :param columnName: (java.lang.String) - column that holds the data
    :return: (io.deephaven.plot.Figure) dataset created for plot
    """
    created = self.figure.catHistPlot(*args)
    return FigureWrapper(figure=created)
@_convertCatPlotArguments
def catPlot(self, *args):
"""
**Incompatible overloads text - text from the first overload:**
Creates a plot with discrete axis.
Discrete data must not have duplicates.
*Overload 1*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (T1[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (double[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 3*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (float[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 4*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (int[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 5*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (long[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 6*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (io.deephaven.time.DateTime[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 7*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (java.util.Date[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 8*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (short[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 9*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - discrete data
:param values: (java.util.List<T1>) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 10*
Note: Java generics information - <T1 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (io.deephaven.plot.datasets.data.IndexableData<T1>) - discrete data
:param values: (io.deephaven.plot.datasets.data.IndexableNumericData) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 11*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (T1[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 12*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (double[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 13*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (float[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 14*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (int[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 15*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (long[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 16*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (io.deephaven.time.DateTime[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 17*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (java.util.Date[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 18*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (short[]) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 19*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - discrete data
:param values: (java.util.List<T1>) - numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 20*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param categories: (java.lang.String) - column in t holding discrete data
:param values: (java.lang.String) - column in t holding numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 21*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
:param categories: (java.lang.String) - column in sds holding discrete data
:param values: (java.lang.String) - column in sds holding numeric data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.catPlot(*args))
@_convertArguments
def catPlotBy(self, *args):
"""
Creates a category plot per distinct grouping value specified in byColumns.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param categories: (java.lang.String) - column in t holding discrete data
:param values: (java.lang.String) - column in t holding numeric data
:param byColumns: (java.lang.String...) - column(s) in t that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
:param categories: (java.lang.String) - column in sds holding discrete data
:param values: (java.lang.String) - column in sds holding numeric data
:param byColumns: (java.lang.String...) - column(s) in sds that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.catPlotBy(*args))
@_convertArguments
def chart(self, *args):
"""
Returns a chart from this Figure's grid.
*Overload 1*
:param index: (int) - index from the Figure's grid to remove. The index starts at 0 in the upper left hand corner of the
grid and increases going left to right, top to bottom. E.g. for a 2x2 Figure, the indices would be [0, 1]
[2, 3].
:return: (io.deephaven.plot.Figure) selected Chart
*Overload 2*
:param rowNum: (int) - row index in this Figure's grid. The row index starts at 0.
:param colNum: (int) - column index in this Figure's grid. The column index starts at 0.
:return: (io.deephaven.plot.Figure) selected Chart
"""
return FigureWrapper(figure=self.figure.chart(*args))
@_convertArguments
def chartRemoveSeries(self, *names):
"""
Removes the series with the specified names from this Chart.
:param names: (java.lang.String...) - series names
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.chartRemoveSeries(*names))
@_convertArguments
def chartTitle(self, *args):
"""
Sets the title of this Chart.
*Overload 1*
:param title: (java.lang.String) - title
:return: (io.deephaven.plot.Figure) this Chart
*Overload 2*
:param t: (io.deephaven.engine.table.Table) - table
:param titleColumns: (java.lang.String...) - columns to include in the chart title
:return: (io.deephaven.plot.Figure) this Chart with the title set to display comma-separated values from the table
*Overload 3*
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick table)
:param titleColumns: (java.lang.String...) - columns to include in the chart title
:return: (io.deephaven.plot.Figure) this Chart with the title set to display comma-separated values from the table
*Overload 4*
:param showColumnNamesInTitle: (boolean) - Whether to show column names in title. If this is true, the title format will
include the column name before the comma separated values; otherwise only the comma separated values will
be included.
:param t: (io.deephaven.engine.table.Table) - table
:param titleColumns: (java.lang.String...) - columns to include in the chart title
:return: (io.deephaven.plot.Figure) this Chart with the title set to display comma-separated values from the table
*Overload 5*
:param showColumnNamesInTitle: (boolean) - Whether to show column names in title. If this is true, the title format will
include the column name before the comma separated values; otherwise only the comma separated values will
be included.
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick table)
:param titleColumns: (java.lang.String...) - columns to include in the chart title
:return: (io.deephaven.plot.Figure) this Chart with the title set to display comma-separated values from the table
*Overload 6*
:param titleFormat: (java.lang.String) - a MessageFormat format string for the chart title
:param t: (io.deephaven.engine.table.Table) - table
:param titleColumns: (java.lang.String...) - columns to include in the chart title
:return: (io.deephaven.plot.Figure) this Chart with the title set to display values from the table
*Overload 7*
:param titleFormat: (java.lang.String) - a MessageFormat format string for the chart title
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick table)
:param titleColumns: (java.lang.String...) - columns to include in the chart title
:return: (io.deephaven.plot.Figure) this Chart with the title set to display values from the table
"""
return FigureWrapper(figure=self.figure.chartTitle(*args))
@_convertArguments
def chartTitleColor(self, color):
"""
Sets the color of this Chart's title.
*Overload 1*
:param color: (java.lang.String) - color
:return: (io.deephaven.plot.Figure) this Chart
*Overload 2*
:param color: (io.deephaven.gui.color.Paint) - color
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.chartTitleColor(color))
@_convertArguments
def chartTitleFont(self, *args):
"""
Sets the font of this Chart's title.
*Overload 1*
:param font: (io.deephaven.plot.Font) - font
:return: (io.deephaven.plot.Figure) this Chart
*Overload 2*
:param family: (java.lang.String) - font family; if null, set to Arial
:param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
:param size: (int) - the point size of the Font
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.chartTitleFont(*args))
@_convertArguments
def colSpan(self, n):
"""
Sets the size of this Chart within the grid of the figure.
:param n: (int) - how many columns wide
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.colSpan(n))
@_convertArguments
def errorBarColor(self, *args):
"""
Sets the error bar Paint for this dataset.
*Overload 1*
:param color: (int) - index of the color in the series color palette
:return: (io.deephaven.plot.Figure) this DataSeries
*Overload 2*
:param color: int
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
*Overload 3*
:param color: (io.deephaven.gui.color.Paint) - color
:return: (io.deephaven.plot.Figure) this DataSeries
*Overload 4*
:param color: io.deephaven.gui.color.Paint
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
*Overload 5*
:param color: (java.lang.String) - color
:return: (io.deephaven.plot.Figure) this DataSeries
*Overload 6*
:param color: java.lang.String
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
"""
return FigureWrapper(figure=self.figure.errorBarColor(*args))
@_convertArguments
def errorBarX(self, *args):
"""
Creates an XY plot with error bars in the x direction.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param xLow: (T1[]) - low value in x dimension
:param xHigh: (T2[]) - high value in x dimension
:param y: (T3[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param xLow: (T1[]) - low value in x dimension
:param xHigh: (T2[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 3*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param xLow: (T1[]) - low value in x dimension
:param xHigh: (T2[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 4*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param xLow: (double[]) - low value in x dimension
:param xHigh: (double[]) - high value in x dimension
:param y: (double[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 5*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param xLow: (double[]) - low value in x dimension
:param xHigh: (double[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 6*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param xLow: (double[]) - low value in x dimension
:param xHigh: (double[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 7*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param xLow: (float[]) - low value in x dimension
:param xHigh: (float[]) - high value in x dimension
:param y: (float[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 8*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param xLow: (float[]) - low value in x dimension
:param xHigh: (float[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 9*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param xLow: (float[]) - low value in x dimension
:param xHigh: (float[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 10*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param xLow: (int[]) - low value in x dimension
:param xHigh: (int[]) - high value in x dimension
:param y: (int[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 11*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param xLow: (int[]) - low value in x dimension
:param xHigh: (int[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 12*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param xLow: (int[]) - low value in x dimension
:param xHigh: (int[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 13*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param xLow: (long[]) - low value in x dimension
:param xHigh: (long[]) - high value in x dimension
:param y: (long[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 14*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param xLow: (long[]) - low value in x dimension
:param xHigh: (long[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 15*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param xLow: (long[]) - low value in x dimension
:param xHigh: (long[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 16*
Note: Java generics information - <T3 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (T3[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 17*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (double[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 18*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (float[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 19*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (int[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 20*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (long[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 21*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 22*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (short[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 23*
Note: Java generics information - <T3 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (java.util.List<T3>) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 24*
Note: Java generics information - <T3 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (T3[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 25*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (double[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 26*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (float[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 27*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (int[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 28*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (long[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 29*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 30*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (short[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 31*
Note: Java generics information - <T3 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (java.util.List<T3>) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 32*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param xLow: (short[]) - low value in x dimension
:param xHigh: (short[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 33*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param xLow: (short[]) - low value in x dimension
:param xHigh: (short[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 34*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param xLow: (short[]) - low value in x dimension
:param xHigh: (short[]) - high value in x dimension
:param y: (short[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 35*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param xLow: (java.util.List<T1>) - low value in x dimension
:param xHigh: (java.util.List<T2>) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 36*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param xLow: (java.util.List<T1>) - low value in x dimension
:param xHigh: (java.util.List<T2>) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 37*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param xLow: (java.util.List<T1>) - low value in x dimension
:param xHigh: (java.util.List<T2>) - high value in x dimension
:param y: (java.util.List<T3>) - y-values
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 38*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param x: (java.lang.String) - column in t that holds the x-variable data
:param xLow: (java.lang.String) - column in t that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in t that holds the high value in the x dimension
:param y: (java.lang.String) - column in t that holds the y-variable data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 39*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset (e.g. OneClick filterable table)
:param x: (java.lang.String) - column in sds that holds the x-variable data
:param xLow: (java.lang.String) - column in sds that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in sds that holds the high value in the x dimension
:param y: (java.lang.String) - column in sds that holds the y-variable data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.errorBarX(*args))
@_convertArguments
def errorBarXBy(self, *args):
"""
Creates an errorBarX plot per distinct grouping value specified in byColumns.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param x: (java.lang.String) - column in t that holds the x-variable data
:param xLow: (java.lang.String) - column in t that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in t that holds the high value in the x dimension
:param y: (java.lang.String) - column in t that holds the y-variable data
:param byColumns: (java.lang.String...) - column(s) in t that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset (e.g. OneClick filterable table)
:param x: (java.lang.String) - column in sds that holds the x-variable data
:param xLow: (java.lang.String) - column in sds that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in sds that holds the high value in the x dimension
:param y: (java.lang.String) - column in sds that holds the y-variable data
:param byColumns: (java.lang.String...) - column(s) in sds that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.errorBarXBy(*args))
@_convertArguments
def errorBarXY(self, *args):
"""
Creates an XY plot with error bars in both the x and y directions.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param xLow: (T1[]) - low value in x dimension
:param xHigh: (T2[]) - high value in x dimension
:param y: (T3[]) - y-values
:param yLow: (T4[]) - low value in y dimension
:param yHigh: (T5[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param xLow: (T1[]) - low value in x dimension
:param xHigh: (T2[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 3*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param xLow: (T1[]) - low value in x dimension
:param xHigh: (T2[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 4*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param xLow: (double[]) - low value in x dimension
:param xHigh: (double[]) - high value in x dimension
:param y: (double[]) - y-values
:param yLow: (double[]) - low value in y dimension
:param yHigh: (double[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 5*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param xLow: (double[]) - low value in x dimension
:param xHigh: (double[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 6*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param xLow: (double[]) - low value in x dimension
:param xHigh: (double[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 7*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param xLow: (float[]) - low value in x dimension
:param xHigh: (float[]) - high value in x dimension
:param y: (float[]) - y-values
:param yLow: (float[]) - low value in y dimension
:param yHigh: (float[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 8*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param xLow: (float[]) - low value in x dimension
:param xHigh: (float[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 9*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param xLow: (float[]) - low value in x dimension
:param xHigh: (float[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 10*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param xLow: (int[]) - low value in x dimension
:param xHigh: (int[]) - high value in x dimension
:param y: (int[]) - y-values
:param yLow: (int[]) - low value in y dimension
:param yHigh: (int[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 11*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param xLow: (int[]) - low value in x dimension
:param xHigh: (int[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 12*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param xLow: (int[]) - low value in x dimension
:param xHigh: (int[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 13*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param xLow: (long[]) - low value in x dimension
:param xHigh: (long[]) - high value in x dimension
:param y: (long[]) - y-values
:param yLow: (long[]) - low value in y dimension
:param yHigh: (long[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 14*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param xLow: (long[]) - low value in x dimension
:param xHigh: (long[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 15*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param xLow: (long[]) - low value in x dimension
:param xHigh: (long[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 16*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (T3[]) - y-values
:param yLow: (T4[]) - low value in y dimension
:param yHigh: (T5[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 17*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (double[]) - y-values
:param yLow: (double[]) - low value in y dimension
:param yHigh: (double[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 18*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (float[]) - y-values
:param yLow: (float[]) - low value in y dimension
:param yHigh: (float[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 19*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (int[]) - y-values
:param yLow: (int[]) - low value in y dimension
:param yHigh: (int[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 20*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (long[]) - y-values
:param yLow: (long[]) - low value in y dimension
:param yHigh: (long[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 21*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 22*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (short[]) - y-values
:param yLow: (short[]) - low value in y dimension
:param yHigh: (short[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 23*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param xLow: (io.deephaven.time.DateTime[]) - low value in x dimension
:param xHigh: (io.deephaven.time.DateTime[]) - high value in x dimension
:param y: (java.util.List<T3>) - y-values
:param yLow: (java.util.List<T4>) - low value in y dimension
:param yHigh: (java.util.List<T5>) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 24*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (T3[]) - y-values
:param yLow: (T4[]) - low value in y dimension
:param yHigh: (T5[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 25*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (double[]) - y-values
:param yLow: (double[]) - low value in y dimension
:param yHigh: (double[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 26*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (float[]) - y-values
:param yLow: (float[]) - low value in y dimension
:param yHigh: (float[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 27*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (int[]) - y-values
:param yLow: (int[]) - low value in y dimension
:param yHigh: (int[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 28*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (long[]) - y-values
:param yLow: (long[]) - low value in y dimension
:param yHigh: (long[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 29*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 30*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (short[]) - y-values
:param yLow: (short[]) - low value in y dimension
:param yHigh: (short[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 31*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param xLow: (java.util.Date[]) - low value in x dimension
:param xHigh: (java.util.Date[]) - high value in x dimension
:param y: (java.util.List<T3>) - y-values
:param yLow: (java.util.List<T4>) - low value in y dimension
:param yHigh: (java.util.List<T5>) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 32*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param xLow: (short[]) - low value in x dimension
:param xHigh: (short[]) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 33*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param xLow: (short[]) - low value in x dimension
:param xHigh: (short[]) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 34*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param xLow: (short[]) - low value in x dimension
:param xHigh: (short[]) - high value in x dimension
:param y: (short[]) - y-values
:param yLow: (short[]) - low value in y dimension
:param yHigh: (short[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 35*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param xLow: (java.util.List<T1>) - low value in x dimension
:param xHigh: (java.util.List<T2>) - high value in x dimension
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 36*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param xLow: (java.util.List<T1>) - low value in x dimension
:param xHigh: (java.util.List<T2>) - high value in x dimension
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 37*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param xLow: (java.util.List<T1>) - low value in x dimension
:param xHigh: (java.util.List<T2>) - high value in x dimension
:param y: (java.util.List<T3>) - y-values
:param yLow: (java.util.List<T4>) - low value in y dimension
:param yHigh: (java.util.List<T5>) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 38*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param x: (java.lang.String) - column in t that holds the x-variable data
:param xLow: (java.lang.String) - column in t that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in t that holds the high value in the x dimension
:param y: (java.lang.String) - column in t that holds the y-variable data
:param yLow: (java.lang.String) - column in t that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in t that holds the high value in the y dimension
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 39*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset (e.g. OneClick filterable table)
:param x: (java.lang.String) - column in sds that holds the x-variable data
:param xLow: (java.lang.String) - column in sds that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in sds that holds the high value in the x dimension
:param y: (java.lang.String) - column in sds that holds the y-variable data
:param yLow: (java.lang.String) - column in sds that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in sds that holds the high value in the y dimension
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.errorBarXY(*args))
@_convertArguments
def errorBarXYBy(self, *args):
"""
Creates an errorBar plot per distinct grouping value specified in byColumns.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param x: (java.lang.String) - column in t that holds the x-variable data
:param xLow: (java.lang.String) - column in t that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in t that holds the high value in the x dimension
:param y: (java.lang.String) - column in t that holds the y-variable data
:param yLow: (java.lang.String) - column in t that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in t that holds the high value in the y dimension
:param byColumns: (java.lang.String...) - column(s) in t that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset (e.g. OneClick filterable table)
:param x: (java.lang.String) - column in sds that holds the x-variable data
:param xLow: (java.lang.String) - column in sds that holds the low value in the x dimension
:param xHigh: (java.lang.String) - column in sds that holds the high value in the x dimension
:param y: (java.lang.String) - column in sds that holds the y-variable data
:param yLow: (java.lang.String) - column in sds that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in sds that holds the high value in the y dimension
:param byColumns: (java.lang.String...) - column(s) in sds that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.errorBarXYBy(*args))
@_convertArguments
def errorBarY(self, *args):
"""
Creates an XY plot with error bars in the y direction.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param y: (T1[]) - y-values
:param yLow: (T2[]) - low value in y dimension
:param yHigh: (T3[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 2*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 3*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 4*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param y: (double[]) - y-values
:param yLow: (double[]) - low value in y dimension
:param yHigh: (double[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 5*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 6*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 7*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param y: (float[]) - y-values
:param yLow: (float[]) - low value in y dimension
:param yHigh: (float[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 8*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 9*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 10*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param y: (int[]) - y-values
:param yLow: (int[]) - low value in y dimension
:param yHigh: (int[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 11*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 12*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 13*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param y: (long[]) - y-values
:param yLow: (long[]) - low value in y dimension
:param yHigh: (long[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 14*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 15*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 16*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (T1[]) - y-values
:param yLow: (T2[]) - low value in y dimension
:param yHigh: (T3[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 17*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (double[]) - y-values
:param yLow: (double[]) - low value in y dimension
:param yHigh: (double[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 18*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (float[]) - y-values
:param yLow: (float[]) - low value in y dimension
:param yHigh: (float[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 19*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (int[]) - y-values
:param yLow: (int[]) - low value in y dimension
:param yHigh: (int[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 20*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (long[]) - y-values
:param yLow: (long[]) - low value in y dimension
:param yHigh: (long[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 21*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 22*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (short[]) - y-values
:param yLow: (short[]) - low value in y dimension
:param yHigh: (short[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 23*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (io.deephaven.time.DateTime[]) - x-values
:param y: (java.util.List<T1>) - y-values
:param yLow: (java.util.List<T2>) - low value in y dimension
:param yHigh: (java.util.List<T3>) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 24*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (T1[]) - y-values
:param yLow: (T2[]) - low value in y dimension
:param yHigh: (T3[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 25*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (double[]) - y-values
:param yLow: (double[]) - low value in y dimension
:param yHigh: (double[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 26*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (float[]) - y-values
:param yLow: (float[]) - low value in y dimension
:param yHigh: (float[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 27*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (int[]) - y-values
:param yLow: (int[]) - low value in y dimension
:param yHigh: (int[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 28*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (long[]) - y-values
:param yLow: (long[]) - low value in y dimension
:param yHigh: (long[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 29*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 30*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (short[]) - y-values
:param yLow: (short[]) - low value in y dimension
:param yHigh: (short[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 31*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.Date[]) - x-values
:param y: (java.util.List<T1>) - y-values
:param yLow: (java.util.List<T2>) - low value in y dimension
:param yHigh: (java.util.List<T3>) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 32*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 33*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 34*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - x-values
:param y: (short[]) - y-values
:param yLow: (short[]) - low value in y dimension
:param yHigh: (short[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 35*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param y: (io.deephaven.time.DateTime[]) - y-values
:param yLow: (io.deephaven.time.DateTime[]) - low value in y dimension
:param yHigh: (io.deephaven.time.DateTime[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 36*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param y: (java.util.Date[]) - y-values
:param yLow: (java.util.Date[]) - low value in y dimension
:param yHigh: (java.util.Date[]) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 37*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - x-values
:param y: (java.util.List<T1>) - y-values
:param yLow: (java.util.List<T2>) - low value in y dimension
:param yHigh: (java.util.List<T3>) - high value in y dimension
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 38*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param x: (java.lang.String) - column in t that holds the x-variable data
:param y: (java.lang.String) - column in t that holds the y-variable data
:param yLow: (java.lang.String) - column in t that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in t that holds the high value in the y dimension
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 39*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset (e.g. OneClick filterable table)
:param x: (java.lang.String) - column in sds that holds the x-variable data
:param y: (java.lang.String) - column in sds that holds the y-variable data
:param yLow: (java.lang.String) - column in sds that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in sds that holds the high value in the y dimension
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.errorBarY(*args))
@_convertArguments
def errorBarYBy(self, *args):
"""
Creates a errorBarY plot per distinct grouping value specified in byColumns.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param x: (java.lang.String) - column in t that holds the x-variable data
:param y: (java.lang.String) - column in t that holds the y-variable data
:param yLow: (java.lang.String) - column in t that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in t that holds the high value in the y dimension
:param byColumns: (java.lang.String...) - column(s) in t that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset (e.g. OneClick filterable table)
:param x: (java.lang.String) - column in sds that holds the x-variable data
:param y: (java.lang.String) - column in sds that holds the y-variable data
:param yLow: (java.lang.String) - column in sds that holds the low value in the y dimension
:param yHigh: (java.lang.String) - column in sds that holds the high value in the y dimension
:param byColumns: (java.lang.String...) - column(s) in sds that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.errorBarYBy(*args))
@_convertArguments
def figureRemoveSeries(self, *names):
"""
Removes all series with names from this Figure.
:param names: (java.lang.String...) - series names
:return: (io.deephaven.plot.Figure) this Figure
"""
return FigureWrapper(figure=self.figure.figureRemoveSeries(*names))
@_convertArguments
def figureTitle(self, title):
"""
Sets the title of this Figure
:param title: (java.lang.String) - title
:return: (io.deephaven.plot.Figure) this Figure
"""
return FigureWrapper(figure=self.figure.figureTitle(title))
@_convertArguments
def figureTitleColor(self, color):
"""
Sets the color of this Figure's title
*Overload 1*
:param color: (java.lang.String) - color
:return: (io.deephaven.plot.Figure) this Figure
*Overload 2*
:param color: (io.deephaven.gui.color.Paint) - color
:return: (io.deephaven.plot.Figure) this Figure
"""
return FigureWrapper(figure=self.figure.figureTitleColor(color))
@_convertArguments
def figureTitleFont(self, *args):
"""
Sets the font of this Figure's title
*Overload 1*
:param font: (io.deephaven.plot.Font) - font
:return: (io.deephaven.plot.Figure) this Figure
*Overload 2*
:param family: (java.lang.String) - font family; if null, set to Arial
:param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
:param size: (int) - the point size of the Font
:return: (io.deephaven.plot.Figure) this Figure
"""
return FigureWrapper(figure=self.figure.figureTitleFont(*args))
@_convertArguments
def funcNPoints(self, npoints):
"""
Sets the number of data points in this dataset.
:param npoints: (int) - number of points
:return: (io.deephaven.plot.Figure) this data series with the specified number of points.
"""
return FigureWrapper(figure=self.figure.funcNPoints(npoints))
@_convertArguments
def funcRange(self, *args):
"""
Sets the data range for this series.
*Overload 1*
:param xmin: (double) - range minimum
:param xmax: (double) - range maximum
:return: (io.deephaven.plot.Figure) this data series with the new range
*Overload 2*
:param xmin: (double) - range minimum
:param xmax: (double) - range maximum
:param npoints: (int) - number of data points
:return: (io.deephaven.plot.Figure) this data series with the new range
"""
return FigureWrapper(figure=self.figure.funcRange(*args))
@_convertArguments
def gradientVisible(self, *args):
"""
Sets whether bar gradients are visible.
*Overload 1*
:param visible: (boolean) - bar gradient visibility
:return: (io.deephaven.plot.Figure) this data series.
*Overload 2*
:param visible: boolean
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
"""
return FigureWrapper(figure=self.figure.gradientVisible(*args))
@_convertArguments
def gridLinesVisible(self, visible):
"""
Sets whether the Chart has grid lines.
:param visible: (boolean) - whether the Chart's grid lines are drawn
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.gridLinesVisible(visible))
@_convertArguments
def group(self, *args):
"""
Sets the group for this dataset.
*Overload 1*
:param group: int
:return: (io.deephaven.plot.Figure) this data series.
*Overload 2*
:param group: int
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
"""
return FigureWrapper(figure=self.figure.group(*args))
@_convertArguments
def histPlot(self, *args):
"""
Creates a histogram.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param counts: (io.deephaven.engine.table.Table) - table
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - data
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 3*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - data
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 4*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - data
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 5*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - data
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 6*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - data
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 7*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - data
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 8*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - data
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 9*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param columnName: (java.lang.String) - column in t
:param nbins: (int) - number of bins in the resulting histogram
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 10*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
:param columnName: (java.lang.String) - column in sds
:param nbins: (int) - number of bins in the resulting histogram
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 11*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (T0[]) - data
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 12*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (double[]) - data
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 13*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (float[]) - data
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 14*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (int[]) - data
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 15*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (long[]) - data
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 16*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (short[]) - data
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 17*
Note: Java generics information - <T0 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param x: (java.util.List<T0>) - data
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 18*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param columnName: (java.lang.String) - column in t
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins in the resulting histogram
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 19*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
:param columnName: (java.lang.String) - column in sds
:param rangeMin: (double) - minimum of the range
:param rangeMax: (double) - maximum of the range
:param nbins: (int) - number of bins in the resulting histogram
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.histPlot(*args))
@_convertArguments
def invert(self, *args):
"""
Inverts this Axis so that larger values are closer to the origin.
*Overload 1*
:return: (io.deephaven.plot.Figure) this Axes
*Overload 2*
:param invert: (boolean) - if true, larger values will be closer to the origin; otherwise, smaller values will be closer to
the origin.
:return: (io.deephaven.plot.Figure) this Axes
"""
return FigureWrapper(figure=self.figure.invert(*args))
@_convertArguments
def legendColor(self, color):
"""
Sets the color of the text inside the Chart's legend.
*Overload 1*
:param color: (java.lang.String) - color
:return: (io.deephaven.plot.Figure) this Chart
*Overload 2*
:param color: (io.deephaven.gui.color.Paint) - color
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.legendColor(color))
@_convertArguments
def legendFont(self, *args):
"""
Sets the font of this Chart's legend.
*Overload 1*
:param font: (io.deephaven.plot.Font) - font
:return: (io.deephaven.plot.Figure) this Chart
*Overload 2*
:param family: (java.lang.String) - font family; if null, set to Arial
:param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
:param size: (int) - the point size of the Font
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.legendFont(*args))
@_convertArguments
def legendVisible(self, visible):
"""
Sets whether the Chart's legend is shown or hidden.
:param visible: (boolean) - whether the Chart's legend is shown or hidden
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.legendVisible(visible))
@_convertArguments
def lineColor(self, *args):
"""
Defines the default line color.
*Overload 1*
:param color: (int) - color palette index
:return: (io.deephaven.plot.Figure) this data series.
*Overload 2*
:param color: int
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
*Overload 3*
:param color: (io.deephaven.gui.color.Paint) - color
:return: (io.deephaven.plot.Figure) this data series.
*Overload 4*
:param color: io.deephaven.gui.color.Paint
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
*Overload 5*
:param color: (java.lang.String) - color
:return: (io.deephaven.plot.Figure) this data series.
*Overload 6*
:param color: java.lang.String
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
"""
return FigureWrapper(figure=self.figure.lineColor(*args))
@_convertArguments
def lineStyle(self, *args):
"""
Sets the line style.
*Overload 1*
:param style: (io.deephaven.plot.LineStyle) - style
:return: (io.deephaven.plot.Figure) this data series.
*Overload 2*
:param style: io.deephaven.plot.LineStyle
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
"""
return FigureWrapper(figure=self.figure.lineStyle(*args))
@_convertArguments
def linesVisible(self, *args):
"""
Sets whether lines are visible.
*Overload 1*
:param visible: (java.lang.Boolean) - line visibility
:return: (io.deephaven.plot.Figure) this data series.
*Overload 2*
:param visible: java.lang.Boolean
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
"""
return FigureWrapper(figure=self.figure.linesVisible(*args))
@_convertArguments
def log(self):
"""
Sets the AxisTransform as log base 10.
:return: (io.deephaven.plot.Figure) this Axis
"""
return FigureWrapper(figure=self.figure.log())
@_convertArguments
def max(self, *args):
"""
Sets the maximum range of this Axis.
*Overload 1*
:param max: (double) - maximum of the range
:return: (io.deephaven.plot.Figure) this Axis
*Overload 2*
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset
:param valueColumn: (java.lang.String) - column in sds, where the maximum value is stored in row 0.
:return: (io.deephaven.plot.Figure) this Axes
"""
return FigureWrapper(figure=self.figure.max(*args))
@_convertArguments
def maxRowsInTitle(self, maxRowsCount):
"""
Sets the maximum row values that will be shown in title.
If total rows < maxRowsCount, then all the values will be shown separated by comma, otherwise just
maxRowsCount values will be shown along with ellipsis.
if maxRowsCount is < 0, all values will be shown.
if maxRowsCount is 0, then just first value will be shown without ellipsis.
The default is 0.
:param maxRowsCount: (int) - maximum number of row values to show in chart title
:return: (io.deephaven.plot.Figure) this Chart
"""
return FigureWrapper(figure=self.figure.maxRowsInTitle(maxRowsCount))
@_convertArguments
def min(self, *args):
"""
Sets the minimum range of this Axis.
*Overload 1*
:param min: (double) - minimum of the range
:return: (io.deephaven.plot.Figure) this Axis
*Overload 2*
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable dataset
:param valueColumn: (java.lang.String) - column in sds, where the minimum value is stored in row 0.
:return: (io.deephaven.plot.Figure) this Axes
"""
return FigureWrapper(figure=self.figure.min(*args))
@_convertArguments
def minorTicks(self, count):
"""
Sets the number of minor ticks between consecutive major ticks. These minor ticks are equally spaced.
:param count: (int) - number of minor ticks between consecutive major ticks.
:return: (io.deephaven.plot.Figure) this Axis
"""
return FigureWrapper(figure=self.figure.minorTicks(count))
@_convertArguments
def minorTicksVisible(self, visible):
"""
Sets whether minor ticks are drawn on this Axis.
:param visible: (boolean) - whether minor ticks are drawn on this Axis
:return: (io.deephaven.plot.Figure) this Axis
"""
return FigureWrapper(figure=self.figure.minorTicksVisible(visible))
@_convertArguments
def newAxes(self, *args):
"""
Creates new Axes on this Chart.
*Overload 1*
:return: (io.deephaven.plot.Figure) newly created Axes with dimension 2 on this Chart
*Overload 2*
:param name: (java.lang.String) - name for the axes
:return: (io.deephaven.plot.Figure) newly created Axes with dimension 2 on this Chart
*Overload 3*
:param dim: (int) - dimensions of the Axes
:return: (io.deephaven.plot.Figure) newly created Axes with dimension dim on this Chart
*Overload 4*
:param name: (java.lang.String) - name for the axes
:param dim: (int) - dimensions of the Axes
:return: (io.deephaven.plot.Figure) newly created Axes with dimension dim on this Chart
"""
return FigureWrapper(figure=self.figure.newAxes(*args))
@_convertArguments
def newChart(self, *args):
"""
Adds a new Chart to this figure.
*Overload 1*
:return: (io.deephaven.plot.Figure) the new Chart. The Chart is placed in the next available grid space, starting at the
upper left hand corner of the grid, going left to right, top to bottom. If no available space is found in
the grid:
* if this Figure was created with no specified grid size, then the Figure will resize itself to add the
new Chart;
* if not, a RuntimeException will be thrown.
*Overload 2*
:param index: (int) - index from the Figure's grid to remove. The index starts at 0 in the upper left hand corner of the
grid and increases going left to right, top to bottom. E.g. for a 2x2 Figure, the indices would be [0, 1]
[2, 3].
:return: (io.deephaven.plot.Figure) the new Chart. The Chart is placed at the grid space indicated by the index.
*Overload 3*
:param rowNum: (int) - row index in this Figure's grid. The row index starts at 0.
:param colNum: (int) - column index in this Figure's grid. The column index starts at 0.
:return: (io.deephaven.plot.Figure) the new Chart. The Chart is placed at the grid space [rowNum, colNum.
"""
return FigureWrapper(figure=self.figure.newChart(*args))
@_convertArguments
def ohlcPlot(self, *args):
"""
Creates an open-high-low-close plot.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.time.DateTime[]) - time data
:param open: (T1[]) - open data
:param high: (T2[]) - high data
:param low: (T3[]) - low data
:param close: (T4[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.time.DateTime[]) - time data
:param open: (double[]) - open data
:param high: (double[]) - high data
:param low: (double[]) - low data
:param close: (double[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 3*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.time.DateTime[]) - time data
:param open: (float[]) - open data
:param high: (float[]) - high data
:param low: (float[]) - low data
:param close: (float[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 4*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.time.DateTime[]) - time data
:param open: (int[]) - open data
:param high: (int[]) - high data
:param low: (int[]) - low data
:param close: (int[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 5*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.time.DateTime[]) - time data
:param open: (long[]) - open data
:param high: (long[]) - high data
:param low: (long[]) - low data
:param close: (long[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 6*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.time.DateTime[]) - time data
:param open: (short[]) - open data
:param high: (short[]) - high data
:param low: (short[]) - low data
:param close: (short[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 7*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.time.DateTime[]) - time data
:param open: (java.util.List<T1>) - open data
:param high: (java.util.List<T2>) - high data
:param low: (java.util.List<T3>) - low data
:param close: (java.util.List<T4>) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 8*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (java.util.Date[]) - time data
:param open: (T1[]) - open data
:param high: (T2[]) - high data
:param low: (T3[]) - low data
:param close: (T4[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 9*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (java.util.Date[]) - time data
:param open: (double[]) - open data
:param high: (double[]) - high data
:param low: (double[]) - low data
:param close: (double[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 10*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (java.util.Date[]) - time data
:param open: (float[]) - open data
:param high: (float[]) - high data
:param low: (float[]) - low data
:param close: (float[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 11*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (java.util.Date[]) - time data
:param open: (int[]) - open data
:param high: (int[]) - high data
:param low: (int[]) - low data
:param close: (int[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 12*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (java.util.Date[]) - time data
:param open: (long[]) - open data
:param high: (long[]) - high data
:param low: (long[]) - low data
:param close: (long[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 13*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (java.util.Date[]) - time data
:param open: (short[]) - open data
:param high: (short[]) - high data
:param low: (short[]) - low data
:param close: (short[]) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 14*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (java.util.Date[]) - time data
:param open: (java.util.List<T1>) - open data
:param high: (java.util.List<T2>) - high data
:param low: (java.util.List<T3>) - low data
:param close: (java.util.List<T4>) - close data
:return: (io.deephaven.plot.Figure) dataset created by the plot
*Overload 15*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param time: (io.deephaven.plot.datasets.data.IndexableNumericData) - time data
:param open: (io.deephaven.plot.datasets.data.IndexableNumericData) - open data
:param high: (io.deephaven.plot.datasets.data.IndexableNumericData) - high data
:param low: (io.deephaven.plot.datasets.data.IndexableNumericData) - low data
:param close: (io.deephaven.plot.datasets.data.IndexableNumericData) - close data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 16*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param timeCol: (java.lang.String) - column in t that holds the time data
:param openCol: (java.lang.String) - column in t that holds the open data
:param highCol: (java.lang.String) - column in t that holds the high data
:param lowCol: (java.lang.String) - column in t that holds the low data
:param closeCol: (java.lang.String) - column in t that holds the close data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 17*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
:param timeCol: (java.lang.String) - column in sds that holds the time data
:param openCol: (java.lang.String) - column in sds that holds the open data
:param highCol: (java.lang.String) - column in sds that holds the high data
:param lowCol: (java.lang.String) - column in sds that holds the low data
:param closeCol: (java.lang.String) - column in sds that holds the close data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.ohlcPlot(*args))
@_convertArguments
def ohlcPlotBy(self, *args):
"""
Creates an open-high-low-close plot per distinct grouping value specified in byColumns.
*Overload 1*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param timeCol: (java.lang.String) - column in t that holds the time data
:param openCol: (java.lang.String) - column in t that holds the open data
:param highCol: (java.lang.String) - column in t that holds the high data
:param lowCol: (java.lang.String) - column in t that holds the low data
:param closeCol: (java.lang.String) - column in t that holds the close data
:param byColumns: (java.lang.String...) - column(s) in t that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
:param timeCol: (java.lang.String) - column in sds that holds the time data
:param openCol: (java.lang.String) - column in sds that holds the open data
:param highCol: (java.lang.String) - column in sds that holds the high data
:param lowCol: (java.lang.String) - column in sds that holds the low data
:param closeCol: (java.lang.String) - column in sds that holds the close data
:param byColumns: (java.lang.String...) - column(s) in sds that holds the grouping data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.ohlcPlotBy(*args))
@_convertArguments
def piePercentLabelFormat(self, *args):
"""
Sets the format of the percentage point label format in pie plots.
*Overload 1*
:param format: (java.lang.String) - format
:return: (io.deephaven.plot.Figure) this data series.
*Overload 2*
:param format: java.lang.String
:param keys: java.lang.Object...
:return: io.deephaven.plot.Figure
"""
return FigureWrapper(figure=self.figure.piePercentLabelFormat(*args))
@_convertCatPlotArguments
def piePlot(self, *args):
"""
**Incompatible overloads text - text from the first overload:**
Creates a pie plot.
Categorical data must not have duplicates.
*Overload 1*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - categories
:param values: (T1[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 2*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - categories
:param values: (double[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 3*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - categories
:param values: (float[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 4*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - categories
:param values: (int[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 5*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - categories
:param values: (long[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 6*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - categories
:param values: (short[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 7*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (T0[]) - categories
:param values: (java.util.List<T1>) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 8*
Note: Java generics information - <T1 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (io.deephaven.plot.datasets.data.IndexableData<T1>) - categories
:param values: (io.deephaven.plot.datasets.data.IndexableNumericData) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 9*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - categories
:param values: (T1[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 10*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - categories
:param values: (double[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 11*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - categories
:param values: (float[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 12*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - categories
:param values: (int[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 13*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - categories
:param values: (long[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 14*
Note: Java generics information - <T0 extends java.lang.Comparable>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - categories
:param values: (short[]) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 15*
Note: Java generics information - <T0 extends java.lang.Comparable,
T1 extends java.lang.Number>
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param categories: (java.util.List<T0>) - categories
:param values: (java.util.List<T1>) - data values
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 16*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param t: (io.deephaven.engine.table.Table) - table
:param categories: (java.lang.String) - column in t with categorical data
:param values: (java.lang.String) - column in t with numerical data
:return: (io.deephaven.plot.Figure) dataset created for plot
*Overload 17*
:param seriesName: (java.lang.Comparable) - name of the created dataset
:param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
:param categories: (java.lang.String) - column in sds with categorical data
:param values: (java.lang.String) - column in sds with numerical data
:return: (io.deephaven.plot.Figure) dataset created for plot
"""
return FigureWrapper(figure=self.figure.piePlot(*args))
@_convertArguments
def plot(self, *args):
    """
    Creates an XY plot.

    The underlying Java method exposes 86 overloads; the supported
    argument patterns are:

    * ``seriesName, function`` -- plot a function, where ``function`` is a
      ``groovy.lang.Closure<T extends java.lang.Number>`` or a
      ``java.util.function.DoubleUnaryOperator``.
    * ``seriesName, x, y`` -- plot explicit data, where ``x`` and ``y`` may
      each independently be a numeric array (``double[]``, ``float[]``,
      ``int[]``, ``long[]``, ``short[]``, or ``T[]`` with
      ``T extends java.lang.Number``), a ``java.util.List`` of Numbers,
      an ``io.deephaven.time.DateTime[]``, or a ``java.util.Date[]``.
    * ``seriesName, t, x, y`` -- plot table data, where ``t`` is an
      ``io.deephaven.engine.table.Table`` and ``x``/``y`` are
      ``java.lang.String`` names of the columns in ``t`` that hold the
      x- and y-variable data.
    * ``seriesName, sds, x, y`` -- same as the table form, for an
      ``io.deephaven.plot.filters.SelectableDataSet`` (e.g. a OneClick
      filterable table); ``x``/``y`` name columns in ``sds``.
    * ``seriesName, x, y, hasXTimeAxis, hasYTimeAxis`` -- plot
      ``io.deephaven.plot.datasets.data.IndexableNumericData`` x/y values,
      with booleans indicating whether the x-/y-values are time data.

    In every form ``seriesName`` is a ``java.lang.Comparable`` giving the
    name of the created dataset.

    :return: (io.deephaven.plot.Figure) dataset created for plot
    """
    # Delegate to the wrapped Java Figure, then re-wrap so calls chain.
    created = self.figure.plot(*args)
    return FigureWrapper(figure=created)
@_convertArguments
def plotBy(self, *args):
    """
    Creates an XY plot per distinct grouping value specified in byColumns.

    Supported argument patterns (``seriesName`` is a ``java.lang.Comparable``
    naming the created dataset; ``x`` and ``y`` are ``java.lang.String``
    column names holding the x- and y-variable data; ``byColumns`` is
    ``java.lang.String...`` naming the grouping column(s) in the data
    source):

    * ``seriesName, t, x, y, byColumns`` -- ``t`` is an
      ``io.deephaven.engine.table.Table``.
    * ``seriesName, sds, x, y, byColumns`` -- ``sds`` is an
      ``io.deephaven.plot.filters.SelectableDataSet`` (e.g. a OneClick
      filterable table).

    :return: (io.deephaven.plot.Figure) dataset created for plot
    """
    # Delegate to the wrapped Java Figure, then re-wrap so calls chain.
    created = self.figure.plotBy(*args)
    return FigureWrapper(figure=created)
@_convertArguments
def plotOrientation(self, orientation):
    """
    Sets the orientation of plots in this Chart.

    :param orientation: (java.lang.String) - plot orientation
    :return: (io.deephaven.plot.Figure) this Chart
    """
    # Delegate to the wrapped Java Figure, then re-wrap so calls chain.
    updated = self.figure.plotOrientation(orientation)
    return FigureWrapper(figure=updated)
@_convertArguments
def plotStyle(self, style):
    """
    Sets the PlotStyle of this Axes.

    :param style: the style to apply, given either as an
        ``io.deephaven.plot.PlotStyle`` or as its ``java.lang.String`` name
    :return: (io.deephaven.plot.Figure) this Axes
    """
    # Delegate to the wrapped Java Figure, then re-wrap so calls chain.
    updated = self.figure.plotStyle(style)
    return FigureWrapper(figure=updated)
@_convertArguments
def pointColor(self, *args):
    """
    Sets the point color. Unspecified points use the default color.

    The underlying Java method exposes 36 overloads; colors may be given
    as:

    * a single color for the whole series: an ``int`` palette index, an
      ``io.deephaven.gui.color.Paint``, or a color-name
      ``java.lang.String``;
    * per-point colors, where the color for data point i comes from index
      i: ``int...`` / ``java.lang.Integer...`` palette indices (a value of
      3 selects the 3rd palette color), ``Paint...``, color-name
      ``String...``, or an
      ``io.deephaven.plot.datasets.data.IndexableData<T>`` with
      ``T extends io.deephaven.gui.color.Paint``;
    * table-driven colors: ``t, columnName`` where ``t`` is an
      ``io.deephaven.engine.table.Table`` and the column supplies the
      color for each row i, or ``t, keyColumn, valueColumn`` where
      ``keyColumn`` holds category values and ``valueColumn`` holds
      Paints or ints/Integers representing color palette values;
    * the same two patterns with an
      ``io.deephaven.plot.filters.SelectableDataSet`` (``sds``, e.g. a
      OneClick filterable table) in place of the table;
    * a per-category color: ``category, color`` where ``category`` is a
      ``java.lang.Comparable`` data point and ``color`` is an ``int``
      palette index, a ``Paint``, or a ``String``;
    * mapping forms: a ``java.util.Map<CATEGORY,COLOR>`` from data points
      to Paints, or a ``groovy.lang.Closure<COLOR>`` /
      ``java.util.function.Function<java.lang.Comparable,COLOR>`` from
      data points to Paints.

    Each form also accepts trailing ``keys: java.lang.Object...``
    selecting which multi-series entries to modify.

    :return: (io.deephaven.plot.Figure) this data series
    """
    # Delegate to the wrapped Java Figure, then re-wrap so calls chain.
    colored = self.figure.pointColor(*args)
    return FigureWrapper(figure=colored)
@_convertArguments
def pointColorByY(self, *args):
    """
    Sets the point color for a data point based upon the y-value.

    ``colors`` maps the y-value of a data point to an
    ``io.deephaven.gui.color.Paint`` and may be given as a
    ``groovy.lang.Closure<T>``, a ``java.util.Map<java.lang.Double,T>``,
    or a ``java.util.function.Function<java.lang.Double,T>``, where
    ``T extends io.deephaven.gui.color.Paint``. Each form also accepts
    trailing ``keys: java.lang.Object...`` selecting which multi-series
    entries to modify.

    :return: (io.deephaven.plot.Figure) this data series
    """
    # Delegate to the wrapped Java Figure, then re-wrap so calls chain.
    colored = self.figure.pointColorByY(*args)
    return FigureWrapper(figure=colored)
@_convertArguments
def pointColorInteger(self, *args):
    """
    Sets the point color. Unspecified points use the default color.

    ``colors`` supplies color-palette indices and may be given as:

    * an ``io.deephaven.plot.datasets.data.IndexableData<java.lang.Integer>``
      -- the color for data point i comes from index i;
    * a ``java.util.Map<CATEGORY,COLOR>`` from data points to palette
      indices;
    * a ``groovy.lang.Closure<COLOR>`` with
      ``COLOR extends java.lang.Integer``, from data points to palette
      indices;
    * a ``java.util.function.Function<java.lang.Comparable,COLOR>`` with
      ``COLOR extends java.lang.Integer``, from data points to palette
      indices.

    Each form also accepts trailing ``keys: java.lang.Object...``
    selecting which multi-series entries to modify.

    :return: (io.deephaven.plot.Figure) this series
    """
    # Delegate to the wrapped Java Figure, then re-wrap so calls chain.
    colored = self.figure.pointColorInteger(*args)
    return FigureWrapper(figure=colored)
@_convertArguments
def pointLabel(self, *args):
    """Set the point label for data point i from index i of the input labels;
    points outside those indices are unlabeled.

    Delegates to the Java ``Figure.pointLabel``. Labels may come from:
      * a ``Table`` (or ``SelectableDataSet``) plus a label column, or plus
        key/value columns for category series;
      * an ``IndexableData<?>`` of labels;
      * a single ``(category, label)`` pair or a single label object;
      * a ``java.lang.Object...`` array of labels;
      * a ``java.util.Map<CATEGORY,LABEL>``, ``groovy.lang.Closure<LABEL>``,
        or ``java.util.function.Function<java.lang.Comparable,LABEL>``.
    Each form may optionally be followed by multi-series ``keys``
    (``java.lang.Object...``).

    :return: (io.deephaven.plot.Figure) this data series
    """
    updated_figure = self.figure.pointLabel(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def pointLabelFormat(self, *args):
    """Set the point label format.

    Delegates to the Java ``Figure.pointLabelFormat``. Use {0} where the data
    series name should be inserted, {1} for the x-value and {2} for the
    y-value, e.g. "{0}: ({1}, {2})" renders as Series1: (2.0, 5.5). The
    ``format`` (``java.lang.String``) may be followed by multi-series ``keys``
    (``java.lang.Object...``).

    :return: (io.deephaven.plot.Figure) this data series
    """
    updated_figure = self.figure.pointLabelFormat(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def pointShape(self, *args):
    """Set the point shape for data point i from index i of the input shapes;
    points outside those indices use default shapes.

    Delegates to the Java ``Figure.pointShape``. Shapes may come from:
      * a ``Table`` (or ``SelectableDataSet``) plus a shape column, or plus
        key/value columns for category series;
      * a single ``io.deephaven.gui.shape.Shape`` or shape name
        (``java.lang.String``), or arrays of either;
      * an ``IndexableData<java.lang.String>`` of shape names;
      * a ``(category, shape)`` pair (``Shape`` or ``String``);
      * a ``groovy.lang.Closure<java.lang.String>``,
        ``java.util.function.Function<java.lang.Comparable,java.lang.String>``,
        or ``java.util.Map<CATEGORY,java.lang.String>``.
    Each form may optionally be followed by multi-series ``keys``
    (``java.lang.Object...``).

    :return: (io.deephaven.plot.Figure) this data series
    """
    updated_figure = self.figure.pointShape(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def pointSize(self, *args):
    """Set the point size as a scale factor on the default size (1 is the
    default, 2 is twice the default); unspecified points use the default size.

    Delegates to the Java ``Figure.pointSize``. Scale factors may come from:
      * a single numeric factor (``double``, ``int``, ``long``, or
        ``java.lang.Number``), or an array of factors (``double[]``,
        ``int[]``, ``long[]``, ``T[]`` with ``T`` extends
        ``java.lang.Number``);
      * a ``Table`` (or ``SelectableDataSet``) plus a factor column, or plus
        key/value columns for category series;
      * an ``IndexableData<java.lang.Double>`` of factors;
      * a ``(category, factor)`` pair, or parallel ``categories``/``factors``
        arrays;
      * a ``java.util.Map<CATEGORY,NUMBER>``, ``groovy.lang.Closure<NUMBER>``,
        or ``java.util.function.Function<java.lang.Comparable,NUMBER>``
        (``NUMBER`` extends ``java.lang.Number``).
    Each form may optionally be followed by multi-series ``keys``
    (``java.lang.Object...``).

    :return: (io.deephaven.plot.Figure) this data series
    """
    updated_figure = self.figure.pointSize(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def pointsVisible(self, *args):
    """Set whether points are visible.

    Delegates to the Java ``Figure.pointsVisible``. The ``visible`` flag
    (``java.lang.Boolean``) may be followed by multi-series ``keys``
    (``java.lang.Object...``).

    :return: (io.deephaven.plot.Figure) this data series
    """
    updated_figure = self.figure.pointsVisible(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def range(self, min, max):
    """Set the range of this Axis to [min, max] inclusive.

    Delegates to the Java ``Figure.range``. (The parameter names ``min`` and
    ``max`` mirror the Java API and are kept for keyword-call compatibility,
    even though they shadow the Python builtins.)

    :param min: (double) - minimum of the range
    :param max: (double) - maximum of the range
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated_figure = self.figure.range(min, max)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def removeChart(self, *args):
    """Remove a chart from the Figure's grid.

    Delegates to the Java ``Figure.removeChart``. Accepts either a single flat
    ``index`` (int) — starting at 0 in the upper-left corner and increasing
    left to right, top to bottom (for a 2x2 Figure the indices are [0, 1]
    [2, 3]) — or a ``(rowNum, colNum)`` pair of 0-based grid coordinates.

    :return: (io.deephaven.plot.Figure) this Figure with the chart removed
    """
    updated_figure = self.figure.removeChart(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def rowSpan(self, n):
    """Set how many rows tall this Chart is within the grid of the figure.

    Delegates to the Java ``Figure.rowSpan``.

    :param n: (int) - how many rows tall
    :return: (io.deephaven.plot.Figure) this Chart
    """
    updated_figure = self.figure.rowSpan(n)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def save(self, *args):
    """Save the Figure as an image.

    Delegates to the Java ``Figure.save``. Accepted argument forms:
      * saveLocation (java.lang.String, must not be null)
      * saveLocation, width (int), height (int)
      * saveLocation, wait (boolean: hold the calling thread until the file
        is written), timeoutSeconds (long)
      * saveLocation, width, height, wait, timeoutSeconds

    :return: (io.deephaven.plot.Figure) figure
    """
    updated_figure = self.figure.save(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def series(self, *args):
    """Get a data series by id or by name.

    Delegates to the Java ``Figure.series``. Accepts either a series ``id``
    (int) or a series ``name`` (``java.lang.Comparable``).

    :return: (io.deephaven.plot.Figure) selected data series
    """
    updated_figure = self.figure.series(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def seriesColor(self, *args):
    """Define the default line and point color.

    Delegates to the Java ``Figure.seriesColor``. The ``color`` may be an
    ``int`` palette index, an ``io.deephaven.gui.color.Paint``, or a
    ``java.lang.String`` color name, optionally followed by multi-series
    ``keys`` (``java.lang.Object...``).

    :return: (io.deephaven.plot.Figure) this data series
    """
    updated_figure = self.figure.seriesColor(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def seriesNamingFunction(self, namingFunction):
    """Define the procedure to name a generated series.

    Delegates to the Java ``Figure.seriesNamingFunction``. The input of the
    naming function is the table map key corresponding to the new series.

    :param namingFunction: (groovy.lang.Closure<java.lang.String> or
        java.util.function.Function<java.lang.Object,java.lang.String>) -
        series naming closure/function
    :return: io.deephaven.plot.Figure
    """
    updated_figure = self.figure.seriesNamingFunction(namingFunction)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def span(self, rowSpan, colSpan):
    """Set the size of this Chart within the grid of the figure.

    Delegates to the Java ``Figure.span``.

    :param rowSpan: (int) - how many rows tall
    :param colSpan: (int) - how many columns wide
    :return: (io.deephaven.plot.Figure) this Chart
    """
    updated_figure = self.figure.span(rowSpan, colSpan)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def tickLabelAngle(self, angle):
    """Set the angle the tick labels of this Axis are drawn at.

    Delegates to the Java ``Figure.tickLabelAngle``.

    :param angle: (double) - angle in degrees
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated_figure = self.figure.tickLabelAngle(angle)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def ticks(self, *args):
    """Set the tick locations.

    Delegates to the Java ``Figure.ticks``. Accepts either ``tickLocations``
    (double[]) giving the coordinates of the major tick locations, or a
    single ``gapBetweenTicks`` (double) giving the distance between ticks —
    e.g. with a gap of 5.0 and a first tick at 10.0, the next tick is drawn
    at 15.0.

    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated_figure = self.figure.ticks(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def ticksFont(self, *args):
    """Set the font for this Axis's ticks.

    Delegates to the Java ``Figure.ticksFont``. Accepts either a single
    ``font`` (``io.deephaven.plot.Font``) or a ``(family, style, size)``
    triple: family (String; if null, set to Arial), style (String; if null,
    set to Font.FontStyle PLAIN), and the point size (int).

    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated_figure = self.figure.ticksFont(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def ticksVisible(self, visible):
    """Set whether ticks are drawn on this Axis.

    Delegates to the Java ``Figure.ticksVisible``.

    :param visible: (boolean) - whether ticks are drawn on this Axis
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated_figure = self.figure.ticksVisible(visible)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def toolTipPattern(self, *args):
    """Set the tooltip format.

    Delegates to the Java ``Figure.toolTipPattern``. The ``format``
    (``java.lang.String``) may be followed by multi-series ``keys``
    (``java.lang.Object...``).

    :return: (io.deephaven.plot.Figure) this data series
    """
    updated_figure = self.figure.toolTipPattern(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def transform(self, transform):
    """Set the AxisTransform for this Axis.

    Delegates to the Java ``Figure.transform``.

    :param transform: (io.deephaven.plot.axistransformations.AxisTransform) - transform
    :return: (io.deephaven.plot.Figure) this Axis
    """
    updated_figure = self.figure.transform(transform)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def twin(self, *args):
    """Create a new Axes instance sharing the same Axis objects as this Axes.

    Delegates to the Java ``Figure.twin``. The resultant Axes has the same
    range, ticks, etc. as this Axes (fields of the shared Axis) but may
    have, for example, a different PlotStyle. Accepted argument forms:
      * no arguments — axes name equals the string form of the axes id;
      * name (java.lang.String);
      * dim (int) — axis dimension to share (x-axis is 0, y-axis is 1);
      * name, dim.

    :return: (io.deephaven.plot.Figure) the new Axes instance
    """
    updated_figure = self.figure.twin(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def twinX(self, *args):
    """Create a new Axes instance sharing the same x-Axis as this Axes.

    Delegates to the Java ``Figure.twinX``. The resultant Axes has the same
    x-axis range, ticks, etc. (properties of the shared Axis) but may have a
    different PlotStyle. Call with no arguments (the axes name will equal
    the string form of the axes id) or with a ``name``
    (``java.lang.String``).

    :return: (io.deephaven.plot.Figure) the new Axes instance
    """
    updated_figure = self.figure.twinX(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def twinY(self, *args):
    """Create a new Axes instance sharing the same y-Axis as this Axes.

    Delegates to the Java ``Figure.twinY``. The resultant Axes has the same
    y-axis range, ticks, etc. (properties of the shared Axis) but may have a
    different PlotStyle. Call with no arguments (the axes name will equal
    the string form of the axes id) or with a ``name``
    (``java.lang.String``).

    :return: (io.deephaven.plot.Figure) the new Axes instance
    """
    updated_figure = self.figure.twinY(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def updateInterval(self, updateIntervalMillis):
    """Set the update interval of this Figure; the plot is redrawn at this
    interval.

    Delegates to the Java ``Figure.updateInterval``.

    :param updateIntervalMillis: (long) - update interval, in milliseconds
    :return: (io.deephaven.plot.Figure) this Figure
    """
    updated_figure = self.figure.updateInterval(updateIntervalMillis)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def xAxis(self):
    """Get the Axis representing the x-axis.

    Delegates to the Java ``Figure.xAxis``.

    :return: (io.deephaven.plot.Figure) x-dimension Axis
    """
    updated_figure = self.figure.xAxis()
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def xBusinessTime(self, *args):
    """Set the AxisTransform of the x-Axis as an AxisTransformBusinessCalendar.

    Delegates to the Java ``Figure.xBusinessTime``. Accepted argument forms:
      * no arguments — use the default BusinessCalendar;
      * calendar (io.deephaven.time.calendar.BusinessCalendar);
      * sds (io.deephaven.plot.filters.SelectableDataSet, e.g. a OneClick
        filterable table) and valueColumn (String) naming a column of
        BusinessCalendar names — the calendar from row 0 of the filtered sds
        is used; if no value is found, no transform is applied.

    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated_figure = self.figure.xBusinessTime(*args)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def xColor(self, color):
    """Set the color of the x-Axis.

    Delegates to the Java ``Figure.xColor``.

    :param color: (java.lang.String or io.deephaven.gui.color.Paint) - color
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated_figure = self.figure.xColor(color)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def xFormat(self, format):
    """Set the AxisFormat of the x-Axis.

    Delegates to the Java ``Figure.xFormat``.

    :param format: (io.deephaven.plot.axisformatters.AxisFormat) - format
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated_figure = self.figure.xFormat(format)
    return FigureWrapper(figure=updated_figure)
@_convertArguments
def xFormatPattern(self, pattern):
    """Set the format pattern of the x-Axis.

    :param pattern: (java.lang.String) - pattern
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xFormatPattern(pattern)
    return FigureWrapper(figure=updated)
@_convertArguments
def xGridLinesVisible(self, visible):
    """Set whether the Chart has grid lines in the x direction.

    :param visible: (boolean) - whether the Chart's x grid lines are drawn
    :return: (io.deephaven.plot.Figure) this Chart
    """
    updated = self.figure.xGridLinesVisible(visible)
    return FigureWrapper(figure=updated)
@_convertArguments
def xInvert(self, *args):
    """Invert the x-Axis so that larger values are closer to the origin.

    *Overload 1*
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param invert: (boolean) - if true, larger values will be closer to the origin
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xInvert(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def xLabel(self, label):
    """Set the label of the x-Axis.

    :param label: (java.lang.String) - label
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xLabel(label)
    return FigureWrapper(figure=updated)
@_convertArguments
def xLabelFont(self, *args):
    """Set the font for the x-Axis label.

    *Overload 1*
      :param font: (io.deephaven.plot.Font) - font
      :return: (io.deephaven.plot.Figure) this Axis
    *Overload 2*
      :param family: (java.lang.String) - font family; if null, set to Arial
      :param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
      :param size: (int) - the point size of the Font
      :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.xLabelFont(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def xLog(self):
    """Set the AxisTransform of the x-Axis to log base 10.

    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xLog()
    return FigureWrapper(figure=updated)
@_convertArguments
def xMax(self, *args):
    """Set the maximum of the x-Axis.

    *Overload 1*
      :param max: (double) - maximum of the x-range
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
      :param valueColumn: (java.lang.String) - column in sds. The value in row 0 is used for the maximum.
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xMax(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def xMin(self, *args):
    """Set the minimum of the x-Axis.

    *Overload 1*
      :param min: (double) - minimum of the x-range
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
      :param valueColumn: (java.lang.String) - column in sds. The value in row 0 is used for the minimum.
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xMin(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def xMinorTicks(self, count):
    """Set the number of equally spaced minor ticks between consecutive major ticks on the x-Axis.

    :param count: (int) - number of minor ticks between consecutive major ticks.
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xMinorTicks(count)
    return FigureWrapper(figure=updated)
@_convertArguments
def xMinorTicksVisible(self, visible):
    """Set whether the x-Axis minor ticks are visible.

    :param visible: (boolean) - whether the minor ticks are visible
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xMinorTicksVisible(visible)
    return FigureWrapper(figure=updated)
@_convertArguments
def xRange(self, min, max):
    """Set the range of the x-Axis.

    :param min: (double) - minimum of the range
    :param max: (double) - maximum of the range
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xRange(min, max)
    return FigureWrapper(figure=updated)
@_convertArguments
def xTickLabelAngle(self, angle):
    """Set the angle at which the x-Axis tick labels are drawn.

    :param angle: (double) - angle in degrees
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xTickLabelAngle(angle)
    return FigureWrapper(figure=updated)
@_convertArguments
def xTicks(self, *args):
    """Set the x-Axis ticks.

    *Overload 1*
      :param tickLocations: (double[]) - locations of the major ticks
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param gapBetweenTicks: (double) - spacing between major ticks
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xTicks(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def xTicksFont(self, *args):
    """Set the font for the x-Axis ticks.

    *Overload 1*
      :param font: (io.deephaven.plot.Font) - font
      :return: (io.deephaven.plot.Figure) this Axis
    *Overload 2*
      :param family: (java.lang.String) - font family; if null, set to Arial
      :param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
      :param size: (int) - the point size of the Font
      :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.xTicksFont(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def xTicksVisible(self, visible):
    """Set whether the x-Axis ticks are visible.

    :param visible: (boolean) - whether the ticks are visible
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xTicksVisible(visible)
    return FigureWrapper(figure=updated)
@_convertArguments
def xToolTipPattern(self, *args):
    """Set the x-value tooltip format.

    *Overload 1*
      :param format: (java.lang.String) - format
      :return: (io.deephaven.plot.Figure) this data series.
    *Overload 2*
      :param format: java.lang.String
      :param keys: java.lang.Object...
      :return: io.deephaven.plot.Figure
    """
    updated = self.figure.xToolTipPattern(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def xTransform(self, transform):
    """Set the AxisTransform of the x-Axis.

    :param transform: (io.deephaven.plot.axistransformations.AxisTransform) - transform
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.xTransform(transform)
    return FigureWrapper(figure=updated)
@_convertArguments
def yAxis(self):
    """Get the Axis representing the y-axis.

    :return: (io.deephaven.plot.Figure) y-dimension Axis
    """
    updated = self.figure.yAxis()
    return FigureWrapper(figure=updated)
@_convertArguments
def yBusinessTime(self, *args):
    """Set the AxisTransform of the y-Axis as an AxisTransformBusinessCalendar.

    *Overload 1*
      :return: (io.deephaven.plot.Figure) this Axes using the default BusinessCalendar for the y-Axis.
    *Overload 2*
      :param calendar: (io.deephaven.time.calendar.BusinessCalendar) - business calendar for the AxisTransformBusinessCalendar
      :return: (io.deephaven.plot.Figure) this Axes using the calendar for the y-Axis business calendar.
    *Overload 3*
      :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table) containing the business calendar.
      :param valueColumn: (java.lang.String) - name of a column containing String values, where each value is the name of a BusinessCalendar.
      :return: (io.deephaven.plot.Figure) this Axes using the business calendar from row 0 of the filtered sds
               for the y-Axis business calendar. If no value is found, no transform will be applied.
    """
    updated = self.figure.yBusinessTime(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yColor(self, color):
    """Set the color of the y-Axis.

    :param color: (java.lang.String or io.deephaven.gui.color.Paint) - color
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yColor(color)
    return FigureWrapper(figure=updated)
@_convertArguments
def yFormat(self, format):
    """Set the AxisFormat of the y-Axis.

    :param format: (io.deephaven.plot.axisformatters.AxisFormat) - format
        (name shadows the builtin but is part of the generated public signature)
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yFormat(format)
    return FigureWrapper(figure=updated)
@_convertArguments
def yFormatPattern(self, pattern):
    """Set the format pattern of the y-Axis.

    :param pattern: (java.lang.String) - pattern
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yFormatPattern(pattern)
    return FigureWrapper(figure=updated)
@_convertArguments
def yGridLinesVisible(self, visible):
    """Set whether the Chart has grid lines in the y direction.

    :param visible: (boolean) - whether the Chart's y grid lines are drawn
    :return: (io.deephaven.plot.Figure) this Chart
    """
    updated = self.figure.yGridLinesVisible(visible)
    return FigureWrapper(figure=updated)
@_convertArguments
def yInvert(self, *args):
    """Invert the y-Axis so that larger values are closer to the origin.

    *Overload 1*
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param invert: (boolean) - if true, larger values will be closer to the origin
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yInvert(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yLabel(self, label):
    """Set the label of the y-Axis.

    :param label: (java.lang.String) - label
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yLabel(label)
    return FigureWrapper(figure=updated)
@_convertArguments
def yLabelFont(self, *args):
    """Set the font for the y-Axis label.

    *Overload 1*
      :param font: (io.deephaven.plot.Font) - font
      :return: (io.deephaven.plot.Figure) this Axis
    *Overload 2*
      :param family: (java.lang.String) - font family; if null, set to Arial
      :param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
      :param size: (int) - the point size of the Font
      :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.yLabelFont(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yLog(self):
    """Set the AxisTransform of the y-Axis to log base 10.

    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yLog()
    return FigureWrapper(figure=updated)
@_convertArguments
def yMax(self, *args):
    """Set the maximum of the y-Axis.

    *Overload 1*
      :param max: (double) - maximum of the y-range
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
      :param valueColumn: (java.lang.String) - column in sds. The value in row 0 is used for the maximum.
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yMax(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yMin(self, *args):
    """Set the minimum of the y-Axis.

    *Overload 1*
      :param min: (double) - minimum of the y-range
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param sds: (io.deephaven.plot.filters.SelectableDataSet) - selectable data set (e.g. OneClick filterable table)
      :param valueColumn: (java.lang.String) - column in sds. The value in row 0 is used for the minimum.
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yMin(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yMinorTicks(self, count):
    """Set the number of equally spaced minor ticks between consecutive major ticks on the y-Axis.

    :param count: (int) - number of minor ticks between consecutive major ticks.
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yMinorTicks(count)
    return FigureWrapper(figure=updated)
@_convertArguments
def yMinorTicksVisible(self, visible):
    """Set whether the y-Axis minor ticks are visible.

    :param visible: (boolean) - whether the minor ticks are visible
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yMinorTicksVisible(visible)
    return FigureWrapper(figure=updated)
@_convertArguments
def yRange(self, min, max):
    """Set the range of the y-Axis.

    :param min: (double) - minimum of the range
    :param max: (double) - maximum of the range
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yRange(min, max)
    return FigureWrapper(figure=updated)
@_convertArguments
def yTickLabelAngle(self, angle):
    """Set the angle at which the y-Axis tick labels are drawn.

    :param angle: (double) - angle in degrees
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yTickLabelAngle(angle)
    return FigureWrapper(figure=updated)
@_convertArguments
def yTicks(self, *args):
    """Set the y-Axis ticks.

    *Overload 1*
      :param tickLocations: (double[]) - locations of the major ticks
      :return: (io.deephaven.plot.Figure) this Axes
    *Overload 2*
      :param gapBetweenTicks: (double) - spacing between major ticks
      :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yTicks(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yTicksFont(self, *args):
    """Set the font for the y-Axis ticks.

    *Overload 1*
      :param font: (io.deephaven.plot.Font) - font
      :return: (io.deephaven.plot.Figure) this Axis
    *Overload 2*
      :param family: (java.lang.String) - font family; if null, set to Arial
      :param style: (java.lang.String) - font style; if null, set to Font.FontStyle PLAIN
      :param size: (int) - the point size of the Font
      :return: (io.deephaven.plot.Figure) this Axis
    """
    updated = self.figure.yTicksFont(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yTicksVisible(self, visible):
    """Set whether the y-Axis ticks are visible.

    :param visible: (boolean) - whether the ticks are visible
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yTicksVisible(visible)
    return FigureWrapper(figure=updated)
@_convertArguments
def yToolTipPattern(self, *args):
    """Set the y-value tooltip format.

    *Overload 1*
      :param format: (java.lang.String) - format
      :return: (io.deephaven.plot.Figure) this data series.
    *Overload 2*
      :param format: java.lang.String
      :param keys: java.lang.Object...
      :return: io.deephaven.plot.Figure
    """
    updated = self.figure.yToolTipPattern(*args)
    return FigureWrapper(figure=updated)
@_convertArguments
def yTransform(self, transform):
    """Set the AxisTransform of the y-Axis.

    :param transform: (io.deephaven.plot.axistransformations.AxisTransform) - transform
    :return: (io.deephaven.plot.Figure) this Axes
    """
    updated = self.figure.yTransform(transform)
    return FigureWrapper(figure=updated)
| 44.982179
| 170
| 0.593196
| 28,921
| 244,838
| 5.013623
| 0.026486
| 0.072828
| 0.076449
| 0.094283
| 0.914868
| 0.90591
| 0.896641
| 0.885296
| 0.878399
| 0.868834
| 0
| 0.007789
| 0.302551
| 244,838
| 5,442
| 171
| 44.990445
| 0.841341
| 0.757922
| 0
| 0.313929
| 0
| 0
| 0.0121
| 0.004396
| 0
| 0
| 0
| 0
| 0
| 1
| 0.301455
| false
| 0.002079
| 0.014553
| 0.006237
| 0.629938
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
7e9354a4b687054e7eaa33ed5af4d8239dcbcc7c
| 23,413
|
py
|
Python
|
tests/test_stormtrack/test_core/test_features/data/circle_on_globe_clat-90_rad-800_delta-0.5_dyntools.py
|
ruestefa/stormtrack
|
e9378f013c406d387ea944c97e5adc68df864dee
|
[
"MIT"
] | null | null | null |
tests/test_stormtrack/test_core/test_features/data/circle_on_globe_clat-90_rad-800_delta-0.5_dyntools.py
|
ruestefa/stormtrack
|
e9378f013c406d387ea944c97e5adc68df864dee
|
[
"MIT"
] | 2
|
2021-01-06T17:37:42.000Z
|
2021-02-05T18:40:52.000Z
|
tests/test_stormtrack/test_core/test_features/data/circle_on_globe_clat-90_rad-800_delta-0.5_dyntools.py
|
ruestefa/stormtrack
|
e9378f013c406d387ea944c97e5adc68df864dee
|
[
"MIT"
] | null | null | null |
import numpy as np

# Test fixture: boolean mask of the lat/lon grid points lying inside a circle
# of radius 800 km centered on the north pole (clat=90), on a 0.5-degree grid.
# fmt: off
clon, clat = 0.0, 90.0
rad_km = 800.0
area_km2 = np.pi * rad_km**2
nlat, nlon = 16, 721
lat1d = np.linspace(82.5, 90.0, nlat)
lon1d = np.linspace(-180.0, 180.0, nlon)
lat2d, lon2d = np.meshgrid(lat1d, lon1d)
# Kept for compatibility with the original fixture file's conventions.
_, X = 0, 1

# The original file spelled the mask as a literal (nlat, nlon) array of 0/1
# flags: rows 0..14 all ones (inside the circle) and row 15 all zeros, then
# transposed and column-reversed to (nlon, nlat). After `.T[:, ::-1]` that
# leaves column 0 — the lat=82.5 ring, ~833 km from the pole and thus outside
# the 800 km radius — False, and every other column True. Build the identical
# array programmatically.
#
# BUGFIX: the original used `np.bool`, which was deprecated in NumPy 1.20 and
# removed in NumPy 1.24 (raises AttributeError). Use the builtin `bool` dtype.
_rows = np.ones((nlat, nlon), dtype=bool)
_rows[nlat - 1, :] = False
mask = _rows.T[:, ::-1]
| 709.484848
| 1,444
| 0.500448
| 10,874
| 23,413
| 1.010852
| 0.003311
| 1.967613
| 2.951146
| 3.934498
| 0.983897
| 0.983897
| 0.983897
| 0.983897
| 0.983897
| 0.983897
| 0
| 0.001671
| 0.002947
| 23,413
| 32
| 1,445
| 731.65625
| 0.4692
| 0.000342
| 0
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.037037
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
0e4c1425fc02a349ef9a697c3d3a0e49d4491e62
| 9,576
|
py
|
Python
|
tests/blocks/protocol/idmframer_spec.py
|
konsumer/luaradio
|
d349b82f992bb0e95fd68b8c2867399aa68a40ea
|
[
"MIT"
] | 559
|
2016-07-02T19:07:39.000Z
|
2022-03-28T15:02:21.000Z
|
tests/blocks/protocol/idmframer_spec.py
|
konsumer/luaradio
|
d349b82f992bb0e95fd68b8c2867399aa68a40ea
|
[
"MIT"
] | 68
|
2016-07-03T05:35:47.000Z
|
2022-03-30T21:24:07.000Z
|
tests/blocks/protocol/idmframer_spec.py
|
konsumer/luaradio
|
d349b82f992bb0e95fd68b8c2867399aa68a40ea
|
[
"MIT"
] | 64
|
2016-07-02T23:59:10.000Z
|
2022-02-02T18:11:07.000Z
|
import numpy
from generate import *


def generate():
    """Build the LuaRadio test vectors for the IDMFramer block spec."""

    def test_vector_wrapper(frames):
        # Wrap the expected frame literals into a Lua expression that
        # constructs an IDMFrameType vector for the spec runner.
        template = "require('radio.blocks.protocol.idmframer').IDMFrameType.vector_from_array({%s})"
        return [template % (','.join(frames))]

    # Known-good IDM frames as raw bit streams, paired with the Lua table
    # literal each one is expected to decode to. The bit data is exact
    # reference data — do not edit by hand.
    frame1_bits = numpy.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1,
        0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0], dtype=numpy.bool_)
    frame1_object = "{0x04, 0x17, 13897632, 172, 188, \"\\x02\\x00\\x00\\x0f\\x08\\x00\", \"\\x00\\x00\", \"\\x00\\x00\\x00\\x00\\x00\\x00\", 1436228, \"\\x01\\x80\\x80\\x60\\x30\\x18\\x08\\x06\\x03\\x01\\x00\\xc0\\xa0\\x70\\x38\\x14\\x0c\\x06\\x01\\x01\\x00\\xa0\\x40\\x10\\x0c\\x08\\x04\\x02\\x01\\x00\\x80\\xa0\\x50\\x28\\x14\\x0a\\x05\\x02\\x41\\x20\\x90\\x48\\x28\\x12\\x09\\x04\\x82\\x41\\x20\\xa0\\x48\\x24\\x12\", 1390, 0x67bf, 0x791a}"
    frame2_bits = numpy.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1], dtype=numpy.bool_)
    frame2_object = "{0x04, 0x17, 11278109, 247, 188, \"\\x02\\x01\\x00\\xef\\x09\\x00\", \"\\x00\\x00\", \"\\x00\\x00\\x00\\x00\\x00\\x00\", 339972, \"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x40\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x10\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x04\\x00\", 1472, 0xeaba, 0x57af}"
    frame3_bits = numpy.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1,
        1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1], dtype=numpy.bool_)
    frame3_object = "{0x04, 0x17, 1550406067, 34, 184, \"\\x00\\x05\\x00\\x0e\\x01\\x00\", \"\\x00\\x00\", \"\\x00\\x00\\x00\\x00\\x00\\x00\", 8231734, \"\\x07\\x83\\xc1\\xe1\\x00\\x68\\x48\\x22\\x11\\x07\\x83\\xc2\\x21\\x20\\x98\\x5c\\x36\\x18\\x08\\x84\\x42\\x41\\x20\\x80\\x3c\\x1c\\x17\\x09\\x85\\xc3\\x81\\x90\\xc0\\x60\\x20\\x13\\x0b\\x06\\x02\\x21\\x20\\x90\\x28\\x16\\x0c\\x06\\x03\\x41\\xa0\\xd0\\x70\\x34\\x26\", 2179, 0xeefa, 0xfd7d}"

    vectors = []

    # Clean frames padded with random bits on both sides.
    x = numpy.hstack([random_bit(20), frame1_bits, random_bit(20)])
    vectors.append(TestVector([], [x], test_vector_wrapper([frame1_object]), "Valid frame 1"))
    x = numpy.hstack([random_bit(20), frame2_bits, random_bit(20)])
    vectors.append(TestVector([], [x], test_vector_wrapper([frame2_object]), "Valid frame 2"))
    x = numpy.hstack([random_bit(20), frame3_bits, random_bit(20)])
    vectors.append(TestVector([], [x], test_vector_wrapper([frame3_object]), "Valid frame 3"))

    # Single-bit errors: the framer's error correction should still
    # recover the same decoded frame.
    x = numpy.hstack([random_bit(20), frame1_bits, random_bit(20)])
    x[160] = not x[160]
    vectors.append(TestVector([], [x], test_vector_wrapper([frame1_object]), "Frame 1 with message bit error"))
    x = numpy.hstack([random_bit(20), frame2_bits, random_bit(20)])
    x[748] = not x[748]
    vectors.append(TestVector([], [x], test_vector_wrapper([frame2_object]), "Frame 2 with crc bit error"))

    # Back-to-back frames in one stream.
    x = numpy.hstack([random_bit(20), frame1_bits, random_bit(20), frame2_bits, random_bit(20), frame3_bits, random_bit(20)])
    vectors.append(TestVector([], [x], test_vector_wrapper([frame1_object, frame2_object, frame3_object]), "Three frames"))

    return BlockSpec("IDMFramerBlock", vectors, 1e-6)
| 228
| 2,258
| 0.42878
| 2,664
| 9,576
| 1.520646
| 0.050676
| 0.687731
| 0.887929
| 1.030857
| 0.767218
| 0.766724
| 0.763762
| 0.763762
| 0.757344
| 0.704024
| 0
| 0.389893
| 0.260234
| 9,576
| 41
| 2,259
| 233.560976
| 0.181959
| 0
| 0
| 0.142857
| 1
| 0
| 0.044486
| 0.00825
| 0
| 0
| 0.006266
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.214286
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
0e9629cc8f5afaef7554b168eba23f7443b9e177
| 5,667
|
py
|
Python
|
test/integration/test_single_route.py
|
mathewmarcus/apispec-chalice
|
7e96b37342de3faa2f9c3aa022ba139edf5fcd1e
|
[
"MIT"
] | 15
|
2018-12-05T20:17:33.000Z
|
2022-02-16T20:51:16.000Z
|
test/integration/test_single_route.py
|
mathewmarcus/apispec-chalice
|
7e96b37342de3faa2f9c3aa022ba139edf5fcd1e
|
[
"MIT"
] | 1
|
2019-02-08T23:39:05.000Z
|
2019-12-19T11:23:10.000Z
|
test/integration/test_single_route.py
|
mathewmarcus/apispec-chalice
|
7e96b37342de3faa2f9c3aa022ba139edf5fcd1e
|
[
"MIT"
] | 3
|
2021-05-12T11:54:09.000Z
|
2021-09-17T08:10:12.000Z
|
def test_single_route_no_docstring_no_path_no_ops(app, spec):
    # No docstring, no explicit path or operations: the route alone must
    # register the path with a default empty 'get' operation.
    @app.route('/gists/{gist_id}')
    def gist_detail(gist_id):
        pass

    spec.add_path(app=app, view=gist_detail)

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {}
def test_single_route_docstring_no_path_no_ops(app, spec):
    # The view docstring carries OpenAPI YAML that add_path parses;
    # do not edit it as documentation — it is runtime data.
    # NOTE(review): YAML nesting reconstructed from a whitespace-mangled source — verify.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        '''
        ---
        get:
            responses:
                200:
                    schema:
                        $ref: '#/definitions/Gist'
        '''
        pass

    spec.add_path(app=app, view=gist_detail)

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {
        'responses': {
            200: {
                'schema': {
                    '$ref': '#/definitions/Gist'
                }
            }
        }
    }
def test_single_route_no_docstring_path_no_ops(app, spec):
    # Explicit path argument, no docstring: path registers with an empty
    # 'get' operation.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        pass

    spec.add_path(app=app, path='/gists/{gist_id}', view=gist_detail)

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {}
def test_single_route_no_docstring_no_path_ops(app, spec):
    # Explicit operations dict, no docstring or path argument.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        pass

    spec.add_path(app=app, view=gist_detail, operations={'get': {}})

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {}
def test_single_route_docstring_path_no_ops(app, spec):
    # Docstring YAML plus an explicit path argument. The docstring is
    # runtime data parsed by add_path — do not edit as documentation.
    # NOTE(review): YAML nesting reconstructed from a whitespace-mangled source — verify.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        '''
        ---
        get:
            responses:
                200:
                    schema:
                        $ref: '#/definitions/Gist'
        '''
        pass

    spec.add_path(app=app, path='/gists/{gist_id}', view=gist_detail)

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {
        'responses': {
            200: {
                'schema': {
                    '$ref': '#/definitions/Gist'
                }
            }
        }
    }
def test_single_route_docstring_no_path_ops(app, spec):
    # Docstring YAML plus an explicit operations dict: the docstring
    # content wins over the empty operations entry.
    # NOTE(review): YAML nesting reconstructed from a whitespace-mangled source — verify.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        '''
        ---
        get:
            responses:
                200:
                    schema:
                        $ref: '#/definitions/Gist'
        '''
        pass

    spec.add_path(app=app, view=gist_detail, operations={'get': {}})

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {
        'responses': {
            200: {
                'schema': {
                    '$ref': '#/definitions/Gist'
                }
            }
        }
    }
def test_single_route_no_docstring_path_ops(app, spec):
    # Both an explicit operations dict and an explicit path, no docstring.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        pass

    spec.add_path(app=app, view=gist_detail, operations={'get': {}}, path='/gists/{gist_id}')

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {}
def test_single_route_docstring_path_ops(app, spec):
    # All three sources present (docstring, operations, path): the
    # docstring YAML content is the one that ends up in the spec.
    # NOTE(review): YAML nesting reconstructed from a whitespace-mangled source — verify.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        '''
        ---
        get:
            responses:
                200:
                    schema:
                        $ref: '#/definitions/Gist'
        '''
        pass

    spec.add_path(app=app, view=gist_detail, operations={'get': {}}, path='/gists/{gist_id}')

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {
        'responses': {
            200: {
                'schema': {
                    '$ref': '#/definitions/Gist'
                }
            }
        }
    }
def test_single_route_multiple_view_functions(app, spec):
    # Two views on the same route with different methods: both operations
    # must merge under the single path entry.
    # NOTE(review): YAML nesting reconstructed from a whitespace-mangled source — verify.
    @app.route('/gists/{gist_id}', methods=['GET'])
    def gist_detail(gist_id):
        '''
        ---
        get:
            responses:
                200:
                    schema:
                        $ref: '#/definitions/Gist'
        '''
        pass

    @app.route('/gists/{gist_id}', methods=['PUT'])
    def other_gist_detail(gist_id):
        '''
        ---
        put:
            responses:
                204:
                    schema:
                        $ref: '#/definitions/Gist'
        '''
        pass

    spec.add_path(app=app, view=gist_detail)
    spec.add_path(app=app, view=other_gist_detail)

    assert '/gists/{gist_id}' in spec._paths
    assert 'get' in spec._paths['/gists/{gist_id}']
    assert 'put' in spec._paths['/gists/{gist_id}']
    assert spec._paths['/gists/{gist_id}']['get'] == {
        'responses': {
            200: {
                'schema': {
                    '$ref': '#/definitions/Gist'
                }
            }
        }
    }
    assert spec._paths['/gists/{gist_id}']['put'] == {
        'responses': {
            204: {
                'schema': {
                    '$ref': '#/definitions/Gist'
                }
            }
        }
    }
| 25.759091
| 93
| 0.496383
| 618
| 5,667
| 4.275081
| 0.053398
| 0.120363
| 0.179031
| 0.13626
| 0.981075
| 0.981075
| 0.955715
| 0.955715
| 0.947388
| 0.922407
| 0
| 0.009585
| 0.337215
| 5,667
| 219
| 94
| 25.876712
| 0.693823
| 0.1057
| 0
| 0.68254
| 0
| 0
| 0.215184
| 0
| 0
| 0
| 0
| 0
| 0.230159
| 1
| 0.150794
| false
| 0.079365
| 0
| 0
| 0.150794
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7efeb366a8f9552dc4e6daad794f0cdb015108c5
| 53
|
py
|
Python
|
bunny_prison.py
|
Arjuna197/the100
|
2963b4fe1b1b8e673a23b2cf97f4bcb263af9781
|
[
"MIT"
] | 1
|
2022-02-20T18:49:49.000Z
|
2022-02-20T18:49:49.000Z
|
bunny_prison.py
|
dan-garvey/the100
|
2963b4fe1b1b8e673a23b2cf97f4bcb263af9781
|
[
"MIT"
] | 13
|
2017-12-13T02:31:54.000Z
|
2017-12-13T02:37:45.000Z
|
bunny_prison.py
|
dan-garvey/the100
|
2963b4fe1b1b8e673a23b2cf97f4bcb263af9781
|
[
"MIT"
] | null | null | null |
def answer(x, y):
    """Return the 1-based position of cell (x, y) in a diagonal enumeration.

    Cells are numbered diagonal by diagonal (a Cantor-style pairing of
    s = x + y): (1,1)=1, (1,2)=2, (2,1)=3, (1,3)=4, (2,2)=5, (3,1)=6, ...

    :param x: 1-based row coordinate.
    :param y: 1-based column coordinate.
    :return: integer index of the cell.
    """
    # (x+y-2)*(x+y+1) == s*(s-1) - 2 with s = x + y, which is always even,
    # so floor division is exact. Using // instead of / keeps the result
    # an int under Python 3 (the original / produced a float).
    return ((x + y - 2) * (x + y + 1)) // 2 - y + 2
| 17.666667
| 34
| 0.490566
| 14
| 53
| 1.857143
| 0.5
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.169811
| 53
| 2
| 35
| 26.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7d30368e0a6d723bb404c31cb817ee70121181f9
| 5,213
|
py
|
Python
|
dist/cache.py
|
Ulfasaar/timeout_cache
|
eb4e616d14f1efe1ea817d7b5cbdbcfc48f07a95
|
[
"MIT"
] | null | null | null |
dist/cache.py
|
Ulfasaar/timeout_cache
|
eb4e616d14f1efe1ea817d7b5cbdbcfc48f07a95
|
[
"MIT"
] | null | null | null |
dist/cache.py
|
Ulfasaar/timeout_cache
|
eb4e616d14f1efe1ea817d7b5cbdbcfc48f07a95
|
[
"MIT"
] | null | null | null |
# Generated by Haxe 3.4.4
# coding: utf-8
from datetime import datetime as python_lib_datetime_Datetime
import time as python_lib_Time
class Date:
    """Minimal Haxe-style Date wrapper around a ``datetime`` instance.

    Months are 0-based (Haxe convention); the wrapped ``datetime`` is
    stored on ``self.date``.
    """

    __slots__ = ("date",)

    def __init__(self, year, month, day, hour, _hx_min, sec):
        # Clamp years below datetime's supported minimum year.
        if year < python_lib_datetime_Datetime.min.year:
            year = python_lib_datetime_Datetime.min.year
        # Haxe permits day 0; datetime does not, so bump it to 1.
        if day == 0:
            day = 1
        # month + 1: convert 0-based Haxe months to datetime's 1-based ones.
        self.date = python_lib_datetime_Datetime(year, month + 1, day, hour, _hx_min, sec, 0)

    @staticmethod
    def now():
        """Return a Date holding the current local time."""
        stamp = Date(1970, 0, 1, 0, 0, 0)
        stamp.date = python_lib_datetime_Datetime.now()
        return stamp
class cache_Cache:
    """Single-value cache that populates itself lazily on first access.

    ``refresh`` recomputes the value; ``empty`` (optional) produces a
    cleared value.
    """

    __slots__ = ("data", "_refresh", "_empty", "isInit")

    def __init__(self, refresh, empty=None):
        self.data = None
        self.isInit = True
        self._refresh = refresh
        self._empty = empty

    def get(self):
        """Return the cached value, computing it on first access."""
        if not self.isInit:
            return self.data
        self.data = self._refresh()
        self.isInit = False
        return self.data

    def refresh(self):
        """Force recomputation of the cached value."""
        self.data = self._refresh()

    def empty(self):
        """Replace the cached value with the configured empty value."""
        self.data = self._empty()
class cache_HybridCache(cache_Cache):
    # Cache that refreshes only when BOTH a timeout (in milliseconds) has
    # elapsed AND an external version counter has advanced.
    __slots__ = ("timeout", "hasElapsed", "current_time", "prev_time", "diff_time", "current_version", "get_version")

    def __init__(self, timeout_ms, refresh, get_version, empty=None):
        # Pre-declare slots (Haxe-generated pattern) before real init.
        self.get_version = None
        self.current_version = None
        self.diff_time = None
        self.prev_time = None
        self.current_time = None
        self.timeout = None
        self.hasElapsed = False  # set but never read within this class
        super().__init__(refresh, empty)
        self.timeout = timeout_ms  # milliseconds; -1 disables the timeout check
        # Wall-clock time in milliseconds.
        self.current_time = (python_lib_Time.mktime(Date.now().date.timetuple()) * 1000)
        self.prev_time = self.current_time
        self.diff_time = (self.current_time - self.prev_time)
        self.get_version = get_version
        self.current_version = self.get_version()

    def version(self):
        # Last version observed when the cache was (re)filled.
        return self.current_version

    def refresh(self):
        # Force a refill and reset both the version and the timeout clock.
        self.current_version = self.get_version()
        self.diff_time = 0
        self.current_time = (python_lib_Time.mktime(Date.now().date.timetuple()) * 1000)
        self.prev_time = self.current_time
        self.data = self._refresh()

    def get(self):
        if (self.isInit == False):
            if (self.timeout != -1):
                self.current_time = (python_lib_Time.mktime(Date.now().date.timetuple()) * 1000)
                self.diff_time = (self.current_time - self.prev_time)
                if (self.diff_time >= self.timeout):
                    # Timeout elapsed: refill only if the external version moved.
                    external_version = self.get_version()
                    if (self.current_version < external_version):
                        self.data = self._refresh()
                        self.current_version = external_version
                    self.prev_time = self.current_time
        else:
            # First access: unconditionally fill the cache.
            self.refresh()
            self.isInit = False
        return self.data
class cache_TimeoutCache(cache_Cache):
    # Cache that refreshes once a timeout (in milliseconds) has elapsed
    # since the previous refill.
    __slots__ = ("timeout", "hasElapsed", "current_time", "prev_time", "diff_time")

    def __init__(self, timeout_ms, refresh, empty=None):
        # Pre-declare slots (Haxe-generated pattern) before real init.
        self.diff_time = None
        self.prev_time = None
        self.current_time = None
        self.timeout = None
        self.hasElapsed = False  # set but never read within this class
        super().__init__(refresh, empty)
        self.timeout = timeout_ms  # milliseconds; -1 disables the timeout check
        # Wall-clock time in milliseconds.
        self.current_time = (python_lib_Time.mktime(Date.now().date.timetuple()) * 1000)
        self.prev_time = self.current_time
        self.diff_time = (self.current_time - self.prev_time)

    def refresh(self):
        # Force a refill and restart the timeout clock.
        self.diff_time = 0
        self.current_time = (python_lib_Time.mktime(Date.now().date.timetuple()) * 1000)
        self.prev_time = self.current_time
        self.data = self._refresh()

    def get(self):
        if (self.isInit == False):
            if (self.timeout != -1):
                self.current_time = (python_lib_Time.mktime(Date.now().date.timetuple()) * 1000)
                self.diff_time = (self.current_time - self.prev_time)
                if (self.diff_time >= self.timeout):
                    # Timeout elapsed: refill and restart the clock.
                    self.data = self._refresh()
                    self.prev_time = self.current_time
        else:
            # First access: unconditionally fill the cache.
            self.data = self._refresh()
            self.isInit = False
        return self.data
class cache_VersionedCache(cache_Cache):
    # Cache that refreshes whenever an external version counter advances
    # past the last version observed.
    __slots__ = ("current_version", "get_version")

    def __init__(self, refresh, get_version, empty=None):
        self.get_version = None
        self.current_version = 0.0
        super().__init__(refresh, empty)
        self.get_version = get_version
        self._empty = empty  # redundant: the base __init__ already set this
        self.current_version = self.get_version()

    def version(self):
        # Last version observed when the cache was (re)filled.
        return self.current_version

    def refresh(self):
        # Force a refill and re-read the external version.
        self.current_version = self.get_version()
        self.data = self._refresh()

    def get(self):
        if self.isInit:
            # First access: unconditionally fill the cache.
            self.data = self._refresh()
            self.isInit = False
        else:
            # Refill only if the external version moved forward.
            external_version = self.get_version()
            if (self.current_version < external_version):
                self.data = self._refresh()
                self.current_version = external_version
        return self.data
| 32.179012
| 117
| 0.605218
| 629
| 5,213
| 4.707472
| 0.096979
| 0.111449
| 0.091185
| 0.064168
| 0.83384
| 0.783181
| 0.753462
| 0.705505
| 0.684566
| 0.684566
| 0
| 0.012631
| 0.286208
| 5,213
| 161
| 118
| 32.378882
| 0.783123
| 0.007098
| 0
| 0.72
| 1
| 0
| 0.033649
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136
| false
| 0
| 0.016
| 0.016
| 0.288
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
adb967649ee1c9264a3757c420f50a34fcebf13f
| 274
|
py
|
Python
|
Codewars/8kyu/localize-the-barycenter-of-a-triangle/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/localize-the-barycenter-of-a-triangle/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/localize-the-barycenter-of-a-triangle/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 2.7.6
# Codewars sample tests: `test` (the Codewars assertion harness) and
# `bar_triang` (the solution: barycenter of a triangle, coordinates
# rounded to 4 decimal places) are injected by the Codewars runtime.
test.describe('Example Tests')
test.assert_equals(bar_triang([4, 6], [12, 4], [10, 10]), [8.6667, 6.6667])
test.assert_equals(bar_triang([4, 2], [12, 2], [6, 10]), [7.3333, 4.6667])
test.assert_equals(bar_triang([4, 8], [8, 2], [16, 6]), [9.3333, 5.3333])
| 39.142857
| 75
| 0.613139
| 53
| 274
| 3.056604
| 0.396226
| 0.185185
| 0.296296
| 0.351852
| 0.530864
| 0.530864
| 0.37037
| 0
| 0
| 0
| 0
| 0.234568
| 0.113139
| 274
| 6
| 76
| 45.666667
| 0.432099
| 0.051095
| 0
| 0
| 0
| 0
| 0.050388
| 0
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
adccce252b052efe11c510315be9e250750bf1f7
| 31
|
py
|
Python
|
_00_Generic-Python-Research_misc/Python_concepts/special_variables/tst.py
|
VProfirov/Telerik-Academy-Course_Python-version_Homeworks-and-Exams
|
d8dadce0a3084d466d522292038ef1ff3b876891
|
[
"MIT"
] | null | null | null |
_00_Generic-Python-Research_misc/Python_concepts/special_variables/tst.py
|
VProfirov/Telerik-Academy-Course_Python-version_Homeworks-and-Exams
|
d8dadce0a3084d466d522292038ef1ff3b876891
|
[
"MIT"
] | null | null | null |
_00_Generic-Python-Research_misc/Python_concepts/special_variables/tst.py
|
VProfirov/Telerik-Academy-Course_Python-version_Homeworks-and-Exams
|
d8dadce0a3084d466d522292038ef1ff3b876891
|
[
"MIT"
] | null | null | null |
# Show which module name this file runs under ('__main__' when executed directly).
print('__name__ = {}'.format(__name__))
| 31
| 31
| 0.709677
| 4
| 31
| 3.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 31
| 1
| 31
| 31
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
70f992331f3c46d0ed91ff41378250bcb015edec
| 101,193
|
py
|
Python
|
src/pyrin/database/orm/sql/schema/columns.py
|
wilsonGmn/pyrin
|
25dbe3ce17e80a43eee7cfc7140b4c268a6948e0
|
[
"BSD-3-Clause"
] | null | null | null |
src/pyrin/database/orm/sql/schema/columns.py
|
wilsonGmn/pyrin
|
25dbe3ce17e80a43eee7cfc7140b4c268a6948e0
|
[
"BSD-3-Clause"
] | null | null | null |
src/pyrin/database/orm/sql/schema/columns.py
|
wilsonGmn/pyrin
|
25dbe3ce17e80a43eee7cfc7140b4c268a6948e0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
orm sql schema columns module.
"""
from sqlalchemy.sql.type_api import Variant
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy import BigInteger, Integer, ForeignKey, String, Unicode, Boolean, \
SmallInteger, Float, Date, Time, Text, UnicodeText, DECIMAL
import pyrin.utils.misc as misc_utils
from pyrin.database.enumerations import DialectEnum
from pyrin.database.orm.sql.schema.base import CoreColumn
from pyrin.database.orm.types.custom import CoreDateTime, CoreTimeStamp
from pyrin.database.orm.sql.schema.mixin import SequenceColumnMixin, GUIDColumnMixin, \
TypeMixin
from pyrin.database.orm.sql.schema.exceptions import AutoPKColumnTypeIsInvalidError, \
InvalidFKColumnReferenceTypeError, StringColumnTypeIsInvalidError, \
TextColumnTypeIsInvalidError
class StringColumn(CoreColumn):
"""
string column class.
this is a helper class to define columns that their type
is an instance or subclass of sqlalchemy `String`.
"""
DEFAULT_TYPE = Unicode
def __init__(self, *args, **kwargs):
"""
initializes an instance of StringColumn.
:param str name: the name of this column as represented in the database.
this argument may be the first positional argument, or
specified via keyword.
:param TypeEngine type_: the column's type, indicated using an instance which
if no arguments are required for the type, the class of
the type can be sent as well.
this argument may be the second positional argument, or
specified via keyword.
the type must be an instance or subclass of sqlalchemy
`String` type. defaults to `Unicode` if not provided.
:param object *args: additional positional arguments include various
`SchemaItem` derived constructs which will be applied
as options to the column.
:keyword callable | object default: a scalar, python callable or `ColumnElement`
expression representing the default value
for this column, which will be invoked upon
insert if this column is otherwise not
specified in the values clause of the insert.
:keyword str doc: optional string that can be used by the ORM or similar
to document attributes on the python side.
:keyword str key: an optional string identifier which will identify this
`Column` object on the `Table`.
:keyword bool index: when `True`, indicates that the column is indexed.
:keyword dict info: optional data dictionary which will be populated into the
`SchemaItem.info` attribute of this object.
:keyword bool nullable: when set to `False`, will cause the `Not NULL`
phrase to be added when generating ddl for the column.
:keyword callable | object onupdate: a scalar, python callable, or
`ClauseElement` representing a default
value to be applied to the column within update
statements, which will be invoked upon update
if this column is not present in the set
clause of the update.
:keyword bool primary_key: if `True`, marks this column as a primary key
column. multiple columns can have this flag set to
specify composite primary keys.
:keyword str | ClauseElement | TextClause server_default: a `FetchedValue` instance,
str, unicode or `text`
construct representing the ddl
default value for the column.
:keyword str | ClauseElement | TextClause server_onupdate: a `FetchedValue` instance
representing a database-side
default generation function,
such as a trigger. this
indicates to sqlalchemy
that a newly generated value
will be available after
updates. this construct
does not actually implement
any kind of generation
function within the database,
which instead must be specified
separately.
:keyword bool quote: force quoting of this column's name on or off,
corresponding to `True` or `False`. when left at its default
of `None`, the column identifier will be quoted according to
whether the name is case sensitive (identifiers with at least one
upper case character are treated as case sensitive), or if it's a
reserved word.
:keyword bool unique: when `True`, indicates that this column contains a
unique constraint, or if `index` is `True` as well, indicates
that the `index` should be created with the unique flag.
:keyword bool system: when `True`, indicates this is a system column,
that is a column which is automatically made available by the
database, and should not be included in the columns list for a
`create table` statement.
:keyword str comment: optional string that will render an sql comment
on table creation.
:keyword bool allow_read: specifies that the column should be
included in entity to dict conversion.
defaults to True if not provided.
:keyword bool allow_write: specifies that the column should be
populated on conversion from dict.
defaults to True if not provided.
:keyword int min_length: minimum length of value for this column.
defaults to None if not provided.
:keyword int max_length: maximum length of value for this column.
if provided, and the type of this column is a
class, it will be instantiated with this length.
defaults to None if not provided.
:keyword bool allow_blank: specifies that this column could have blank string
value. defaults to False if not provided.
:keyword bool allow_whitespace: specifies that this column could have whitespace
string value. defaults to False if not provided.
:keyword object | callable min_value: minimum value that this column could have.
it could also be a callable without any inputs.
if a non-callable is provided and the column is
not a primary key and also column name is provided,
it will result in check constraint generation on
database. otherwise it will be ignored and could
be used in validators.
defaults to None if not provided.
:keyword object | callable max_value: maximum value that this column could have.
it could also be a callable without any inputs.
if a non-callable is provided and the column is
not a primary key and also column name is provided,
it will result in check constraint generation on
database. otherwise it will be ignored and could
be used in validators.
defaults to None if not provided.
:keyword list | CoreEnum | callable check_in: list of valid values for this column.
it could also be a callable without
any inputs or an enum class. if a
non-callable or enum class is provided
and the column is not a primary key and
also column name is provided, it will
result in check constraint generation
on database. otherwise it will be ignored
and could be used in validators.
defaults to None if not provided.
:keyword list | CoreEnum | callable check_not_in: list of invalid values for this column.
it could also be a callable without any
inputs or an enum class. if a
non-callable or enum class is provided
and the column is not a primary key
and also column name is provided, it
will result in check constraint
generation on database. otherwise it
will be ignored and could be used
in validators.
defaults to None if not provided.
:note check_in, check_not_in: only one of these options could be provided.
otherwise it raises an error.
:keyword bool validated: specifies that an automatic validator for this column
must be registered, that is usable through validator
services for create and update.
defaults to True if not provided.
:keyword bool validated_find: specifies that an automatic find validator for this
column must be registered, that is usable through
validator services for find. defaults to `validated`
value if not provided.
:keyword bool validated_range: specifies that automatic find range validators for this
column must be registered, that is usable through
validator services for find. defaults to `validated_find`
value if not provided.
note that find range validators are constructed with
names `from_*` and `to_*` for given column if it
is a number, string or any variant of date and time.
if the type of column is anything else no range
validator will be registered for it and this value
will be ignored.
:raises StringColumnTypeIsInvalidError: string column type is invalid error.
:raises InvalidColumnAccessLevelError: invalid column access level error.
"""
self.min_length = kwargs.pop('min_length', None)
self.max_length = kwargs.pop('max_length', None)
self.allow_blank = kwargs.pop('allow_blank', False)
self.allow_whitespace = kwargs.pop('allow_whitespace', False)
args = list(args)
name, type_ = self._extract_name_and_type(args, kwargs)
if type_ is None:
type_ = self.DEFAULT_TYPE
if not misc_utils.is_subclass_or_instance(type_, String):
raise StringColumnTypeIsInvalidError('The string column type must be '
'an instance or subclass of [{string}].'
.format(string=String))
if isinstance(type_, type) and self.max_length is not None:
type_ = type_(length=self.max_length)
kwargs.update(name=name, type_=type_)
super().__init__(*args, **kwargs)
def _copy_custom_attributes(self, column):
    """
    copies current column's custom attributes into given column.

    this method is implemented to be able to create valid column
    attributes on copy by sqlalchemy.

    subclasses must override this method and call
    `super()._copy_custom_attributes()` at the end.
    the changes must be done to given column in-place.

    :param CoreColumn column: copied column instance.
    """
    # mirror every string-specific attribute onto the copied column.
    for attribute in ('min_length', 'max_length',
                      'allow_blank', 'allow_whitespace'):
        setattr(column, attribute, getattr(self, attribute))

    super()._copy_custom_attributes(column)
class PKColumn(CoreColumn):
    """
    pk column class.

    this is a helper class for defining pk columns.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of PKColumn.

        the column is always created with `primary_key=True` and
        `nullable=False`. update-related and uniqueness options
        (`onupdate`, `server_onupdate`, `unique`) are meaningless for
        a primary key and will be discarded if provided.

        :param str name: the name of this column as represented in the database.
                         this argument may be the first positional argument, or
                         specified via keyword.

        :param TypeEngine type_: the column's type, indicated using an instance
                                 or, if the type requires no arguments, its class.
                                 this argument may be the second positional
                                 argument, or specified via keyword.

        :param object *args: additional positional arguments include various
                             `SchemaItem` derived constructs which will be
                             applied as options to the column.

        :keyword bool autoincrement: set up `auto increment` semantics for an
                                     integer primary key column.

        :keyword callable | object default: a scalar, python callable or
                                            `ColumnElement` expression used as
                                            the insert-time default value.

        :keyword str doc: optional string usable by the ORM or similar to
                          document attributes on the python side.

        :keyword str key: an optional string identifier which will identify
                          this `Column` object on the `Table`.

        :keyword bool index: when `True`, indicates that the column is indexed.
                             defaults to True if not provided.

        :keyword dict info: optional data dictionary which will be populated
                            into the `SchemaItem.info` attribute of this object.

        :keyword str | ClauseElement | TextClause server_default: a `FetchedValue`
                                                                  instance, str, unicode
                                                                  or `text` construct
                                                                  representing the ddl
                                                                  default value for the
                                                                  column.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at its default of `None`, quoting follows
                             case sensitivity or reserved-word status of the name.

        :keyword bool system: when `True`, indicates this is a system column,
                              automatically made available by the database and
                              excluded from `create table` statements.

        :keyword str comment: optional string that will render an sql comment
                              on table creation.

        :keyword bool allow_read: specifies that the column should be included
                                  in entity to dict conversion.
                                  defaults to True if not provided.

        :keyword bool allow_write: specifies that the column should be populated
                                   on conversion from dict.
                                   defaults to False if not provided.

        :keyword bool validated: specifies that an automatic validator for this
                                 column must be registered, usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: specifies that an automatic find validator
                                      for this column must be registered.
                                      defaults to `validated` value if not provided.

        :keyword bool validated_range: specifies that automatic find range
                                       validators (`from_*` and `to_*`) must be
                                       registered for number, string and
                                       date/time columns. defaults to
                                       `validated_find` value if not provided.
                                       ignored for other column types.

        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """
        # a pk is mandatory and unique by definition.
        kwargs.update(primary_key=True, nullable=False)

        for option, value in (('allow_write', False), ('index', True)):
            kwargs.setdefault(option, value)

        # these options do not apply to primary key columns.
        for unsupported in ('onupdate', 'server_onupdate', 'unique'):
            kwargs.pop(unsupported, None)

        super().__init__(*args, **kwargs)
class AutoPKColumn(PKColumn):
    """
    auto pk column class.

    this is a helper class for defining pk columns with auto increment value.
    this type of pk column's value is not available to python side without commit or flush.
    this type of columns handle autoincrement correctly also on sqlite backend.
    """

    DEFAULT_TYPE = BigInteger

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of AutoPKColumn.

        the column type must be an instance or subclass of `Integer` (or a
        `Variant` whose implementation is). `default` and `server_default`
        are discarded because the value is always database-generated, and
        `validated` is forced to False for the same reason.

        :param str name: the name of this column as represented in the database.
                         this argument may be the first positional argument, or
                         specified via keyword.

        :param TypeEngine type_: the column's type, indicated using an instance
                                 or, if the type requires no arguments, its class.
                                 it must be an instance or subclass of `Integer`.
                                 defaults to `DEFAULT_TYPE` if not provided.
                                 this argument may be the second positional
                                 argument, or specified via keyword.

        :param object *args: additional positional arguments include various
                             `SchemaItem` derived constructs which will be
                             applied as options to the column.

        :keyword str doc: optional string usable by the ORM or similar to
                          document attributes on the python side.

        :keyword str key: an optional string identifier which will identify
                          this `Column` object on the `Table`.

        :keyword bool index: when `True`, indicates that the column is indexed.
                             defaults to True if not provided.

        :keyword dict info: optional data dictionary which will be populated
                            into the `SchemaItem.info` attribute of this object.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at its default of `None`, quoting follows
                             case sensitivity or reserved-word status of the name.

        :keyword bool system: when `True`, indicates this is a system column,
                              automatically made available by the database and
                              excluded from `create table` statements.

        :keyword str comment: optional string that will render an sql comment
                              on table creation.

        :keyword bool allow_read: specifies that the column should be included
                                  in entity to dict conversion.
                                  defaults to True if not provided.

        :keyword bool allow_write: specifies that the column should be populated
                                   on conversion from dict.
                                   defaults to False if not provided.

        :keyword bool validated_find: specifies that an automatic find validator
                                      for this column must be registered.
                                      defaults to True if not provided.

        :keyword bool validated_range: specifies that automatic find range
                                       validators (`from_*` and `to_*`) must be
                                       registered for number, string and
                                       date/time columns. defaults to
                                       `validated_find` value if not provided.
                                       ignored for other column types.

        :raises AutoPKColumnTypeIsInvalidError: auto pk column type is invalid error.
        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """
        args = list(args)
        name, type_ = self._extract_name_and_type(args, kwargs)
        if type_ is None:
            type_ = self.DEFAULT_TYPE

        is_variant = isinstance(type_, Variant)
        is_integer = misc_utils.is_subclass_or_instance(type_, Integer)
        if not is_integer and \
                not (is_variant and misc_utils.is_subclass_or_instance(type_.impl, Integer)):
            raise AutoPKColumnTypeIsInvalidError('The auto pk column type must be an '
                                                 'instance or subclass of [{integer}].'
                                                 .format(integer=Integer))

        if not is_variant:
            if isinstance(type_, type):
                type_ = type_()

            # this is for sqlite to handle autoincrement correctly also on other
            # variants of Integer. such as BigInteger and SmallInteger.
            type_ = type_.with_variant(Integer, DialectEnum.SQLITE)

        # defaults are pointless, the database generates the value.
        for fixed in ('default', 'server_default'):
            kwargs.pop(fixed, None)

        kwargs.update(name=name, type_=type_, autoincrement=True,
                      min_value=1, validated=False)
        kwargs.setdefault('validated_find', True)
        super().__init__(*args, **kwargs)
class GUIDPKColumn(GUIDColumnMixin, PKColumn):
    """
    guid pk column class.

    this is a helper class for defining pk columns whose value is a guid.
    unlike auto increment pks, the value of this type of pk column is
    available to python side without commit or flush.
    """
    pass
class SequencePKColumn(SequenceColumnMixin, PKColumn):
    """
    sequence pk column class.

    this is a helper class for defining pk columns that gain their value
    from a sequence. it differs from columns that set `autoincrement=True`
    in two ways: the value of sequence columns is available to python side
    without commit or flush, and a table may contain several sequence
    columns, which is impossible for auto increment columns.
    """
    pass
class FKColumn(CoreColumn):
    """
    fk column class.

    this is a helper class for defining fk columns.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of FKColumn.

        :param str name: the name of this column as represented in the database.
                         this argument may be the first positional argument, or
                         specified via keyword.

        :param TypeEngine type_: the column's type, indicated using an instance
                                 or, if the type requires no arguments, its class.
                                 this argument may be the second positional
                                 argument, or specified via keyword.

        :param object *args: additional positional arguments include various
                             `SchemaItem` derived constructs which will be
                             applied as options to the column.

        :keyword str | CoreColumn fk: reference column name or instance.
                                      this parameter is required but to
                                      prevent errors it has to be set as keyword.

        :keyword str fk_on_update: optional string. if set, emit ON UPDATE <value>
                                   when issuing DDL for this constraint. typical
                                   values include CASCADE, DELETE and RESTRICT.

        :keyword str fk_on_delete: optional string. if set, emit ON DELETE <value>
                                   when issuing DDL for this constraint. typical
                                   values include CASCADE, DELETE and RESTRICT.

        :keyword bool autoincrement: set up `auto increment` semantics for an
                                     integer primary key column.

        :keyword callable | object default: a scalar, python callable or
                                            `ColumnElement` expression used as
                                            the insert-time default value.

        :keyword str doc: optional string usable by the ORM or similar to
                          document attributes on the python side.

        :keyword str key: an optional string identifier which will identify
                          this `Column` object on the `Table`.

        :keyword bool index: when `True`, indicates that the column is indexed.
                             defaults to True if not provided.

        :keyword dict info: optional data dictionary which will be populated
                            into the `SchemaItem.info` attribute of this object.

        :keyword bool nullable: when set to `False`, will cause the `Not NULL`
                                phrase to be added when generating ddl for the
                                column. defaults to False if not provided.

        :keyword callable | object onupdate: a scalar, python callable, or
                                             `ClauseElement` used as a default
                                             value within update statements when
                                             this column is absent from the set
                                             clause.

        :keyword bool primary_key: if `True`, marks this column as a primary key
                                   column. multiple columns can have this flag
                                   set to specify composite primary keys.

        :keyword str | ClauseElement | TextClause server_default: a `FetchedValue`
                                                                  instance, str, unicode
                                                                  or `text` construct
                                                                  representing the ddl
                                                                  default value for the
                                                                  column.

        :keyword str | ClauseElement | TextClause server_onupdate: a `FetchedValue`
                                                                   instance representing a
                                                                   database-side default
                                                                   generation function,
                                                                   such as a trigger. it
                                                                   only tells sqlalchemy a
                                                                   new value will exist
                                                                   after updates and does
                                                                   not implement any
                                                                   generation itself.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at its default of `None`, quoting follows
                             case sensitivity or reserved-word status of the name.

        :keyword bool unique: when `True`, indicates that this column contains a
                              unique constraint, or if `index` is `True` as well,
                              indicates that the `index` should be created with
                              the unique flag.

        :keyword bool system: when `True`, indicates this is a system column,
                              automatically made available by the database and
                              excluded from `create table` statements.

        :keyword str comment: optional string that will render an sql comment
                              on table creation.

        :keyword bool allow_read: specifies that the column should be included
                                  in entity to dict conversion.
                                  defaults to True if not provided.

        :keyword bool allow_write: specifies that the column should be populated
                                   on conversion from dict.
                                   defaults to True if not provided.

        :keyword object | callable min_value: minimum value that this column
                                              could have. it could also be a
                                              callable without any inputs. a
                                              non-callable value on a named,
                                              non-pk column produces a check
                                              constraint; otherwise it is only
                                              used in validators.
                                              defaults to None if not provided.

        :keyword object | callable max_value: maximum value that this column
                                              could have. same semantics as
                                              `min_value`.
                                              defaults to None if not provided.

        :keyword list | CoreEnum | callable check_in: list of valid values for
                                                      this column. it could also
                                                      be a callable without any
                                                      inputs or an enum class.
                                                      a non-callable value on a
                                                      named, non-pk column
                                                      produces a check
                                                      constraint; otherwise it
                                                      is only used in validators.
                                                      defaults to None if not
                                                      provided.

        :keyword list | CoreEnum | callable check_not_in: list of invalid values
                                                          for this column. same
                                                          semantics as
                                                          `check_in`.
                                                          defaults to None if
                                                          not provided.

        :note check_in, check_not_in: only one of these options could be provided.
                                      otherwise it raises an error.

        :keyword bool validated: specifies that an automatic validator for this
                                 column must be registered, usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: specifies that an automatic find validator
                                      for this column must be registered.
                                      defaults to `validated` value if not provided.

        :keyword bool validated_range: specifies that automatic find range
                                       validators (`from_*` and `to_*`) must be
                                       registered for number, string and
                                       date/time columns. defaults to
                                       `validated_find` value if not provided.
                                       ignored for other column types.

        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """
        # stash fk options as private attributes; they are consumed
        # later by `_get_custom_schema_items`.
        for option in ('fk', 'fk_on_update', 'fk_on_delete'):
            setattr(self, '_' + option, kwargs.pop(option, None))

        kwargs.setdefault('nullable', False)
        kwargs.setdefault('index', True)
        super().__init__(*args, **kwargs)

    def _get_custom_schema_items(self):
        """
        gets custom schema items for this column.

        it will generate required fk constraint.

        :raises InvalidFKColumnReferenceTypeError: invalid fk column reference type error.

        :rtype: list
        """
        valid_references = (str, InstrumentedAttribute, CoreColumn)
        if not (self._fk is None or isinstance(self._fk, valid_references)):
            raise InvalidFKColumnReferenceTypeError('Foreign key reference column '
                                                    'must be a string or a column instance.')

        # this is to prevent sqlalchemy errors.
        # because metadata uses uninitialized entities.
        if self._fk is None:
            self._fk = ''

        return [ForeignKey(self._fk,
                           onupdate=self._fk_on_update,
                           ondelete=self._fk_on_delete)]

    def _copy_custom_attributes(self, column):
        """
        copies current column's custom attributes into given column.

        this method is implemented to be able to create valid column
        attributes on copy by sqlalchemy.

        subclasses must override this method and call
        `super()._copy_custom_attributes()` at the end.
        the changes must be done to given column in-place.

        :param CoreColumn column: copied column instance.
        """
        for attribute in ('_fk', '_fk_on_update', '_fk_on_delete'):
            setattr(column, attribute, getattr(self, attribute))

        super()._copy_custom_attributes(column)
class HiddenColumn(CoreColumn):
    """
    hidden column class.

    this is a helper class for defining hidden columns.
    hidden columns will not be included in entity to dict conversion
    and also they won't get populated on conversion from dict.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of HiddenColumn.

        `allow_read` and `allow_write` are always forced to False,
        any provided values for them are overridden.

        :param str name: the name of this column as represented in the database.
                         this argument may be the first positional argument, or
                         specified via keyword.

        :param TypeEngine type_: the column's type, indicated using an instance
                                 or, if the type requires no arguments, its class.
                                 this argument may be the second positional
                                 argument, or specified via keyword.

        :param object *args: additional positional arguments include various
                             `SchemaItem` derived constructs which will be
                             applied as options to the column.

        :keyword bool autoincrement: set up `auto increment` semantics for an
                                     integer primary key column.

        :keyword callable | object default: a scalar, python callable or
                                            `ColumnElement` expression used as
                                            the insert-time default value.

        :keyword str doc: optional string usable by the ORM or similar to
                          document attributes on the python side.

        :keyword str key: an optional string identifier which will identify
                          this `Column` object on the `Table`.

        :keyword bool index: when `True`, indicates that the column is indexed.

        :keyword dict info: optional data dictionary which will be populated
                            into the `SchemaItem.info` attribute of this object.

        :keyword bool nullable: when set to `False`, will cause the `Not NULL`
                                phrase to be added when generating ddl for the
                                column.

        :keyword callable | object onupdate: a scalar, python callable, or
                                             `ClauseElement` used as a default
                                             value within update statements when
                                             this column is absent from the set
                                             clause.

        :keyword bool primary_key: if `True`, marks this column as a primary key
                                   column. multiple columns can have this flag
                                   set to specify composite primary keys.

        :keyword str | ClauseElement | TextClause server_default: a `FetchedValue`
                                                                  instance, str, unicode
                                                                  or `text` construct
                                                                  representing the ddl
                                                                  default value for the
                                                                  column.

        :keyword str | ClauseElement | TextClause server_onupdate: a `FetchedValue`
                                                                   instance representing a
                                                                   database-side default
                                                                   generation function,
                                                                   such as a trigger. it
                                                                   only tells sqlalchemy a
                                                                   new value will exist
                                                                   after updates and does
                                                                   not implement any
                                                                   generation itself.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at its default of `None`, quoting follows
                             case sensitivity or reserved-word status of the name.

        :keyword bool unique: when `True`, indicates that this column contains a
                              unique constraint, or if `index` is `True` as well,
                              indicates that the `index` should be created with
                              the unique flag.

        :keyword bool system: when `True`, indicates this is a system column,
                              automatically made available by the database and
                              excluded from `create table` statements.

        :keyword str comment: optional string that will render an sql comment
                              on table creation.

        :keyword object | callable min_value: minimum value that this column
                                              could have. it could also be a
                                              callable without any inputs. a
                                              non-callable value on a named,
                                              non-pk column produces a check
                                              constraint; otherwise it is only
                                              used in validators.
                                              defaults to None if not provided.

        :keyword object | callable max_value: maximum value that this column
                                              could have. same semantics as
                                              `min_value`.
                                              defaults to None if not provided.

        :keyword list | CoreEnum | callable check_in: list of valid values for
                                                      this column. it could also
                                                      be a callable without any
                                                      inputs or an enum class.
                                                      a non-callable value on a
                                                      named, non-pk column
                                                      produces a check
                                                      constraint; otherwise it
                                                      is only used in validators.
                                                      defaults to None if not
                                                      provided.

        :keyword list | CoreEnum | callable check_not_in: list of invalid values
                                                          for this column. same
                                                          semantics as
                                                          `check_in`.
                                                          defaults to None if
                                                          not provided.

        :note check_in, check_not_in: only one of these options could be provided.
                                      otherwise it raises an error.

        :keyword bool validated: specifies that an automatic validator for this
                                 column must be registered, usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: specifies that an automatic find validator
                                      for this column must be registered.
                                      defaults to `validated` value if not provided.

        :keyword bool validated_range: specifies that automatic find range
                                       validators (`from_*` and `to_*`) must be
                                       registered for number, string and
                                       date/time columns. defaults to
                                       `validated_find` value if not provided.
                                       ignored for other column types.
        """
        # hidden means invisible in both directions of dict conversion.
        kwargs.update(allow_read=False, allow_write=False)
        super().__init__(*args, **kwargs)
class SequenceColumn(SequenceColumnMixin, CoreColumn):
    """
    sequence column class.

    this is a helper class for defining columns which gain their value from a sequence.
    a table can have multiple sequence columns.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of SequenceColumn.

        :param str name: the name of this column as represented in the database.
                         this argument may be the first positional argument, or
                         specified via keyword.

        :param TypeEngine type_: the column's type, indicated using an instance
                                 or, if the type requires no arguments, its class.
                                 this argument may be the second positional
                                 argument, or specified via keyword.

        :param object *args: additional positional arguments include various
                             `SchemaItem` derived constructs which will be
                             applied as options to the column.

        :keyword str sequence: sequence name to be generated for this column.
                               this value is required, but has to be set as
                               keyword to prevent errors.

        :keyword str doc: optional string usable by the ORM or similar to
                          document attributes on the python side.

        :keyword str key: an optional string identifier which will identify
                          this `Column` object on the `Table`.

        :keyword bool index: when `True`, indicates that the column is indexed.

        :keyword dict info: optional data dictionary which will be populated
                            into the `SchemaItem.info` attribute of this object.

        :keyword bool nullable: when set to `False`, will cause the `Not NULL`
                                phrase to be added when generating ddl for the
                                column. defaults to False if not provided.

        :keyword bool primary_key: if `True`, marks this column as a primary key
                                   column. multiple columns can have this flag
                                   set to specify composite primary keys.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at its default of `None`, quoting follows
                             case sensitivity or reserved-word status of the name.

        :keyword bool unique: when `True`, indicates that this column contains a
                              unique constraint, or if `index` is `True` as well,
                              indicates that the `index` should be created with
                              the unique flag. defaults to True if not provided.

        :keyword bool system: when `True`, indicates this is a system column,
                              automatically made available by the database and
                              excluded from `create table` statements.

        :keyword str comment: optional string that will render an sql comment
                              on table creation.

        :keyword bool allow_read: specifies that the column should be included
                                  in entity to dict conversion.
                                  defaults to True if not provided.

        :keyword bool allow_write: specifies that the column should be populated
                                   on conversion from dict.
                                   defaults to False if not provided.

        :keyword bool validated: specifies that an automatic validator for this
                                 column must be registered, usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: specifies that an automatic find validator
                                      for this column must be registered.
                                      defaults to `validated` value if not provided.

        :keyword bool validated_range: specifies that automatic find range
                                       validators (`from_*` and `to_*`) must be
                                       registered for number, string and
                                       date/time columns. defaults to
                                       `validated_find` value if not provided.
                                       ignored for other column types.

        :keyword int cache: cache size for sequence.
                            defaults to `DEFAULT_CACHE`, if not provided.
                            to disable cache, you can pass it as None or `0`.
                            note that cache is per session, so a new database
                            connection gets its own cache. caching sequences
                            is good for performance and gaps are harmless.

        :raises SequenceColumnTypeIsInvalidError: sequence column type is invalid error.
        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """
        # sequence values are expected to be unique unless stated otherwise.
        if 'unique' not in kwargs:
            kwargs['unique'] = True

        super().__init__(*args, **kwargs)
class GUIDColumn(GUIDColumnMixin, CoreColumn):
    """
    guid column class.

    this is a helper class for defining columns whose value is a guid.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of GUIDColumn.

        :param str name: the name of this column as represented in the database.
                         this argument may be the first positional argument, or
                         specified via keyword.

        :param object *args: additional positional arguments include various
                             `SchemaItem` derived constructs which will be
                             applied as options to the column.

        :keyword str doc: optional string usable by the ORM or similar to
                          document attributes on the python side.

        :keyword str key: an optional string identifier which will identify
                          this `Column` object on the `Table`.

        :keyword bool index: when `True`, indicates that the column is indexed.

        :keyword dict info: optional data dictionary which will be populated
                            into the `SchemaItem.info` attribute of this object.

        :keyword bool nullable: when set to `False`, will cause the `Not NULL`
                                phrase to be added when generating ddl for the
                                column. defaults to False if not provided.

        :keyword bool primary_key: if `True`, marks this column as a primary key
                                   column. multiple columns can have this flag
                                   set to specify composite primary keys.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at its default of `None`, quoting follows
                             case sensitivity or reserved-word status of the name.

        :keyword bool unique: when `True`, indicates that this column contains a
                              unique constraint, or if `index` is `True` as well,
                              indicates that the `index` should be created with
                              the unique flag. defaults to True if not provided.

        :keyword bool system: when `True`, indicates this is a system column,
                              automatically made available by the database and
                              excluded from `create table` statements.

        :keyword str comment: optional string that will render an sql comment
                              on table creation.

        :keyword bool allow_read: specifies that the column should be included
                                  in entity to dict conversion.
                                  defaults to True if not provided.

        :keyword bool allow_write: specifies that the column should be populated
                                   on conversion from dict.
                                   defaults to False if not provided.

        :keyword bool validated: specifies that an automatic validator for this
                                 column must be registered, usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: specifies that an automatic find validator
                                      for this column must be registered.
                                      defaults to `validated` value if not provided.

        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """
        # guids are expected to be unique unless stated otherwise.
        if 'unique' not in kwargs:
            kwargs['unique'] = True

        super().__init__(*args, **kwargs)
class BooleanColumn(TypeMixin, CoreColumn):
    """
    boolean column class.

    this is a helper class for defining columns holding boolean values.
    """

    _column_type = Boolean
class BigIntegerColumn(TypeMixin, CoreColumn):
    """
    big integer column class.

    this is a helper class for defining columns holding big integer values.
    """

    _column_type = BigInteger
class IntegerColumn(TypeMixin, CoreColumn):
    """
    integer column class.

    this is a helper class for defining columns holding integer values.
    """

    _column_type = Integer
class SmallIntegerColumn(TypeMixin, CoreColumn):
    """
    small integer column class.

    this is a helper class for defining columns holding small integer values.
    """

    _column_type = SmallInteger
class FloatColumn(TypeMixin, CoreColumn):
    """
    float column class.

    a helper column class for defining columns whose value is a float.
    """

    _column_type = Float
class DecimalColumn(TypeMixin, CoreColumn):
    """
    decimal column class.

    a helper column class for defining columns whose value is a decimal.
    """

    _column_type = DECIMAL
class DateTimeColumn(TypeMixin, CoreColumn):
    """
    datetime column class.

    a helper column class for defining columns whose value is a datetime.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of DateTimeColumn.

        :param str name: the name of this column as represented in the database.
                         it may be given as the first positional argument or as
                         a keyword.

        :param object *args: extra positional arguments of `SchemaItem` derived
                             constructs which will be applied as options to the
                             column.

        :keyword bool timezone: specifies that this column is timezone aware.
                                defaults to True if not provided.

        :keyword callable | object default: a scalar, python callable or
                                            `ColumnElement` expression used as
                                            the default value on insert when
                                            this column is absent from the
                                            values clause.

        :keyword str doc: optional documentation string usable by the orm.
        :keyword str key: optional identifier of this `Column` on the `Table`.
        :keyword bool index: when `True`, indicates that the column is indexed.
        :keyword dict info: data dictionary populated into `SchemaItem.info`.
        :keyword bool nullable: when `False`, renders `NOT NULL` in the ddl.

        :keyword callable | object onupdate: a scalar, python callable, or
                                             `ClauseElement` used as the default
                                             value on update statements when
                                             this column is absent from the set
                                             clause.

        :keyword bool primary_key: if `True`, marks this column as a primary
                                   key. multiple columns may form a composite
                                   primary key.

        :keyword str | ClauseElement | TextClause server_default: a
                `FetchedValue` instance, str, unicode or `text` construct
                representing the ddl-level default value for the column.

        :keyword str | ClauseElement | TextClause server_onupdate: a
                `FetchedValue` instance marking a database-side update default
                such as a trigger. it does not implement any generation itself,
                which must be specified separately.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at `None`, quoting follows case
                             sensitivity and reserved word rules.

        :keyword bool unique: when `True`, adds a unique constraint, or makes
                              the `index` unique when `index` is also `True`.

        :keyword bool system: when `True`, marks this as a system column which
                              should not appear in `create table` statements.

        :keyword str comment: optional sql comment rendered on table creation.

        :keyword bool allow_read: include this column in entity to dict
                                  conversion. defaults to True if not provided.

        :keyword bool allow_write: populate this column on conversion from
                                   dict. defaults to True if not provided.

        :keyword bool validated: register an automatic validator usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: register an automatic find validator.
                                      defaults to `validated` value if not
                                      provided.

        :keyword bool validated_range: register automatic find range validators
                                       (named `from_*` and `to_*`) for columns
                                       that are numbers, strings or any variant
                                       of date and time; ignored for other
                                       types. defaults to `validated_find`
                                       value if not provided.

        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """

        # timezone handling is decided here and baked into the type instance.
        self._column_type = CoreDateTime(timezone=kwargs.pop('timezone', True))
        super().__init__(*args, **kwargs)
class DateColumn(TypeMixin, CoreColumn):
    """
    date column class.

    a helper column class for defining columns whose value is a date.
    """

    _column_type = Date
class TimeColumn(TypeMixin, CoreColumn):
    """
    time column class.

    a helper column class for defining columns whose value is a time.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of TimeColumn.

        :param str name: the name of this column as represented in the database.
                         it may be given as the first positional argument or as
                         a keyword.

        :param object *args: extra positional arguments of `SchemaItem` derived
                             constructs which will be applied as options to the
                             column.

        :keyword bool timezone: specifies that this column is timezone aware.
                                a timezone offset usually has no meaning for a
                                single time without a date, so use it with
                                caution: recurring times valid on any timezone
                                should use timezone=False, and when remote
                                timezones must interpret the value correctly it
                                is better to keep timezone=False here and store
                                the timezone `name` in another column. in most
                                cases keeping the time with a timezone offset
                                is incorrect unless you know what you are
                                doing. defaults to False if not provided.

        :keyword callable | object default: a scalar, python callable or
                                            `ColumnElement` expression used as
                                            the default value on insert when
                                            this column is absent from the
                                            values clause.

        :keyword str doc: optional documentation string usable by the orm.
        :keyword str key: optional identifier of this `Column` on the `Table`.
        :keyword bool index: when `True`, indicates that the column is indexed.
        :keyword dict info: data dictionary populated into `SchemaItem.info`.
        :keyword bool nullable: when `False`, renders `NOT NULL` in the ddl.

        :keyword callable | object onupdate: a scalar, python callable, or
                                             `ClauseElement` used as the default
                                             value on update statements when
                                             this column is absent from the set
                                             clause.

        :keyword bool primary_key: if `True`, marks this column as a primary
                                   key. multiple columns may form a composite
                                   primary key.

        :keyword str | ClauseElement | TextClause server_default: a
                `FetchedValue` instance, str, unicode or `text` construct
                representing the ddl-level default value for the column.

        :keyword str | ClauseElement | TextClause server_onupdate: a
                `FetchedValue` instance marking a database-side update default
                such as a trigger. it does not implement any generation itself,
                which must be specified separately.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at `None`, quoting follows case
                             sensitivity and reserved word rules.

        :keyword bool unique: when `True`, adds a unique constraint, or makes
                              the `index` unique when `index` is also `True`.

        :keyword bool system: when `True`, marks this as a system column which
                              should not appear in `create table` statements.

        :keyword str comment: optional sql comment rendered on table creation.

        :keyword bool allow_read: include this column in entity to dict
                                  conversion. defaults to True if not provided.

        :keyword bool allow_write: populate this column on conversion from
                                   dict. defaults to True if not provided.

        :keyword bool validated: register an automatic validator usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: register an automatic find validator.
                                      defaults to `validated` value if not
                                      provided.

        :keyword bool validated_range: register automatic find range validators
                                       (named `from_*` and `to_*`) for columns
                                       that are numbers, strings or any variant
                                       of date and time; ignored for other
                                       types. defaults to `validated_find`
                                       value if not provided.

        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """

        # unlike DateTimeColumn, time columns default to naive (no timezone).
        self._column_type = Time(timezone=kwargs.pop('timezone', False))
        super().__init__(*args, **kwargs)
class TimeStampColumn(TypeMixin, CoreColumn):
    """
    timestamp column class.

    a helper column class for defining columns whose value is a datetime.
    """

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of TimeStampColumn.

        :param str name: the name of this column as represented in the database.
                         it may be given as the first positional argument or as
                         a keyword.

        :param object *args: extra positional arguments of `SchemaItem` derived
                             constructs which will be applied as options to the
                             column.

        :keyword bool timezone: specifies that this column is timezone aware.
                                defaults to True if not provided.

        :keyword callable | object default: a scalar, python callable or
                                            `ColumnElement` expression used as
                                            the default value on insert when
                                            this column is absent from the
                                            values clause.

        :keyword str doc: optional documentation string usable by the orm.
        :keyword str key: optional identifier of this `Column` on the `Table`.
        :keyword bool index: when `True`, indicates that the column is indexed.
        :keyword dict info: data dictionary populated into `SchemaItem.info`.
        :keyword bool nullable: when `False`, renders `NOT NULL` in the ddl.

        :keyword callable | object onupdate: a scalar, python callable, or
                                             `ClauseElement` used as the default
                                             value on update statements when
                                             this column is absent from the set
                                             clause.

        :keyword bool primary_key: if `True`, marks this column as a primary
                                   key. multiple columns may form a composite
                                   primary key.

        :keyword str | ClauseElement | TextClause server_default: a
                `FetchedValue` instance, str, unicode or `text` construct
                representing the ddl-level default value for the column.

        :keyword str | ClauseElement | TextClause server_onupdate: a
                `FetchedValue` instance marking a database-side update default
                such as a trigger. it does not implement any generation itself,
                which must be specified separately.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at `None`, quoting follows case
                             sensitivity and reserved word rules.

        :keyword bool unique: when `True`, adds a unique constraint, or makes
                              the `index` unique when `index` is also `True`.

        :keyword bool system: when `True`, marks this as a system column which
                              should not appear in `create table` statements.

        :keyword str comment: optional sql comment rendered on table creation.

        :keyword bool allow_read: include this column in entity to dict
                                  conversion. defaults to True if not provided.

        :keyword bool allow_write: populate this column on conversion from
                                   dict. defaults to True if not provided.

        :keyword bool validated: register an automatic validator usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: register an automatic find validator.
                                      defaults to `validated` value if not
                                      provided.

        :keyword bool validated_range: register automatic find range validators
                                       (named `from_*` and `to_*`) for columns
                                       that are numbers, strings or any variant
                                       of date and time; ignored for other
                                       types. defaults to `validated_find`
                                       value if not provided.

        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """

        # timezone handling is decided here and baked into the type instance.
        self._column_type = CoreTimeStamp(timezone=kwargs.pop('timezone', True))
        super().__init__(*args, **kwargs)
class TextColumn(StringColumn):
    """
    text column class.

    a helper class to define columns whose type is sqlalchemy `Text`.
    """

    # the type used when no explicit `type_` is provided.
    DEFAULT_TYPE = UnicodeText

    def __init__(self, *args, **kwargs):
        """
        initializes an instance of TextColumn.

        :param str name: the name of this column as represented in the database.
                         it may be given as the first positional argument or as
                         a keyword.

        :param TypeEngine type_: the column's type, given as an instance or, if
                                 the type needs no arguments, as the class
                                 itself. it may be the second positional
                                 argument or a keyword. it must be an instance
                                 or subclass of sqlalchemy `Text`. defaults to
                                 `UnicodeText` if not provided.

        :param object *args: extra positional arguments of `SchemaItem` derived
                             constructs which will be applied as options to the
                             column.

        :keyword callable | object default: a scalar, python callable or
                                            `ColumnElement` expression used as
                                            the default value on insert when
                                            this column is absent from the
                                            values clause.

        :keyword str doc: optional documentation string usable by the orm.
        :keyword str key: optional identifier of this `Column` on the `Table`.
        :keyword bool index: when `True`, indicates that the column is indexed.
        :keyword dict info: data dictionary populated into `SchemaItem.info`.
        :keyword bool nullable: when `False`, renders `NOT NULL` in the ddl.

        :keyword callable | object onupdate: a scalar, python callable, or
                                             `ClauseElement` used as the default
                                             value on update statements when
                                             this column is absent from the set
                                             clause.

        :keyword bool primary_key: if `True`, marks this column as a primary
                                   key. multiple columns may form a composite
                                   primary key.

        :keyword str | ClauseElement | TextClause server_default: a
                `FetchedValue` instance, str, unicode or `text` construct
                representing the ddl-level default value for the column.

        :keyword str | ClauseElement | TextClause server_onupdate: a
                `FetchedValue` instance marking a database-side update default
                such as a trigger. it does not implement any generation itself,
                which must be specified separately.

        :keyword bool quote: force quoting of this column's name on or off.
                             when left at `None`, quoting follows case
                             sensitivity and reserved word rules.

        :keyword bool unique: when `True`, adds a unique constraint, or makes
                              the `index` unique when `index` is also `True`.

        :keyword bool system: when `True`, marks this as a system column which
                              should not appear in `create table` statements.

        :keyword str comment: optional sql comment rendered on table creation.

        :keyword bool allow_read: include this column in entity to dict
                                  conversion. defaults to True if not provided.

        :keyword bool allow_write: populate this column on conversion from
                                   dict. defaults to True if not provided.

        :keyword int min_length: minimum length of value for this column.
                                 defaults to None if not provided.

        :keyword int max_length: maximum length of value for this column. if
                                 provided, and the type of this column is a
                                 class, it will be instantiated with this
                                 length. defaults to None if not provided.

        :keyword bool allow_blank: this column may hold a blank string value.
                                   defaults to False if not provided.

        :keyword bool allow_whitespace: this column may hold a whitespace
                                        string value. defaults to False if not
                                        provided.

        :keyword object | callable min_value: minimum value this column may
                                              hold; it may also be a callable
                                              with no inputs. a non-callable on
                                              a non-primary-key, named column
                                              produces a database check
                                              constraint, otherwise it is only
                                              used by validators. defaults to
                                              None if not provided.

        :keyword object | callable max_value: maximum value this column may
                                              hold; same rules as `min_value`.
                                              defaults to None if not provided.

        :keyword list | CoreEnum | callable check_in: valid values for this
                column. it may be a callable with no inputs or an enum class. a
                non-callable or enum class on a non-primary-key, named column
                produces a database check constraint, otherwise it is only used
                by validators. defaults to None if not provided.

        :keyword list | CoreEnum | callable check_not_in: invalid values for
                this column; same rules as `check_in`. defaults to None if not
                provided.

        :note check_in, check_not_in: only one of these options could be
                                      provided. otherwise it raises an error.

        :keyword bool validated: register an automatic validator usable through
                                 validator services for create and update.
                                 defaults to True if not provided.

        :keyword bool validated_find: register an automatic find validator.
                                      defaults to `validated` value if not
                                      provided.

        :keyword bool validated_range: register automatic find range validators
                                       (named `from_*` and `to_*`) for columns
                                       that are numbers, strings or any variant
                                       of date and time; ignored for other
                                       types. defaults to `validated_find`
                                       value if not provided.

        :raises TextColumnTypeIsInvalidError: text column type is invalid error.
        :raises InvalidColumnAccessLevelError: invalid column access level error.
        """

        # `_extract_name_and_type` consumes name/type entries from the
        # positional list, so work on a mutable copy.
        positional = list(args)
        name, type_ = self._extract_name_and_type(positional, kwargs)
        if type_ is None:
            type_ = self.DEFAULT_TYPE

        # reject any type that is not a sqlalchemy `Text` variant.
        if not misc_utils.is_subclass_or_instance(type_, Text):
            raise TextColumnTypeIsInvalidError(
                'The text column type must be an '
                'instance or subclass of [{text}].'.format(text=Text))

        kwargs.update(name=name, type_=type_)
        super().__init__(*positional, **kwargs)
| 56.564002
| 98
| 0.496141
| 9,900
| 101,193
| 5.03
| 0.043636
| 0.027311
| 0.023495
| 0.028516
| 0.902022
| 0.896801
| 0.889632
| 0.883688
| 0.880736
| 0.868667
| 0
| 0.000056
| 0.473056
| 101,193
| 1,788
| 99
| 56.595638
| 0.933818
| 0.829563
| 0
| 0.313725
| 0
| 0
| 0.061169
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091503
| false
| 0.013072
| 0.058824
| 0
| 0.352941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb0c8c4e5149a73d021edb87b2bab0aa2321186a
| 78,197
|
py
|
Python
|
poem/Poem/api/tests/test_thresholdsprofiles.py
|
ARGOeu/poem-react-ui
|
f21ceddf53471fd947d8a296d629782e61489b91
|
[
"Apache-2.0"
] | null | null | null |
poem/Poem/api/tests/test_thresholdsprofiles.py
|
ARGOeu/poem-react-ui
|
f21ceddf53471fd947d8a296d629782e61489b91
|
[
"Apache-2.0"
] | 139
|
2020-04-06T09:22:16.000Z
|
2021-08-02T06:39:22.000Z
|
poem/Poem/api/tests/test_thresholdsprofiles.py
|
vrdel/poem-2
|
42672fc066b71b4958168d8031e1da0043ac6d1f
|
[
"Apache-2.0"
] | 3
|
2019-07-10T09:37:38.000Z
|
2020-04-02T10:48:38.000Z
|
import json
from collections import OrderedDict
from unittest.mock import patch
from Poem.api import views_internal as views
from Poem.poem import models as poem_models
from Poem.users.models import CustUser
from django.contrib.contenttypes.models import ContentType
from django.core import serializers
from django_tenants.test.cases import TenantTestCase
from django_tenants.test.client import TenantRequestFactory
from rest_framework import status
from rest_framework.test import force_authenticate
from .utils_test import mocked_func, encode_data
class ListThresholdsProfilesAPIViewTests(TenantTestCase):
    def setUp(self):
        """
        builds the fixture shared by every test in this class.

        creates three users (regular, limited, superuser), three thresholds
        profiles, three groups of thresholds profiles, the user/group
        memberships, and one initial TenantHistory entry per profile.
        """
        self.factory = TenantRequestFactory(self.tenant)
        self.view = views.ListThresholdsProfiles.as_view()
        self.url = '/api/v2/internal/thresholdsprofiles/'
        # three users with different privilege levels
        self.user = CustUser.objects.create_user(username='testuser')
        self.limited_user = CustUser.objects.create_user(username='limited')
        self.superuser = CustUser.objects.create_user(
            username='poem', is_superuser=True
        )
        self.tp1 = poem_models.ThresholdsProfiles.objects.create(
            name='TEST_PROFILE',
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee',
            groupname='GROUP'
        )
        # tp2 intentionally has no groupname
        self.tp2 = poem_models.ThresholdsProfiles.objects.create(
            name='ANOTHER_PROFILE',
            apiid='12341234-oooo-kkkk-aaaa-aaeekkccnnee'
        )
        self.tp3 = poem_models.ThresholdsProfiles.objects.create(
            name='TEST_PROFILE3',
            apiid='piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a',
            groupname='GROUP2'
        )
        group1 = poem_models.GroupOfThresholdsProfiles.objects.create(
            name='GROUP'
        )
        group2 = poem_models.GroupOfThresholdsProfiles.objects.create(
            name='NEWGROUP'
        )
        group3 = poem_models.GroupOfThresholdsProfiles.objects.create(
            name='GROUP2'
        )
        # tp1 -> GROUP, tp3 -> GROUP2; tp2 stays outside any group
        group1.thresholdsprofiles.add(self.tp1)
        group3.thresholdsprofiles.add(self.tp3)
        # self.user is a member of GROUP and NEWGROUP; the limited user and
        # the superuser get bare profiles with no group memberships
        userprofile = poem_models.UserProfile.objects.create(user=self.user)
        userprofile.groupsofthresholdsprofiles.add(group1)
        userprofile.groupsofthresholdsprofiles.add(group2)
        poem_models.UserProfile.objects.create(user=self.limited_user)
        poem_models.UserProfile.objects.create(user=self.superuser)
        self.ct = ContentType.objects.get_for_model(
            poem_models.ThresholdsProfiles
        )
        # one 'Initial version.' TenantHistory entry per profile, with the
        # rules injected into the serialized payload before it is stored
        data = json.loads(
            serializers.serialize(
                'json', [self.tp1],
                use_natural_foreign_keys=True,
                use_natural_primary_keys=True
            )
        )
        data[0]['fields'].update({
            'rules': [
                {
                    'host': 'hostFoo',
                    'metric': 'metricA',
                    'thresholds': 'freshness=1s;10;9:;0;25 entries=1;3;0:2;10'
                }
            ]
        })
        poem_models.TenantHistory.objects.create(
            object_id=self.tp1.id,
            serialized_data=json.dumps(data),
            object_repr=self.tp1.__str__(),
            comment='Initial version.',
            user='testuser',
            content_type=self.ct
        )
        data = json.loads(
            serializers.serialize(
                'json', [self.tp2],
                use_natural_foreign_keys=True,
                use_natural_primary_keys=True
            )
        )
        data[0]['fields'].update({
            'rules': [
                {
                    'metric': 'metricB',
                    'thresholds': 'freshness=1s;10;9:;0;25'
                }
            ]
        })
        poem_models.TenantHistory.objects.create(
            object_id=self.tp2.id,
            serialized_data=json.dumps(data),
            object_repr=self.tp2.__str__(),
            comment='Initial version.',
            user='testuser',
            content_type=self.ct
        )
        data = json.loads(
            serializers.serialize(
                'json', [self.tp3],
                use_natural_foreign_keys=True,
                use_natural_primary_keys=True
            )
        )
        data[0]['fields'].update({
            'rules': [
                {
                    'metric': 'metricC',
                    'thresholds': 'freshness=1s;10;9:;0;25'
                }
            ]
        })
        # note: this history entry is attributed to the 'poem' user
        poem_models.TenantHistory.objects.create(
            object_id=self.tp3.id,
            serialized_data=json.dumps(data),
            object_repr=self.tp3.__str__(),
            comment='Initial version.',
            user='poem',
            content_type=self.ct
        )
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_all_thresholds_profiles_superuser(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url)
force_authenticate(request, user=self.superuser)
response = self.view(request)
self.assertEqual(
response.data,
[
OrderedDict([
('name', 'ANOTHER_PROFILE'),
('description', ''),
('apiid', '12341234-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', '')
]),
OrderedDict([
('name', 'TEST_PROFILE'),
('description', ''),
('apiid', '00000000-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', 'GROUP')
]),
OrderedDict([
('name', 'TEST_PROFILE3'),
('description', ''),
('apiid', 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a'),
('groupname', 'GROUP2')
])
]
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_all_thresholds_profiles_user(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url)
force_authenticate(request, user=self.user)
response = self.view(request)
self.assertEqual(
response.data,
[
OrderedDict([
('name', 'ANOTHER_PROFILE'),
('description', ''),
('apiid', '12341234-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', '')
]),
OrderedDict([
('name', 'TEST_PROFILE'),
('description', ''),
('apiid', '00000000-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', 'GROUP')
]),
OrderedDict([
('name', 'TEST_PROFILE3'),
('description', ''),
('apiid', 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a'),
('groupname', 'GROUP2')
])
]
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_all_thresholds_profiles_limited_user(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url)
force_authenticate(request, user=self.limited_user)
response = self.view(request)
self.assertEqual(
response.data,
[
OrderedDict([
('name', 'ANOTHER_PROFILE'),
('description', ''),
('apiid', '12341234-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', '')
]),
OrderedDict([
('name', 'TEST_PROFILE'),
('description', ''),
('apiid', '00000000-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', 'GROUP')
]),
OrderedDict([
('name', 'TEST_PROFILE3'),
('description', ''),
('apiid', 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a'),
('groupname', 'GROUP2')
])
]
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_thresholds_profiles_if_no_authentication(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_thresholds_profile_by_name_superuser(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url + 'TEST_PROFILE')
force_authenticate(request, user=self.superuser)
response = self.view(request, 'TEST_PROFILE')
self.assertEqual(
response.data,
OrderedDict([
('name', 'TEST_PROFILE'),
('description', ''),
('apiid', '00000000-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', 'GROUP')
])
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_thresholds_profile_by_name_user(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url + 'TEST_PROFILE')
force_authenticate(request, user=self.user)
response = self.view(request, 'TEST_PROFILE')
self.assertEqual(
response.data,
OrderedDict([
('name', 'TEST_PROFILE'),
('description', ''),
('apiid', '00000000-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', 'GROUP')
])
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_thresholds_profile_by_name_limited_user(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url + 'TEST_PROFILE')
force_authenticate(request, user=self.limited_user)
response = self.view(request, 'TEST_PROFILE')
self.assertEqual(
response.data,
OrderedDict([
('name', 'TEST_PROFILE'),
('description', ''),
('apiid', '00000000-oooo-kkkk-aaaa-aaeekkccnnee'),
('groupname', 'GROUP')
])
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_thresholds_profile_by_nonexisting_name_superuser(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url + 'nonexisting')
force_authenticate(request, user=self.superuser)
response = self.view(request, 'nonexisting')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(
response.data['detail'], 'Thresholds profile does not exist.'
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_thresholds_profile_by_nonexisting_name_user(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url + 'nonexisting')
force_authenticate(request, user=self.user)
response = self.view(request, 'nonexisting')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(
response.data['detail'], 'Thresholds profile does not exist.'
)
@patch('Poem.api.internal_views.thresholdsprofiles.sync_webapi')
def test_get_thresholds_profile_by_nonexisting_name_limited_usr(self, func):
func.side_effect = mocked_func
request = self.factory.get(self.url + 'nonexisting')
force_authenticate(request, user=self.limited_user)
response = self.view(request, 'nonexisting')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(
response.data['detail'], 'Thresholds profile does not exist.'
)
def test_put_thresholds_profile_superuser(self):
    """Superuser PUT moves the profile to NEWGROUP and updates its rules.

    The profile is matched by apiid; a second TenantHistory version is
    recorded with a 'changed groupname/rules' comment.
    """
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.superuser)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # PUT modifies the existing profile — no new row is created.
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    # The name from the payload is ignored; only groupname/rules change.
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'NEWGROUP')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    group2 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    # Profile must have moved between the two groups' m2m relations.
    self.assertTrue(
        group1.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    self.assertFalse(
        group2.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 2)
    # Compare the change-comment entries order-insensitively.
    comment_set = set()
    for item in json.loads(history[0].comment):
        comment_set.add(json.dumps(item))
    self.assertEqual(
        comment_set,
        {'{"changed": {"fields": ["groupname", "rules"]}}'}
    )
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    self.assertEqual(
        serialized_data['rules'],
        json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    )
def test_put_thresholds_profile_user(self):
    """A regular user in both groups may PUT — same outcome as superuser."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # PUT modifies the existing profile — no new row is created.
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    # The name from the payload is ignored; only groupname/rules change.
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'NEWGROUP')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    group2 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    self.assertTrue(
        group1.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    self.assertFalse(
        group2.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 2)
    # Compare the change-comment entries order-insensitively.
    comment_set = set()
    for item in json.loads(history[0].comment):
        comment_set.add(json.dumps(item))
    self.assertEqual(
        comment_set,
        {'{"changed": {"fields": ["groupname", "rules"]}}'}
    )
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    self.assertEqual(
        serialized_data['rules'],
        json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    )
def test_put_thresholds_profile_user_wrong_group(self):
    """PUT assigning the profile to a group the user lacks is rejected (401).

    Nothing changes: profile stays in GROUP and only the initial history
    version exists.
    """
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'GROUP2',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        response.data['detail'],
        'You do not have permission to assign thresholds profiles to the '
        'given group.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'GROUP')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    group2 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    # Profile must remain attached to its original group only.
    self.assertTrue(
        group2.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    self.assertFalse(
        group1.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'host': 'hostFoo',
                'metric': 'metricA',
                'thresholds': 'freshness=1s;10;9:;0;25 entries=1;3;0:2;10'
            }
        ]
    )
def test_put_thresholds_profile_user_wrong_initial_group(self):
    """PUT on a profile whose current group the user lacks is rejected (401).

    Targets a different apiid (a profile in a group outside the user's
    groups); the original tp1 profile is asserted untouched.
    """
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a',
        'groupname': 'GROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        response.data['detail'],
        'You do not have permission to change thresholds profiles assigned '
        'to this group.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'GROUP')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    group2 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    self.assertTrue(
        group2.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    self.assertFalse(
        group1.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'host': 'hostFoo',
                'metric': 'metricA',
                'thresholds': 'freshness=1s;10;9:;0;25 entries=1;3;0:2;10'
            }
        ]
    )
def test_put_thresholds_profile_limited_user(self):
    """A limited user may not PUT at all — 401 and nothing changes."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'GROUP2',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.limited_user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        response.data['detail'],
        'You do not have permission to change thresholds profiles.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'GROUP')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    group2 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    # Profile must remain attached to its original group only.
    self.assertTrue(
        group2.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    self.assertFalse(
        group1.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'host': 'hostFoo',
                'metric': 'metricA',
                'thresholds': 'freshness=1s;10;9:;0;25 entries=1;3;0:2;10'
            }
        ]
    )
def test_put_thresholds_profile_without_groupname_superuser(self):
    """Superuser PUT on a group-less profile (tp2) assigns NEWGROUP.

    History records both a rules change and a groupname addition.
    """
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '12341234-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.superuser)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp2.id)
    # Name is kept; only groupname/rules are updated.
    self.assertEqual(tp.name, 'ANOTHER_PROFILE')
    self.assertEqual(tp.groupname, 'NEWGROUP')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    self.assertTrue(
        group1.thresholdsprofiles.filter(
            apiid='12341234-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 2)
    # Compare the change-comment entries order-insensitively.
    comment_set = set()
    for item in json.loads(history[0].comment):
        comment_set.add(json.dumps(item))
    self.assertEqual(
        comment_set,
        {
            '{"changed": {"fields": ["rules"]}}',
            '{"added": {"fields": ["groupname"]}}'
        }
    )
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    self.assertEqual(
        serialized_data['rules'],
        json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    )
def test_put_thresholds_profile_without_groupname_user(self):
    """Regular user PUT on a group-less profile is rejected (401)."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '12341234-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        response.data['detail'],
        'You do not have permission to change thresholds profiles without '
        'assigned group.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp2.id)
    # tp2 stays as-is: no name, group or rules change.
    self.assertEqual(tp.name, 'ANOTHER_PROFILE')
    self.assertEqual(tp.groupname, '')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    self.assertFalse(
        group1.thresholdsprofiles.filter(
            apiid='12341234-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'metric': 'metricB',
                'thresholds': 'freshness=1s;10;9:;0;25'
            }
        ]
    )
def test_put_thresholds_profile_without_groupname_limited_user(self):
    """Limited user PUT on a group-less profile is rejected (401)."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '12341234-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.limited_user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        response.data['detail'],
        'You do not have permission to change thresholds profiles.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp2.id)
    # tp2 stays as-is: no name, group or rules change.
    self.assertEqual(tp.name, 'ANOTHER_PROFILE')
    self.assertEqual(tp.groupname, '')
    group1 = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='NEWGROUP'
    )
    self.assertFalse(
        group1.thresholdsprofiles.filter(
            apiid='12341234-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'metric': 'metricB',
                'thresholds': 'freshness=1s;10;9:;0;25'
            }
        ]
    )
def test_put_thresholds_profile_with_invalid_data_superuser(self):
    """PUT with an empty apiid is rejected with 400; nothing changes."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '',
        'groupname': 'GROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.superuser)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data['detail'], 'Apiid field should be specified.')
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_invalid_data_user(self):
    """Regular user PUT with an empty apiid is rejected with 400."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '',
        'groupname': 'GROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data['detail'], 'Apiid field should be specified.')
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_invalid_data_limited_user(self):
    """Limited user PUT fails with 401 before payload validation kicks in."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '',
        'groupname': 'GROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.limited_user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        resp.data['detail'],
        'You do not have permission to change thresholds profiles.'
    )
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_nonexisting_profile_superuser(self):
    """Superuser PUT with an unknown apiid yields 404."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '99999999-9999-9999-9999-999999999999',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.superuser)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
    self.assertEqual(resp.data['detail'], 'Thresholds profile does not exist.')
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_nonexisting_profile_user(self):
    """Regular user PUT with an unknown apiid yields 404."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '99999999-9999-9999-9999-999999999999',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
    self.assertEqual(resp.data['detail'], 'Thresholds profile does not exist.')
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_nonexisting_profile_limited_user(self):
    """Limited user PUT is rejected with 401 even for an unknown apiid."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '99999999-9999-9999-9999-999999999999',
        'groupname': 'NEWGROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.limited_user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        resp.data['detail'],
        'You do not have permission to change thresholds profiles.'
    )
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_nonexisting_group_superuser(self):
    """Superuser PUT naming an unknown group yields 404; nothing changes."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'nonexisting',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.superuser)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
    self.assertEqual(
        response.data['detail'],
        'Group of thresholds profiles does not exist.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    # Profile stays in its original group with its original data.
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'GROUP')
    group = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    self.assertTrue(
        group.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'host': 'hostFoo',
                'metric': 'metricA',
                'thresholds': 'freshness=1s;10;9:;0;25 entries=1;3;0:2;10'
            }
        ]
    )
def test_put_thresholds_profile_with_nonexisting_group_user(self):
    """Regular user PUT naming an unknown group yields 404; nothing changes."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'nonexisting',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
    self.assertEqual(
        response.data['detail'],
        'Group of thresholds profiles does not exist.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    # Profile stays in its original group with its original data.
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'GROUP')
    group = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    self.assertTrue(
        group.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'host': 'hostFoo',
                'metric': 'metricA',
                'thresholds': 'freshness=1s;10;9:;0;25 entries=1;3;0:2;10'
            }
        ]
    )
def test_put_thresholds_profile_with_nonexisting_group_limited_user(self):
    """Limited user PUT is rejected with 401 before group lookup happens."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'nonexisting',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    content, content_type = encode_data(data)
    request = self.factory.put(self.url, content, content_type=content_type)
    force_authenticate(request, user=self.limited_user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        response.data['detail'],
        'You do not have permission to change thresholds profiles.'
    )
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    tp = poem_models.ThresholdsProfiles.objects.get(id=self.tp1.id)
    # Profile stays in its original group with its original data.
    self.assertEqual(tp.name, 'TEST_PROFILE')
    self.assertEqual(tp.groupname, 'GROUP')
    group = poem_models.GroupOfThresholdsProfiles.objects.get(
        name='GROUP'
    )
    self.assertTrue(
        group.thresholdsprofiles.filter(
            apiid='00000000-oooo-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=tp.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], tp.name)
    self.assertEqual(serialized_data['groupname'], tp.groupname)
    self.assertEqual(serialized_data['apiid'], tp.apiid)
    # Here 'rules' is compared as a parsed list, not a JSON string.
    self.assertEqual(
        serialized_data['rules'],
        [
            {
                'host': 'hostFoo',
                'metric': 'metricA',
                'thresholds': 'freshness=1s;10;9:;0;25 entries=1;3;0:2;10'
            }
        ]
    )
def test_put_thresholds_profile_with_missing_data_key_superuser(self):
    """Superuser PUT lacking the 'groupname' key is rejected with 400."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.superuser)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data['detail'], 'Missing data key: groupname')
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_missing_data_key_user(self):
    """Regular user PUT lacking the 'groupname' key is rejected with 400."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data['detail'], 'Missing data key: groupname')
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_put_thresholds_profile_with_missing_data_key_limited_user(self):
    """Limited user PUT is rejected with 401 before key validation runs."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_TEST_PROFILE',
        'apiid': '00000000-oooo-kkkk-aaaa-aaeekkccnnee',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    body, c_type = encode_data(payload)
    req = self.factory.put(self.url, body, content_type=c_type)
    force_authenticate(req, user=self.limited_user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        resp.data['detail'],
        'You do not have permission to change thresholds profiles.'
    )
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
def test_post_thresholds_profile_superuser(self):
    """Superuser POST creates a new profile, links its group, logs history."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_PROFILE',
        'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'GROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    request = self.factory.post(self.url, data, format='json')
    force_authenticate(request, user=self.superuser)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # POST adds a fourth profile.
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 4
    )
    profile = poem_models.ThresholdsProfiles.objects.get(
        apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
    )
    self.assertEqual(profile.name, 'NEW_PROFILE')
    self.assertEqual(profile.groupname, 'GROUP')
    group = poem_models.GroupOfThresholdsProfiles.objects.get(name='GROUP')
    self.assertTrue(
        group.thresholdsprofiles.filter(
            apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=profile.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], profile.name)
    self.assertEqual(serialized_data['apiid'], profile.apiid)
    self.assertEqual(serialized_data['groupname'], profile.groupname)
    # History stores the rules as the exact JSON string sent in.
    self.assertEqual(
        serialized_data['rules'],
        '[{"host": "newHost", "metric": "newMetric", '
        '"thresholds": "entries=1;3;0:2;10"}]'
    )
def test_post_thresholds_profile_user(self):
    """A user who belongs to the target group may POST a new profile."""
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 3
    )
    data = {
        'name': 'NEW_PROFILE',
        'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'GROUP',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    request = self.factory.post(self.url, data, format='json')
    force_authenticate(request, user=self.user)
    response = self.view(request)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # POST adds a fourth profile.
    self.assertEqual(
        poem_models.ThresholdsProfiles.objects.all().count(), 4
    )
    profile = poem_models.ThresholdsProfiles.objects.get(
        apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
    )
    self.assertEqual(profile.name, 'NEW_PROFILE')
    self.assertEqual(profile.groupname, 'GROUP')
    group = poem_models.GroupOfThresholdsProfiles.objects.get(name='GROUP')
    self.assertTrue(
        group.thresholdsprofiles.filter(
            apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
        ).exists()
    )
    history = poem_models.TenantHistory.objects.filter(
        object_id=profile.id, content_type=self.ct
    ).order_by('-date_created')
    self.assertEqual(history.count(), 1)
    self.assertEqual(history[0].comment, 'Initial version.')
    serialized_data = json.loads(history[0].serialized_data)[0]['fields']
    self.assertEqual(serialized_data['name'], profile.name)
    self.assertEqual(serialized_data['apiid'], profile.apiid)
    self.assertEqual(serialized_data['groupname'], profile.groupname)
    # History stores the rules as the exact JSON string sent in.
    self.assertEqual(
        serialized_data['rules'],
        '[{"host": "newHost", "metric": "newMetric", '
        '"thresholds": "entries=1;3;0:2;10"}]'
    )
def test_post_thresholds_profile_user_wrong_group(self):
    """POST into a group the user is not in is rejected (401); no profile."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_PROFILE',
        'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'GROUP2',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    req = self.factory.post(self.url, payload, format='json')
    force_authenticate(req, user=self.user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        resp.data['detail'],
        'You do not have permission to assign thresholds profiles to the '
        'given group.'
    )
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    self.assertRaises(
        poem_models.ThresholdsProfiles.DoesNotExist,
        poem_models.ThresholdsProfiles.objects.get,
        apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
    )
def test_post_thresholds_profile_limited_user(self):
    """Limited user POST is rejected (401) and no profile is created."""
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    payload = {
        'name': 'NEW_PROFILE',
        'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
        'groupname': 'GROUP2',
        'rules': json.dumps([
            {
                'host': 'newHost',
                'metric': 'newMetric',
                'thresholds': 'entries=1;3;0:2;10'
            }
        ])
    }
    req = self.factory.post(self.url, payload, format='json')
    force_authenticate(req, user=self.limited_user)
    resp = self.view(req)
    self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
    self.assertEqual(
        resp.data['detail'],
        'You do not have permission to add thresholds profiles.'
    )
    self.assertEqual(poem_models.ThresholdsProfiles.objects.all().count(), 3)
    self.assertRaises(
        poem_models.ThresholdsProfiles.DoesNotExist,
        poem_models.ThresholdsProfiles.objects.get,
        apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
    )
def test_post_thresholds_profile_with_nonexisting_group_superuser(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
'groupname': 'nonexisting',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.superuser)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(
response.data['detail'],
'Group of thresholds profiles does not exist.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
)
def test_post_thresholds_profile_with_nonexisting_group_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
'groupname': 'nonexisting',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.user)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(
response.data['detail'],
'Group of thresholds profiles does not exist.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
)
def test_post_thresholds_profile_with_nonexisting_group_limited_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
'groupname': 'nonexisting',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.limited_user)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to add thresholds profiles.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
)
def test_post_thresholds_profile_with_empty_group_superuser(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
'groupname': '',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.superuser)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 4
)
profile = poem_models.ThresholdsProfiles.objects.get(
apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
)
self.assertEqual(profile.name, 'NEW_PROFILE')
self.assertEqual(profile.groupname, '')
group = poem_models.GroupOfThresholdsProfiles.objects.get(name='GROUP')
self.assertFalse(
group.thresholdsprofiles.filter(
apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
).exists()
)
history = poem_models.TenantHistory.objects.filter(
object_id=profile.id, content_type=self.ct
).order_by('-date_created')
self.assertEqual(history.count(), 1)
self.assertEqual(history[0].comment, 'Initial version.')
serialized_data = json.loads(history[0].serialized_data)[0]['fields']
self.assertEqual(serialized_data['name'], profile.name)
self.assertEqual(serialized_data['apiid'], profile.apiid)
self.assertEqual(serialized_data['groupname'], profile.groupname)
self.assertEqual(
serialized_data['rules'],
'[{"host": "newHost", "metric": "newMetric", '
'"thresholds": "entries=1;3;0:2;10"}]'
)
def test_post_thresholds_profile_with_empty_group_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
'groupname': '',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.user)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to assign thresholds profiles to the '
'given group.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
)
def test_post_thresholds_profile_with_empty_group_limited_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '12341234-aaaa-kkkk-aaaa-aaeekkccnnee',
'groupname': '',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.limited_user)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to add thresholds profiles.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
apiid='12341234-aaaa-kkkk-aaaa-aaeekkccnnee'
)
def test_post_thresholds_profile_with_invalid_data_superuser(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '',
'groupname': 'GROUP',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.superuser)
response = self.view(request)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.data['detail'], 'apiid: This field may not be blank.'
)
def test_post_thresholds_profile_with_invalid_data_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '',
'groupname': 'GROUP',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.user)
response = self.view(request)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.data['detail'], 'apiid: This field may not be blank.'
)
def test_post_thresholds_profile_with_invalid_data_limited_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
data = {
'name': 'NEW_PROFILE',
'apiid': '',
'groupname': 'GROUP',
'rules': json.dumps([
{
'host': 'newHost',
'metric': 'newMetric',
'thresholds': 'entries=1;3;0:2;10'
}
])
}
request = self.factory.post(self.url, data, format='json')
force_authenticate(request, user=self.limited_user)
response = self.view(request)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to add thresholds profiles.'
)
def test_delete_thresholds_profile_superuser(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp1.id, content_type=self.ct
).count(), 1
)
request = self.factory.delete(
self.url + '00000000-oooo-kkkk-aaaa-aaeekkccnnee'
)
force_authenticate(request, user=self.superuser)
response = self.view(request, '00000000-oooo-kkkk-aaaa-aaeekkccnnee')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 2
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
id=self.tp1.id
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp1.id, content_type=self.ct
).count(), 0
)
def test_delete_thresholds_profile_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp1.id, content_type=self.ct
).count(), 1
)
request = self.factory.delete(
self.url + '00000000-oooo-kkkk-aaaa-aaeekkccnnee'
)
force_authenticate(request, user=self.user)
response = self.view(request, '00000000-oooo-kkkk-aaaa-aaeekkccnnee')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 2
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
id=self.tp1.id
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp1.id, content_type=self.ct
).count(), 0
)
def test_delete_thresholds_profile_user_wrong_group(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp3.id, content_type=self.ct
).count(), 1
)
request = self.factory.delete(
self.url + 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a'
)
force_authenticate(request, user=self.user)
response = self.view(request, 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to delete thresholds profiles assigned '
'to this group.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
profile = poem_models.ThresholdsProfiles.objects.get(id=self.tp3.id)
assert profile
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp3.id, content_type=self.ct
).count(), 1
)
def test_delete_thresholds_profile_limited_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp3.id, content_type=self.ct
).count(), 1
)
request = self.factory.delete(
self.url + 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a'
)
force_authenticate(request, user=self.limited_user)
response = self.view(request, 'piequ8ja-gj3z-9tai-2rlt-uuroth4lis1a')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to delete thresholds profiles.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
profile = poem_models.ThresholdsProfiles.objects.get(id=self.tp3.id)
assert profile
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp3.id, content_type=self.ct
).count(), 1
)
def test_delete_thresholds_profile_with_empty_group_superuser(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp2.id, content_type=self.ct
).count(), 1
)
request = self.factory.delete(
self.url + '12341234-oooo-kkkk-aaaa-aaeekkccnnee'
)
force_authenticate(request, user=self.superuser)
response = self.view(request, '12341234-oooo-kkkk-aaaa-aaeekkccnnee')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 2
)
self.assertRaises(
poem_models.ThresholdsProfiles.DoesNotExist,
poem_models.ThresholdsProfiles.objects.get,
id=self.tp2.id
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp2.id, content_type=self.ct
).count(), 0
)
def test_delete_thresholds_profile_with_empty_group_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp2.id, content_type=self.ct
).count(), 1
)
request = self.factory.delete(
self.url + '12341234-oooo-kkkk-aaaa-aaeekkccnnee'
)
force_authenticate(request, user=self.user)
response = self.view(request, '12341234-oooo-kkkk-aaaa-aaeekkccnnee')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to delete thresholds profiles without '
'assigned group.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
profile = poem_models.ThresholdsProfiles.objects.get(id=self.tp2.id)
assert profile
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp2.id, content_type=self.ct
).count(), 1
)
def test_delete_thresholds_profile_with_empty_group_limited_user(self):
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp2.id, content_type=self.ct
).count(), 1
)
request = self.factory.delete(
self.url + '12341234-oooo-kkkk-aaaa-aaeekkccnnee'
)
force_authenticate(request, user=self.limited_user)
response = self.view(request, '12341234-oooo-kkkk-aaaa-aaeekkccnnee')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to delete thresholds profiles.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
profile = poem_models.ThresholdsProfiles.objects.get(id=self.tp2.id)
assert profile
self.assertEqual(
poem_models.TenantHistory.objects.filter(
object_id=self.tp2.id, content_type=self.ct
).count(), 1
)
def test_delete_nonexisting_thresholds_profile_superuser(self):
request = self.factory.delete(self.url + 'nonexisting')
force_authenticate(request, user=self.superuser)
response = self.view(request, 'nonexisting')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(
response.data['detail'], 'Thresholds profile not found.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
def test_delete_nonexisting_thresholds_profile_user(self):
request = self.factory.delete(self.url + 'nonexisting')
force_authenticate(request, user=self.user)
response = self.view(request, 'nonexisting')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(
response.data['detail'], 'Thresholds profile not found.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
def test_delete_nonexisting_thresholds_profile_limited_user(self):
request = self.factory.delete(self.url + 'nonexisting')
force_authenticate(request, user=self.limited_user)
response = self.view(request, 'nonexisting')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to delete thresholds profiles.'
)
self.assertEqual(
poem_models.ThresholdsProfiles.objects.all().count(), 3
)
def test_delete_thresholds_profile_without_specifying_apiid_superuser(self):
request = self.factory.delete(self.url)
force_authenticate(request, user=self.superuser)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.data['detail'], 'Thresholds profile not specified.'
)
def test_delete_thresholds_profile_without_specifying_apiid_user(self):
request = self.factory.delete(self.url)
force_authenticate(request, user=self.user)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.data['detail'], 'Thresholds profile not specified.'
)
def test_delete_thresholds_profile_without_specifying_apiid_limtd_usr(self):
request = self.factory.delete(self.url)
force_authenticate(request, user=self.limited_user)
response = self.view(request)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(
response.data['detail'],
'You do not have permission to delete thresholds profiles.'
)
| 38.923345
| 80
| 0.570572
| 7,446
| 78,197
| 5.82635
| 0.029412
| 0.105456
| 0.080677
| 0.091972
| 0.972893
| 0.970219
| 0.962912
| 0.954775
| 0.950949
| 0.944909
| 0
| 0.027701
| 0.313976
| 78,197
| 2,008
| 81
| 38.942729
| 0.781005
| 0
| 0
| 0.737468
| 0
| 0.003618
| 0.159303
| 0.050577
| 0
| 0
| 0
| 0
| 0.174677
| 1
| 0.029457
| false
| 0
| 0.006718
| 0
| 0.036693
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb41742055925371e81c3b8542902033010b670b
| 17,290
|
py
|
Python
|
AppPkg/Applications/Python/Python-2.7.2/Tools/pybench/Numbers.py
|
CEOALT1/RefindPlusUDK
|
116b957ad735f96fbb6d80a0ba582046960ba164
|
[
"BSD-2-Clause"
] | 2,757
|
2018-04-28T21:41:36.000Z
|
2022-03-29T06:33:36.000Z
|
AppPkg/Applications/Python/Python-2.7.2/Tools/pybench/Numbers.py
|
CEOALT1/RefindPlusUDK
|
116b957ad735f96fbb6d80a0ba582046960ba164
|
[
"BSD-2-Clause"
] | 20
|
2019-07-23T15:29:32.000Z
|
2022-01-21T12:53:04.000Z
|
AppPkg/Applications/Python/Python-2.7.2/Tools/pybench/Numbers.py
|
CEOALT1/RefindPlusUDK
|
116b957ad735f96fbb6d80a0ba582046960ba164
|
[
"BSD-2-Clause"
] | 449
|
2018-05-09T05:54:05.000Z
|
2022-03-30T14:54:18.000Z
|
from pybench import Test
class CompareIntegers(Test):
version = 2.0
operations = 30 * 5
rounds = 120000
def test(self):
for i in xrange(self.rounds):
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
2 < 3
2 > 3
2 == 3
2 > 3
2 < 3
def calibrate(self):
for i in xrange(self.rounds):
pass
class CompareFloats(Test):
version = 2.0
operations = 30 * 5
rounds = 80000
def test(self):
for i in xrange(self.rounds):
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
2.1 < 3.31
2.1 > 3.31
2.1 == 3.31
2.1 > 3.31
2.1 < 3.31
def calibrate(self):
for i in xrange(self.rounds):
pass
class CompareFloatsIntegers(Test):
version = 2.0
operations = 30 * 5
rounds = 60000
def test(self):
for i in xrange(self.rounds):
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
2.1 < 4
2.1 > 4
2.1 == 4
2.1 > 4
2.1 < 4
def calibrate(self):
for i in xrange(self.rounds):
pass
class CompareLongs(Test):
version = 2.0
operations = 30 * 5
rounds = 70000
def test(self):
for i in xrange(self.rounds):
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
1234567890L < 3456789012345L
1234567890L > 3456789012345L
1234567890L == 3456789012345L
1234567890L > 3456789012345L
1234567890L < 3456789012345L
def calibrate(self):
for i in xrange(self.rounds):
pass
| 22.025478
| 42
| 0.335512
| 1,774
| 17,290
| 3.270011
| 0.019166
| 0.10343
| 0.077573
| 0.129288
| 0.981383
| 0.981383
| 0.981383
| 0.981383
| 0.959317
| 0.953629
| 0
| 0.707041
| 0.591729
| 17,290
| 784
| 43
| 22.053571
| 0.114747
| 0
| 0
| 0.985871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.006279
| 0.00157
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
cb704bfd5d9165734b603cd18d63c67869dd2343
| 24,300
|
py
|
Python
|
deepchem/feat/complex_featurizers/grid_featurizers.py
|
lilleswing/deepchem
|
355954b37d333fc3e2c3b3e28d103297eb642769
|
[
"MIT"
] | 3
|
2019-05-29T19:18:25.000Z
|
2021-01-25T05:44:05.000Z
|
deepchem/feat/complex_featurizers/grid_featurizers.py
|
lilleswing/deepchem
|
355954b37d333fc3e2c3b3e28d103297eb642769
|
[
"MIT"
] | 10
|
2017-02-23T19:39:22.000Z
|
2017-08-31T22:21:18.000Z
|
deepchem/feat/complex_featurizers/grid_featurizers.py
|
lilleswing/deepchem
|
355954b37d333fc3e2c3b3e28d103297eb642769
|
[
"MIT"
] | 1
|
2018-09-22T00:53:53.000Z
|
2018-09-22T00:53:53.000Z
|
"""
Compute various spatial fingerprints for macromolecular complexes.
"""
import itertools
import logging
import numpy as np
from deepchem.utils import rdkit_utils
from deepchem.feat import ComplexFeaturizer
from deepchem.utils.voxel_utils import voxelize
from deepchem.utils.voxel_utils import convert_atom_to_voxel
from deepchem.utils.voxel_utils import convert_atom_pair_to_voxel
from deepchem.utils.noncovalent_utils import compute_salt_bridges
from deepchem.utils.noncovalent_utils import compute_binding_pocket_cation_pi
from deepchem.utils.noncovalent_utils import compute_pi_stack
from deepchem.utils.noncovalent_utils import compute_hydrogen_bonds
from deepchem.utils.rdkit_utils import MoleculeLoadException
from deepchem.utils.rdkit_utils import compute_contact_centroid
from deepchem.utils.geometry_utils import compute_pairwise_distances
from deepchem.utils.geometry_utils import subtract_centroid
from deepchem.utils.fragment_utils import get_partial_charge
from deepchem.utils.fragment_utils import reduce_molecular_complex_to_contacts
from typing import List, Tuple, Optional
logger = logging.getLogger(__name__)
HBOND_DIST_BINS = [(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)]
HBOND_ANGLE_CUTOFFS = [5., 50., 90.]
def compute_charge_dictionary(molecule):
"""Create a dictionary with partial charges for each atom in the molecule.
This function assumes that the charges for the molecule are
already computed (it can be done with
rdkit_util.compute_charges(molecule))
"""
charge_dictionary = {}
for i, atom in enumerate(molecule.GetAtoms()):
charge_dictionary[i] = get_partial_charge(atom)
return charge_dictionary
class ChargeVoxelizer(ComplexFeaturizer):
"""Localize partial charges of atoms in macromolecular complexes.
Given a macromolecular complex made up of multiple
constitutent molecules, compute the partial (Gasteiger
charge) on each molecule. For each atom, localize this
partial charge in the voxel in which it originated to create
a local charge array. Sum contributions to get an effective
charge at each voxel.
Let `voxels_per_edge = int(box_width/voxel_width)`. Creates a
tensor output of shape `(voxels_per_edge, voxels_per_edge,
voxels_per_edge, 1)` for each macromolecular complex that computes
the effective charge at each voxel.
"""
def __init__(self,
cutoff: float = 4.5,
box_width: float = 16.0,
voxel_width: float = 1.0,
reduce_to_contacts: bool = True):
"""
Parameters
----------
cutoff: float (default 4.5)
Distance cutoff in angstroms for molecules in complex.
box_width: float, optional (default 16.0)
Size of a box in which voxel features are calculated. Box
is centered on a ligand centroid.
voxel_width: float, optional (default 1.0)
Size of a 3D voxel in a grid.
reduce_to_contacts: bool, optional
If True, reduce the atoms in the complex to those near a contact
region.
"""
self.cutoff = cutoff
self.box_width = box_width
self.voxel_width = voxel_width
self.reduce_to_contacts = reduce_to_contacts
def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray:
"""
Compute featurization for a single mol/protein complex
Parameters
----------
mol_pdb: str
Filename for ligand molecule
protein_pdb: str
Filename for protein molecule
"""
molecular_complex = (mol_pdb, protein_pdb)
try:
fragments = rdkit_utils.load_complex(
molecular_complex, add_hydrogens=False)
except MoleculeLoadException:
logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
return None
pairwise_features = []
# We compute pairwise contact fingerprints
centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)
if self.reduce_to_contacts:
fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff)
# We compute pairwise contact fingerprints
for (frag1_ind, frag2_ind) in itertools.combinations(
range(len(fragments)), 2):
frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind]
frag1_xyz = subtract_centroid(frag1[0], centroid)
frag2_xyz = subtract_centroid(frag2[0], centroid)
xyzs = [frag1_xyz, frag2_xyz]
rdks = [frag1[1], frag2[1]]
pairwise_features.append(
sum([
voxelize(
convert_atom_to_voxel,
hash_function=None,
coordinates=xyz,
box_width=self.box_width,
voxel_width=self.voxel_width,
feature_dict=compute_charge_dictionary(mol),
nb_channel=1,
dtype="np.float16") for xyz, mol in zip(xyzs, rdks)
]))
# Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis.
return np.concatenate(pairwise_features, axis=-1)
class SaltBridgeVoxelizer(ComplexFeaturizer):
  """Voxelize salt bridges in a macromolecular complex.

  For a complex built from several constituent molecules, every salt
  bridge between a pair of molecules is localized to the voxel in which
  each participating atom originated. When the two atoms of a bridge sit
  in different voxels, the interaction is counted once in each voxel.

  With `voxels_per_edge = int(box_width/voxel_width)`, the output for
  each complex is a tensor of shape `(voxels_per_edge, voxels_per_edge,
  voxels_per_edge, 1)` holding the per-voxel salt-bridge counts.
  """

  def __init__(self,
               cutoff: float = 5.0,
               box_width: float = 16.0,
               voxel_width: float = 1.0,
               reduce_to_contacts: bool = True):
    """
    Parameters
    ----------
    cutoff: float, optional (default 5.0)
      Maximum distance in angstroms between two atoms for them to be
      considered for a salt bridge.
    box_width: float, optional (default 16.0)
      Edge length of the box in which voxel features are computed; the
      box is centered on the ligand centroid.
    voxel_width: float, optional (default 1.0)
      Edge length of one 3D voxel in the grid.
    reduce_to_contacts: bool, optional
      If True, restrict the complex to atoms near a contact region.
    """
    self.cutoff = cutoff
    self.box_width = box_width
    self.voxel_width = voxel_width
    self.reduce_to_contacts = reduce_to_contacts

  def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray:
    """Featurize a single ligand/protein complex.

    Parameters
    ----------
    mol_pdb: str
      Filename for ligand molecule
    protein_pdb: str
      Filename for protein molecule
    """
    complex_files = (mol_pdb, protein_pdb)
    try:
      fragments = rdkit_utils.load_complex(complex_files, add_hydrogens=False)
    except MoleculeLoadException:
      logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
      return None
    # The voxel grid is centered on the centroid of the contact region.
    centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)
    if self.reduce_to_contacts:
      fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff)
    feature_tensors = []
    for i, j in itertools.combinations(range(len(fragments)), 2):
      first, second = fragments[i], fragments[j]
      distances = compute_pairwise_distances(first[0], second[0])
      shifted = [
          subtract_centroid(first[0], centroid),
          subtract_centroid(second[0], centroid)
      ]
      grid = None
      for coords in shifted:
        voxels = voxelize(
            convert_atom_pair_to_voxel,
            hash_function=None,
            coordinates=coords,
            box_width=self.box_width,
            voxel_width=self.voxel_width,
            feature_list=compute_salt_bridges(
                first[1], second[1], distances, cutoff=self.cutoff),
            nb_channel=1)
        grid = voxels if grid is None else grid + voxels
      feature_tensors.append(grid)
    # Each tensor has shape (voxels_per_edge,)*3 + (1,); join on the channel axis.
    return np.concatenate(feature_tensors, axis=-1)
class CationPiVoxelizer(ComplexFeaturizer):
  """Voxelize cation-pi interactions in a macromolecular complex.

  For a complex built from several constituent molecules, every
  cation-pi interaction between a pair of molecules is localized to the
  voxel in which the participating atom originated, producing a local
  cation-pi count array.

  With `voxels_per_edge = int(box_width/voxel_width)`, the output for
  each complex is a tensor of shape `(voxels_per_edge, voxels_per_edge,
  voxels_per_edge, 1)` counting cation-pi interactions per voxel.
  """

  def __init__(self,
               cutoff: float = 6.5,
               angle_cutoff: float = 30.0,
               box_width: float = 16.0,
               voxel_width: float = 1.0):
    """
    Parameters
    ----------
    cutoff: float, optional (default 6.5)
      Maximum distance in angstroms between atoms for a cation-pi
      interaction to be considered.
    angle_cutoff: float, optional (default 30.0)
      Maximum allowed deviation (in degrees) from the ideal 0-degree
      angle between the ring normal and the vector from ring center to
      cation.
    box_width: float, optional (default 16.0)
      Edge length of the box in which voxel features are computed; the
      box is centered on the ligand centroid.
    voxel_width: float, optional (default 1.0)
      Edge length of one 3D voxel in the grid.
    """
    self.cutoff = cutoff
    self.angle_cutoff = angle_cutoff
    self.box_width = box_width
    self.voxel_width = voxel_width

  def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray:
    """Featurize a single ligand/protein complex.

    Parameters
    ----------
    mol_pdb: str
      Filename for ligand molecule
    protein_pdb: str
      Filename for protein molecule
    """
    complex_files = (mol_pdb, protein_pdb)
    try:
      fragments = rdkit_utils.load_complex(complex_files, add_hydrogens=False)
    except MoleculeLoadException:
      logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
      return None
    # The voxel grid is centered on the centroid of the contact region.
    centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)
    feature_tensors = []
    for i, j in itertools.combinations(range(len(fragments)), 2):
      first, second = fragments[i], fragments[j]
      shifted = [
          subtract_centroid(first[0], centroid),
          subtract_centroid(second[0], centroid)
      ]
      # One cation-pi dictionary per fragment, paired with its coordinates.
      cation_pi_dicts = compute_binding_pocket_cation_pi(
          first[1],
          second[1],
          dist_cutoff=self.cutoff,
          angle_cutoff=self.angle_cutoff,
      )
      grid = None
      for coords, cp_dict in zip(shifted, cation_pi_dicts):
        voxels = voxelize(
            convert_atom_to_voxel,
            hash_function=None,
            box_width=self.box_width,
            voxel_width=self.voxel_width,
            coordinates=coords,
            feature_dict=cp_dict,
            nb_channel=1)
        grid = voxels if grid is None else grid + voxels
      feature_tensors.append(grid)
    # Each tensor has shape (voxels_per_edge,)*3 + (1,); join on the channel axis.
    return np.concatenate(feature_tensors, axis=-1)
class PiStackVoxelizer(ComplexFeaturizer):
  """Localize Pi stacking interactions between atoms in macromolecular complexes.

  Given a macromolecular complex made up of multiple constituent
  molecules, compute pi-stacking interactions between atoms in the
  macromolecular complex. Each interaction is localized to the voxel in
  which the participating atom originated to create a local pi-stacking
  array.

  Let `voxels_per_edge = int(box_width/voxel_width)`. Creates a
  tensor output of shape `(voxels_per_edge, voxels_per_edge,
  voxels_per_edge, 2)` for each macromolecular complex. Each voxel has
  2 fields, with the first tracking the number of pi-pi parallel
  interactions, and the second tracking the number of pi-T
  interactions.
  """

  def __init__(self,
               cutoff: float = 4.4,
               angle_cutoff: float = 30.0,
               box_width: float = 16.0,
               voxel_width: float = 1.0):
    """
    Parameters
    ----------
    cutoff: float, optional (default 4.4)
      The distance in angstroms within which atoms must be to
      be considered for a pi-stacking interaction between them.
    angle_cutoff: float, optional (default 30.0)
      Angle cutoff. Max allowed deviation from the ideal (0 deg)
      angle between ring normal and vector pointing from ring
      center to other ring center (in degrees).
    box_width: float, optional (default 16.0)
      Size of a box in which voxel features are calculated. Box
      is centered on a ligand centroid.
    voxel_width: float, optional (default 1.0)
      Size of a 3D voxel in a grid.
    """
    self.cutoff = cutoff
    self.angle_cutoff = angle_cutoff
    self.box_width = box_width
    self.voxel_width = voxel_width

  def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray:
    """
    Compute featurization for a single mol/protein complex

    Parameters
    ----------
    mol_pdb: str
      Filename for ligand molecule
    protein_pdb: str
      Filename for protein molecule
    """
    molecular_complex = (mol_pdb, protein_pdb)
    try:
      fragments = rdkit_utils.load_complex(
          molecular_complex, add_hydrogens=False)
    except MoleculeLoadException:
      logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
      return None
    pairwise_features = []
    # The voxel grid is centered on the centroid of the contact region.
    centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)
    for (frag1_ind, frag2_ind) in itertools.combinations(
        range(len(fragments)), 2):
      frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind]
      distances = compute_pairwise_distances(frag1[0], frag2[0])
      frag1_xyz = subtract_centroid(frag1[0], centroid)
      frag2_xyz = subtract_centroid(frag2[0], centroid)
      xyzs = [frag1_xyz, frag2_xyz]
      protein_pi_t, protein_pi_parallel, ligand_pi_t, ligand_pi_parallel = (
          compute_pi_stack(
              frag1[1],
              frag2[1],
              distances,
              dist_cutoff=self.cutoff,
              angle_cutoff=self.angle_cutoff))
      # Channel 0: parallel pi-pi stacking, localized per fragment.
      pi_parallel_tensor = sum([
          voxelize(
              convert_atom_to_voxel,
              hash_function=None,
              box_width=self.box_width,
              voxel_width=self.voxel_width,
              coordinates=xyz,
              feature_dict=feature_dict,
              nb_channel=1)
          for (xyz, feature_dict
              ) in zip(xyzs, [ligand_pi_parallel, protein_pi_parallel])
      ])
      # Channel 1: T-shaped pi stacking, localized per fragment.
      # Bug fix: use the loop variables (xyz, feature_dict) rather than
      # frag1_xyz/protein_pi_t, which voxelized the protein pi-T features
      # twice at frag1 coordinates and ignored ligand_pi_t entirely.
      pi_t_tensor = sum([
          voxelize(
              convert_atom_to_voxel,
              hash_function=None,
              box_width=self.box_width,
              voxel_width=self.voxel_width,
              coordinates=xyz,
              feature_dict=feature_dict,
              nb_channel=1)
          for (xyz, feature_dict) in zip(xyzs, [ligand_pi_t, protein_pi_t])
      ])
      pairwise_features.append(
          np.concatenate([pi_parallel_tensor, pi_t_tensor], axis=-1))
    # Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 2) so we should concatenate on the last axis.
    return np.concatenate(pairwise_features, axis=-1)
class HydrogenBondCounter(ComplexFeaturizer):
  """Count hydrogen bonds between molecules of a macromolecular complex.

  Given a macromolecular complex made up of multiple constituent
  molecules, tally the hydrogen bonds between atoms of the complex.

  For each complex the output is a vector of shape `(3,)` (assuming the
  default `distance_bins` with 3 bins) giving the total number of
  hydrogen bonds in each distance bin.
  """

  def __init__(
      self,
      cutoff: float = 4.5,
      reduce_to_contacts: bool = True,
      distance_bins: Optional[List[Tuple[float, float]]] = None,
      angle_cutoffs: Optional[List[float]] = None,
  ):
    """
    Parameters
    ----------
    cutoff: float (default 4.5)
      Distance cutoff in angstroms for molecules in complex.
    reduce_to_contacts: bool, optional
      If True, restrict the complex to atoms near a contact region.
    distance_bins: list[tuple]
      Hydrogen bond distance bins. Defaults to
      `[(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)]` when not given.
    angle_cutoffs: list[float]
      Hydrogen bond angle cutoffs: max allowed deviation from the ideal
      (180 deg) angle between the hydrogen-atom1 and hydrogen-atom2
      vectors. Defaults to `[5, 50, 90]` when not given.
    """
    self.cutoff = cutoff
    self.distance_bins = (
        HBOND_DIST_BINS if distance_bins is None else distance_bins)
    self.angle_cutoffs = (
        HBOND_ANGLE_CUTOFFS if angle_cutoffs is None else angle_cutoffs)
    self.reduce_to_contacts = reduce_to_contacts

  def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray:
    """Featurize a single ligand/protein complex.

    Parameters
    ----------
    mol_pdb: str
      Filename for ligand molecule
    protein_pdb: str
      Filename for protein molecule
    """
    complex_files = (mol_pdb, protein_pdb)
    try:
      fragments = rdkit_utils.load_complex(complex_files, add_hydrogens=False)
    except MoleculeLoadException:
      logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
      return None
    if self.reduce_to_contacts:
      fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff)
    counts = []
    for i, j in itertools.combinations(range(len(fragments)), 2):
      first, second = fragments[i], fragments[j]
      distances = compute_pairwise_distances(first[0], second[0])
      # One scalar count per distance bin for this fragment pair.
      per_bin = [
          np.array([len(bonds)])
          for bonds in compute_hydrogen_bonds(first, second, distances,
                                              self.distance_bins,
                                              self.angle_cutoffs)
      ]
      counts.append(np.concatenate(per_bin, axis=-1))
    return np.concatenate(counts, axis=-1)
class HydrogenBondVoxelizer(ComplexFeaturizer):
  """Voxelize hydrogen bonds in a macromolecular complex.

  For a complex built from several constituent molecules, every
  hydrogen bond between a pair of molecules is localized to the voxel
  in which each participating atom originated. When the two atoms of a
  bond sit in different voxels, the interaction is counted once in each
  voxel.

  With `voxels_per_edge = int(box_width/voxel_width)`, the output for
  each complex is a tensor of shape `(voxels_per_edge, voxels_per_edge,
  voxels_per_edge, 3)` (assuming the default `distance_bins` with 3
  bins) counting hydrogen bonds per voxel and per distance bin.
  """

  def __init__(
      self,
      cutoff: float = 4.5,
      box_width: float = 16.0,
      voxel_width: float = 1.0,
      reduce_to_contacts: bool = True,
      distance_bins: Optional[List[Tuple[float, float]]] = None,
      angle_cutoffs: Optional[List[float]] = None,
  ):
    """
    Parameters
    ----------
    cutoff: float (default 4.5)
      Distance cutoff in angstroms for contact atoms in complex.
    box_width: float, optional (default 16.0)
      Edge length of the box in which voxel features are computed; the
      box is centered on a ligand centroid.
    voxel_width: float, optional (default 1.0)
      Edge length of one 3D voxel in the grid.
    reduce_to_contacts: bool, optional
      If True, restrict the complex to atoms near a contact region.
    distance_bins: list[tuple]
      Hydrogen bond distance bins. Defaults to
      `[(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)]` when not given.
    angle_cutoffs: list[float]
      Hydrogen bond angle cutoffs: max allowed deviation from the ideal
      (180 deg) angle between the hydrogen-atom1 and hydrogen-atom2
      vectors. Defaults to `[5, 50, 90]` when not given.
    """
    self.cutoff = cutoff
    self.distance_bins = (
        HBOND_DIST_BINS if distance_bins is None else distance_bins)
    self.angle_cutoffs = (
        HBOND_ANGLE_CUTOFFS if angle_cutoffs is None else angle_cutoffs)
    self.box_width = box_width
    self.voxel_width = voxel_width
    self.reduce_to_contacts = reduce_to_contacts

  def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray:
    """Featurize a single ligand/protein complex.

    Parameters
    ----------
    mol_pdb: str
      Filename for ligand molecule
    protein_pdb: str
      Filename for protein molecule
    """
    complex_files = (mol_pdb, protein_pdb)
    try:
      fragments = rdkit_utils.load_complex(complex_files, add_hydrogens=False)
    except MoleculeLoadException:
      logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
      return None
    # The voxel grid is centered on the centroid of the contact region.
    centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)
    if self.reduce_to_contacts:
      fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff)
    feature_tensors = []
    for i, j in itertools.combinations(range(len(fragments)), 2):
      first, second = fragments[i], fragments[j]
      distances = compute_pairwise_distances(first[0], second[0])
      shifted = [
          subtract_centroid(first[0], centroid),
          subtract_centroid(second[0], centroid)
      ]
      # One voxel grid per distance bin; each bond is localized at both
      # of its atoms' voxels.
      bin_tensors = []
      for hbond_list in compute_hydrogen_bonds(first, second, distances,
                                               self.distance_bins,
                                               self.angle_cutoffs):
        grid = None
        for coords in shifted:
          voxels = voxelize(
              convert_atom_pair_to_voxel,
              hash_function=None,
              box_width=self.box_width,
              voxel_width=self.voxel_width,
              coordinates=coords,
              feature_list=hbond_list,
              nb_channel=1)
          grid = voxels if grid is None else grid + voxels
        bin_tensors.append(grid)
      feature_tensors.append(np.concatenate(bin_tensors, axis=-1))
    # Each tensor has shape (voxels_per_edge,)*3 + (bins,); join on the channel axis.
    return np.concatenate(feature_tensors, axis=-1)
| 38.328076
| 125
| 0.674074
| 3,119
| 24,300
| 5.058352
| 0.084001
| 0.021677
| 0.031311
| 0.026494
| 0.879635
| 0.857894
| 0.837485
| 0.817456
| 0.803638
| 0.793624
| 0
| 0.018499
| 0.250329
| 24,300
| 633
| 126
| 38.388626
| 0.84756
| 0.406214
| 0
| 0.789308
| 0
| 0
| 0.025077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040881
| false
| 0
| 0.059748
| 0
| 0.160377
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb7310499aa25bde11165b19518532bb5cfd8972
| 102,621
|
py
|
Python
|
preprocessor/legacy_functions/demo_daily_data.py
|
clokman/KFIR
|
01c9bad491aa5c104adce38294ee2b15bd49b7ec
|
[
"MIT"
] | 1
|
2021-12-20T03:23:42.000Z
|
2021-12-20T03:23:42.000Z
|
preprocessor/legacy_functions/demo_daily_data.py
|
clokman/KFIR
|
01c9bad491aa5c104adce38294ee2b15bd49b7ec
|
[
"MIT"
] | null | null | null |
preprocessor/legacy_functions/demo_daily_data.py
|
clokman/KFIR
|
01c9bad491aa5c104adce38294ee2b15bd49b7ec
|
[
"MIT"
] | 1
|
2022-03-23T08:37:03.000Z
|
2022-03-23T08:37:03.000Z
|
demo_daily_data = [['date', 'id', 'bed_time', 'late', 'late_reason', 'wake_time', 'sleep_transition', 'sleep_struggle', 'night_wake', 'wake_earlier', 'wake_earlier_problem', 'sleep_quality', 'physical_activity', 'mental_digital_activity', 'social_activity', 'light', 'presleep_description', 'temptation_smoking', 'temptation_eating', 'temptation_chat', 'temptation_coffee', 'temptation_social_media', 'temptation_internet', 'temptation_tv', 'temptation_alcohol', 'temptation_soft_drink', 'temptation_cleaning', 'temptation_shopping', 'temptation_other', 'bed_time_plan', 'steps', 'sun_hours'], ['2017/04/10 9:57:26 a.m. EET', 'AB64', '02:00', 'Ja', 'We zijn vanuit het noorden van het land teruggereden naar huis na een theatervoorstelling', '09:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'Theaterbezoek, autorit (bijrijder)', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 5845, 12.3], ['2017/04/11 11:25:39 p.m. EET', 'AB64', '23:45', 'Ja', '', '07:00', 'binnen een kwartier', 'Nee', '6 keer of meer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'Tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 7649, 8.8], ['2017/04/12 7:42:47 a.m. 
EET', 'AB64', '23:55', 'Ja', 'Man kwam thuis en daar wilde ik nog even mee praten', '07:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Helemaal niet', 'Matig', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Meer dan een uur', 'Poging om Netflix te kijken maar was te moe, praten met huisgenoten, ', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 5012, 10.7], ['2017/04/13 8:26:21 a.m. EET', 'AB64', '00:30', 'Ja', 'Tot laat doorgewerkt.', '07:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Zeer actief', '3-mei', 'Meer dan een uur', 'Gewerkt, achter de pc', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 2467, 1.3], ['2017/04/14 1:10:28 p.m. EET', 'AB64', '00:00', 'Nee', 'nvt', '09:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'tv gekeken, ipad', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 2114, 7.2], ['2017/04/15 5:04:43 p.m. 
EET', 'AB64', '01:00', 'Ja', 'Nog even gezellig napraten, en in eens was het 1 uur.', '07:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'Ipad, tv-serie kijken, gezellig kletsen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 2943, 4.4], ['2017/04/16 8:16:39 a.m. EET', 'AB64', '23:30', 'Nee', 'Nvt', '07:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een uur', 'Even rustig zitten samen, de dag doorpraten.', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 2288, 5.8], ['2017/04/17 8:11:53 a.m. EET', 'AB64', '23:30', 'Nee', 'Nvt', '07:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Minder dan een uur', 'Televisie gekeken, boek gelezen, op de bank gezetten, was doen.', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3015, 5.3], ['2017/04/18 8:41:36 a.m. EET', 'AB64', '00:30', 'Ja', 'Geen bijzondere reden. 
', '08:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een uur', 'Televisie kijken, lezen, ipad, gesprek voeren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 3866, 9.4], ['2017/04/19 5:17:55 p.m. EET', 'AB64', '01:00', 'Ja', 'Ik ben naar een verjaardag geweest.', '07:00', 'binnen een kwartier', 'Nee', '1 keer', 'Ja', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '3-mei', 'Minder dan een kwartier', 'Verjaardag: kletsen met allerlei mensen, 10 minuten buiten lopen, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 4312, 9.0], ['2017/04/20 5:18:27 p.m. EET', 'AB64', '00:30', 'Ja', 'Geen bijzondere reden. ', '08:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een uur', 'Televisie kijken, lezen, ipad, gesprek voeren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 2987, 6.3], ['2017/04/21 7:31:53 a.m. 
EET', 'AB64', '22:30', 'Nee', 'N.v.t.', '07:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '3-mei', 'Meer dan een uur', 'Serie kijken, lezen in bed', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 2231, 12.5], ['2017/04/22 7:25:50 a.m. EET', 'AB64', '00:00', 'Ja', 'Geen specifieke reden, soms loopt het gewoon zo en dan is het later dan je dacht.', '07:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Zeer actief', '10 of meer', 'Meer dan een uur', 'Tv kijken, lezen, praten met huisgenoten', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 2177, 0.7], ['2017/04/23 9:04:02 p.m. EET', 'AB64', '23:50', 'Ja', 'In slaap gevallen voor tv', '07:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'Tv kijken, praten met de kinderen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', '23:00', 5604, 6.3], ['2017/04/10 8:43:22 p.m. 
EET', 'EM11', '21:30', 'Nee', '', '06:20', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Licht actief', 'Nauwelijks actief', '6-sep', 'Minder dan een kwartier', 'Eten, opruimen, badkamer, massage', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:30', 6897, 12.3], ['2017/04/11 8:21:13 p.m. EET', 'EM11', '22:15', 'Ja', 'Bad gevolgd bij een heerlijke vrij partij;)', '06:30', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Helemaal niet', 'Matig', 'Licht actief', 'Licht actief', '3-mei', 'Minder dan een half uur', 'Eten, computer, bad, seks', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:30', 8722, 5.3], ['2017/04/12 7:14:51 p.m. EET', 'EM11', '21:40', 'Nee', '', '07:00', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een uur', 'Eten, huishouden, film', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:00', 3389, 8.6], ['2017/04/13 9:32:23 a.m. 
EET', 'EM11', '21:15', 'Nee', '', '05:50', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een kwartier', 'Koken, opruimen, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '22:00', 2905, 0.9], ['2017/04/14 5:40:51 p.m. EET', 'EM11', '22:00', 'Nee', '', '06:45', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Nauwelijks actief', 'Nauwelijks actief', '3-mei', 'Meer dan een uur', 'Lezen, eten, opruimen, film kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:30', 3487, 7.7], ['2017/04/15 7:13:30 a.m. EET', 'EM11', '23:15', 'Ja', 'Avond met vrienden liep uit', '06:10', 'binnen een kwartier', 'Nee', '1 keer', 'Ja', 'Helemaal niet', 'Goed', 'Licht actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'Praten met vrienden, fietsen, eten, opruimen, vrijen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:00', 5633, 2.4], ['2017/04/16 7:00:48 a.m. 
EET', 'EM11', '21:15', 'Nee', '', '06:15', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Enigszins', 'Matig', 'Licht actief', 'Licht actief', '6-sep', 'Minder dan een kwartier', 'Koken, eten, wandelen, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:00', 8149, 4.7], ['2017/04/17 7:18:49 a.m. EET', 'EM11', '22:00', 'Nee', '', '06:30', 'binnen een kwartier', 'Nee', '0 keer', 'Ja', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'Zakelijke gesprekken, eten, sociaal', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:30', 7739, 3.4], ['2017/04/18 7:05:45 a.m. EET', 'EM11', '22:15', 'Ja', 'Vrijen;)', '06:10', 'binnen een kwartier', 'Nee', '1 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Minder dan een half uur', 'Koken, opruimen, lezen, praten, vrijen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:30', 6492, 7.4], ['2017/04/19 1:50:06 p.m. 
EET', 'EM11', '22:00', 'Ja', 'Bad', '06:30', 'binnen een kwartier', 'Ja', '0 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Licht actief', 'Nauwelijks actief', '6-sep', 'Minder dan een half uur', 'Eten, opruimen, wandelen, tandarts, bad', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:30', 8606, 9.2], ['2017/04/20 11:08:22 a.m. EET', 'EM11', '21:30', 'Nee', '', '06:10', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Minder dan een kwartier', 'Sociaal, winkelen, eten, huishouden, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 2544, 7.1], ['2017/04/21 7:59:14 a.m. EET', 'EM11', '01:00', 'Ja', 'Soul party', '07:00', 'binnen een kwartier', 'Nee', '1 keer', 'Ja', 'Ja', 'Matig', 'Vrij actief', 'Nauwelijks actief', '3-mei', 'Meer dan een uur', 'Dansen, drinken, fietsen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:00', 20491, 12.3], ['2017/04/22 1:19:46 p.m. 
EET', 'EM11', '21:15', 'Ja', 'Vakantie planning', '06:20', 'binnen een half uur', 'Ja', '2-3 keer', 'Ja', 'Ja', 'Slecht', 'Licht actief', 'Vrij actief', '6-sep', 'Minder dan een half uur', 'Wandelen, eten, huishouden, computer', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:00', 8714, 0.4], ['2017/04/23 11:56:03 a.m. EET', 'EM11', '21:15', 'Ja', 'Voorbereiding vakantie', '06:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Vrij actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'Eten, huishouden, wandelen, computer, vakantie voorbereidingen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:00', 15697, 7.8], ['2017/04/24 11:09:48 a.m. EET', 'EM11', '23:00', 'Ja', 'vrijen:) de laatste keer voor we het klooster in gaan:(', '07:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Licht actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'Wandelen, bad, seks', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 6879, 5.5], ['2017/04/10 9:53:22 p.m. 
EET', 'FT12', '23:15', 'Nee', '', '07:30', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '6-sep', 'Minder dan een uur', 'wandelen, bad, TV', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 4309, 12.3], ['2017/04/11 9:58:21 p.m. EET', 'FT12', '22:00', 'Nee', '', '07:30', 'binnen een kwartier', 'Nee', '1 keer', 'Ja', 'Een beetje', 'Matig', 'Zeer actief', 'Nauwelijks actief', '6-sep', 'Minder dan een kwartier', 'bad, massage, sex', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 19412, 5.3], ['2017/04/12 5:22:12 p.m. EET', 'FT12', '22:15', 'Nee', '', '07:30', 'binnen een half uur', 'Nee', '1 keer', 'Ja', 'Enigszins', 'Matig', 'Nauwelijks actief', 'Nauwelijks actief', '6-sep', 'Meer dan een uur', 'eten, film kijken, ebook lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:00', 1785, 8.6], ['2017/04/13 7:40:53 a.m. 
EET', 'FT12', '22:15', 'Nee', '', '07:15', 'binnen een uur', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Vrij actief', 'Vrij actief', '3-mei', 'Minder dan een kwartier', 'eten, fietsen, bezoek', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 10256, 0.9], ['2017/04/14 11:19:12 a.m. EET', 'FT12', '00:00', 'Ja', 'TV', '09:15', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'studie, TV', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 5644, 7.7], ['2017/04/15 7:31:35 a.m. EET', 'FT12', '00:00', 'Ja', 'TV', '07:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'Studie, TV', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 8838, 2.4], ['2017/04/16 12:25:19 p.m. 
EET', 'FT12', '00:15', 'Ja', 'TV', '08:30', 'binnen een half uur', 'Ja', '1 keer', 'Ja', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Minder dan een uur', 'Studie, TV', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 4596, 4.7], ['2017/04/17 11:58:03 a.m. EET', 'FT12', '01:30', 'Ja', 'feest', '08:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Vrij actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'dansen, fietsen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 14091, 3.4], ['2017/04/18 8:47:49 a.m. EET', 'FT12', '23:15', 'Nee', '', '08:30', 'binnen een half uur', 'Ja', '2-3 keer', 'Ja', 'Enigszins', 'Matig', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'administratie, studie, TV', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 2467, 7.4], ['2017/04/19 11:00:22 p.m. 
EET', 'FT12', '00:00', 'Ja', 'TV', '07:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '6-sep', 'Meer dan een uur', 'eten, TV, PC', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '23:30', 3942, 9.2], ['2017/04/20 11:39:48 a.m. EET', 'FT12', '00:00', 'Ja', 'mail', '08:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '1-feb', 'Minder dan een uur', 'bad, sex, mail', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 6841, 7.1], ['2017/05/21 9:32:32 a.m. EET', 'FT12', '21:30', 'Nee', '', '07:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Licht actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'wandelen, tv, pc', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '22:00', 5599, 12.3], ['2017/05/22 9:35:21 a.m. 
EET', 'FT12', '22:30', 'Ja', '', '07:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'eten, tv, pc', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 9176, 0.4], ['2017/05/23 9:38:03 a.m. EET', 'FT12', '01:00', 'Ja', 'studie', '08:00', 'binnen een half uur', 'Ja', '2-3 keer', 'Ja', 'Enigszins', 'Slecht', 'Nauwelijks actief', 'Vrij actief', '3-mei', 'Meer dan een uur', 'studie, bezoek, surfen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:00', 3981, 7.8], ['2017/04/10 10:48:09 p.m. EET', 'gh93', '03:00', 'Ja', '', '14:30', 'binnen een uur', 'Ja', '2-3 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Zeer actief', '10 of meer', 'Meer dan een uur', 'lezen; muziek studiotechniek;', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '05:00', 5945, 12.3], ['2017/04/11 1:28:39 a.m. 
EET', 'gh93', '01:15', 'Nee', '', '12:25', 'binnen een half uur', 'Nee', '4-5 keer', 'Nee', 'Helemaal niet', 'Goed', 'Zeer actief', 'Nauwelijks actief', '6-sep', 'Minder dan een half uur', 'muziek;seks', 'Toegegeven aan verleiding', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:30', 22341, 8.8], ['2017/04/12 9:51:36 p.m. EET', 'gh93', '02:30', 'Nee', '', '12:15', 'na meer dan een uur', 'Ja', '1 keer', 'Nee', 'Helemaal niet', 'Matig', 'Nauwelijks actief', 'Vrij actief', '3-mei', 'Minder dan een half uur', 'muziek;lezen;seks', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '02:00', 3649, 10.7], ['2017/04/13 12:42:19 a.m. EET', 'gh93', '05:00', 'Nee', 'vrouwen', '13:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Vrij actief', 'Licht actief', '6-sep', 'Minder dan een kwartier', 'film;seks', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 11915, 1.3], ['2017/04/14 11:19:56 p.m. 
EET', 'gh93', '04:30', 'Nee', '', '11:00', 'binnen een uur', 'Ja', '2-3 keer', 'Ja', 'Helemaal niet', 'Matig', 'Vrij actief', 'Nauwelijks actief', '3-mei', 'Minder dan een kwartier', 'muziek;seks', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 16744, 7.2], ['2017/04/15 12:11:02 a.m. EET', 'gh93', '03:30', 'Nee', '', '12:15', 'na meer dan een uur', 'Ja', '1 keer', 'Nee', 'Helemaal niet', 'Slecht', 'Licht actief', 'Licht actief', '10 of meer', 'Minder dan een half uur', 'muziek;lezen', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 2206, 4.4], ['2017/04/16 1:20:54 a.m. EET', 'gh93', '03:30', 'Nee', '', '11:45', 'binnen een uur', 'Ja', '2-3 keer', 'Nee', 'Helemaal niet', 'Matig', 'Vrij actief', 'Licht actief', '10 of meer', 'Minder dan een half uur', 'muziek;seks;lezen', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 12947, 5.8], ['2017/04/17 1:03:41 a.m. 
EET', 'Gh93', '04:00', 'Nee', '', '13:00', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Vrij actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'Lezen;film;muziek;seks', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 16402, 5.3], ['2017/04/18 2:40:54 a.m. EET', 'gh93', '01:00', 'Nee', '', '08:00', 'binnen een half uur', 'Nee', '2-3 keer', 'Nee', 'Helemaal niet', 'Goed', 'Vrij actief', 'Licht actief', '10 of meer', 'Minder dan een half uur', 'Kinderen;lezen;huishouden', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 19361, 9.4], ['2017/04/19 1:41:33 a.m. EET', 'gh93', '04:30', 'Nee', '', '11:00', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Vrij actief', 'Vrij actief', '10 of meer', 'Minder dan een kwartier', 'muziek;lezen;seks', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 14363, 9.0], ['2017/04/20 3:04:09 a.m. 
EET', 'gh93', '04:30', 'Nee', '', '13:00', 'binnen een uur', 'Ja', '1 keer', 'Nee', 'Helemaal niet', 'Matig', 'Nauwelijks actief', 'Vrij actief', '3-mei', 'Minder dan een kwartier', 'lezen', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '04:00', 4682, 6.3], ['2017/04/21 4:22:56 a.m. EET', 'Gh93', '03:30', 'Nee', '', '13:00', 'binnen een uur', 'Ja', '2-3 keer', 'Nee', 'Helemaal niet', 'Matig', 'Vrij actief', 'Licht actief', '10 of meer', 'Minder dan een kwartier', 'Seks;muziek;lezen', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '04:30', 11877, 12.5], ['2017/04/22 4:30:24 a.m. EET', 'gh93', '05:00', 'Nee', '', '14:30', 'binnen een uur', 'Ja', '4-5 keer', 'Nee', 'Helemaal niet', 'Slecht', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Minder dan een half uur', 'muziek;lezen', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '04:30', 6904, 0.7], ['2017/04/23 2:10:27 a.m. 
EET', 'gh93', '04:30', 'Nee', '', '12:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een kwartier', 'lezen', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 2987, 6.3], ['2017/04/10 12:30:33 a.m. EET', 'GW98', '03:00', 'Ja', 'Gamen', '10:00', 'binnen een uur', 'Ja', '0 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '02:00', 4733, 12.3], ['2017/04/11 12:30:34 a.m. EET', 'GW98', '03:00', 'Ja', 'Gamen', '10:00', 'binnen een uur', 'Ja', '0 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '02:00', 2902, 6.2], ['2017/04/12 3:20:10 a.m. 
EET', 'GW98', '04:00', 'Ja', 'Tijd voor mezelf ', '13:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 3794, 10.3], ['2017/04/13 7:01:03 a.m. EET', 'GW98', '02:45', 'Nee', '', '12:00', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Vrij actief', 'Vrij actief', '10 of meer', 'Minder dan een half uur', 'Feestje, opruimen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '05:00', 12903, 0.9], ['2017/04/14 1:19:25 a.m. EET', 'GW98', '07:00', 'Ja', 'Game tijd na een hele dag werken', '14:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '00:00', 8421, 6.6], ['2017/04/15 2:58:29 a.m. 
EET', 'GW98', '01:15', 'Ja', 'Geen slaap', '05:10', 'binnen een half uur', 'Ja', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'TV kijken ', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '02:00', 6918, 4.8], ['2017/04/16 5:11:22 a.m. EET', 'GW98', '03:00', 'Ja', 'Vrije dag vandaag', '13:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'Bankhangen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '03:00', 6787, 4.5], ['2017/04/17 4:44:45 a.m. EET', 'GW98', '05:30', 'Ja', 'Gamen in eigen tijd', '12:40', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'Gamen op de pc', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '02:00', 5735, 2.9], ['2017/04/18 2:22:29 a.m. 
EET', 'GW98', '05:00', 'Nee', '', '14:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Zeer actief', 'Zeer actief', '10 of meer', 'Meer dan een uur', 'Uitgaan, alcohol, muziek, fietsen ', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 17114, 9.9], ['2017/04/19 3:40:42 a.m. EET', 'GW98', '02:00', 'Ja', 'Laat thuis', '05:15', 'binnen een uur', 'Ja', '0 keer', 'Nee', 'Helemaal niet', 'Matig', 'Zeer actief', 'Vrij actief', '10 of meer', 'Minder dan een uur', 'Klussen, familie, ', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '02:00', 18335, 10.2], ['2017/04/20 2:09:20 a.m. EET', 'GW98', '03:45', 'Nee', '', '13:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'Feestje', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 3319, 7.9], ['2017/04/21 12:48:35 a.m. 
EET', 'GW98', '02:10', 'Ja', 'Paasfeestje', '05:30', 'binnen een uur', 'Ja', '0 keer', 'Nee', 'Een beetje', 'Goed', 'Vrij actief', 'Vrij actief', '10 of meer', 'Minder dan een half uur', 'Feestje, borrel, gezelligheid', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 9452, 11.5], ['2017/04/22 5:46:18 a.m. EET', 'GW98', '01:00', 'Nee', '', '13:00', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'TV kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 2218, 0.3], ['2017/04/23 5:00:25 a.m. EET', 'GW98', '05:45', 'Ja', 'Gamen, studeren uitstellen', '13:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'Gamen, tv kijken', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', '01:00', 2944, 6.8], ['2017/04/24 3:00:11 a.m. 
EET', 'GW98', '05:00', 'Ja', 'Studeren', '08:45', 'binnen een kwartier', 'Nee', '0 keer', 'Ja', 'Enigszins', 'Matig', 'Nauwelijks actief', 'Zeer actief', '10 of meer', 'Meer dan een uur', 'Studeren, PC', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 3906, 6.8], ['2017/04/25 1:26:30 a.m. EET', 'GW98', '03:30', 'Ja', 'Studie', '13:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'voetbal, studeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '00:00', 3454, 1.9], ['2017/04/26 2:19:46 a.m. EET', 'GW98', '02:00', 'Ja', 'Studie', '05:15', 'binnen een uur', 'Ja', '0 keer', 'Nee', 'Helemaal niet', 'Matig', 'Licht actief', 'Zeer actief', '6-sep', 'Meer dan een uur', 'Studeren, pc', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 5469, 7.6], ['2017/04/27 3:09:28 a.m. 
EET', 'GW98', '02:30', 'Ja', 'Studie ', '05:15', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Zeer actief', '10 of meer', 'Meer dan een uur', 'Studeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '02:00', 2991, 8.0], ['2017/04/28 4:27:31 a.m. EET', 'GW98', '03:00', 'Nee', '', '14:00', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 4885, 7.5], ['2017/04/29 1:40:32 a.m. EET', 'GW98', '05:30', 'Ja', 'Gamen', '13:00', 'binnen een kwartier', 'Nee', '0 keer', 'Ja', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'Gaming, PC', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 3602, 7.9], ['2017/04/30 3:17:54 a.m. 
EET', 'GW98', '02:15', 'Ja', 'Gamen', '05:10', 'binnen een half uur', 'Ja', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Bioscoop, fietsen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '03:00', 4049, 6.7], ['2017/04/10 7:41:11 p.m. EET', 'HA61', '22:45', 'Nee', '', '08:15', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Meer dan een uur', 'Auto rijden, Hond uitlaten, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '21:45', 2011, 12.5], ['2017/04/11 8:53:33 p.m. EET', 'HA61', '21:50', 'Nee', '', '05:35', 'binnen een half uur', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '6-sep', 'Minder dan een uur', 'Computer, hond uitlaten, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '21:50', 6084, 1.9], ['2017/04/12 9:44:13 p.m. 
EET', 'HA61', '21:55', 'Ja', 'TV programma', '05:40', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Matig', 'Licht actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Eten, hond uitlaten, tv-kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '22:00', 5439, 1.6], ['2017/04/13 10:01:19 p.m. EET', 'HA61', '21:50', 'Nee', '', '05:40', 'binnen een half uur', 'Ja', '4-5 keer', 'Ja', 'Ja', 'Slecht', 'Zeer actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'Ete,, skeeleren, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '21:50', 13097, 0.1], ['2017/04/14 9:09:57 p.m. EET', 'HA61', '22:10', 'Ja', 'Iets meer tijd voor tandenpoetsen ed', '07:15', 'binnen een kwartier', 'Nee', '4-5 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Eten, hond uitlaten, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:00', 3901, 6.6], ['2017/04/15 9:12:35 p.m. 
EET', 'HA61', '21:50', 'Nee', '', '07:30', 'binnen een half uur', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Matig', 'Licht actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'Hond uitlaten, tv, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:00', 8840, 0.6], ['2017/04/16 9:09:30 p.m. EET', 'HA61', '21:50', 'Nee', '', '08:30', 'binnen een kwartier', 'Nee', '2-3 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'Hond uitlaten, tv kijken, reis zoen internet', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '22:00', 4063, 6.8], ['2017/04/17 10:02:00 p.m. EET', 'HA61', '23:00', 'Ja', 'Tv programma', '08:05', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Minder dan een uur', 'Hond, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '22:30', 4478, 8.6], ['2017/04/18 10:39:10 p.m. 
EET', 'HA61', '22:30', 'Nee', '', '08:15', 'binnen een half uur', 'Nee', '1 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Nauwelijks actief', '1-feb', 'Meer dan een uur', 'Hond, tv, internet', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '22:45', 5567, 7.6], ['2017/04/19 10:48:45 p.m. EET', 'HA61', '22:30', 'Nee', '', '08:30', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Nauwelijks actief', '3-mei', 'Meer dan een uur', 'Hoe, tv, internet', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:45', 3737, 6.3], ['2017/04/20 9:32:14 p.m. EET', 'HA61', '22:45', 'Nee', '', '07:20', 'binnen een uur', 'Ja', '1 keer', 'Ja', 'Helemaal niet', 'Goed', 'Zeer actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'Auto, skeeleren, auto, internet', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', '22:15', 22091, 5.1], ['2017/04/21 12:28:12 a.m. 
EET', 'HA61', '22:30', 'Nee', 'Nvt', '08:15', 'binnen een uur', 'Ja', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Minder dan een uur', 'Hond uitlaten, tv, internet', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 6455, 9.8], ['2017/04/22 10:41:51 p.m. EET', 'HA61', '00:15', 'Ja', 'Afspraak met buurvrouw', '08:15', 'binnen een uur', 'Ja', '2-3 keer', 'Ja', 'Een beetje', 'Slecht', 'Licht actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Bijeenkomst IVN, afstemmen en kletsen bij buurvrouw', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 4733, 1.1], ['2017/04/23 10:45:02 p.m. EET', 'HA61', '00:30', 'Ja', 'Uitgelopen jubileum collega', '08:15', 'binnen een half uur', 'Nee', '2-3 keer', 'Ja', 'Ja', 'Slecht', 'Zeer actief', 'Vrij actief', '10 of meer', 'Minder dan een kwartier', 'Fietsen, feest, auto rijden', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 20691, 6.9], ['2017/04/24 10:41:03 p.m. 
EET', 'HA61', '10:30', 'Nee', '', '08:00', 'binnen een uur', 'Ja', '2-3 keer', 'Ja', 'Ja', 'Matig', 'Zeer actief', 'Vrij actief', '3-mei', 'Minder dan een kwartier', 'Eten, skeelerles, discussie vriendin', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:40', 17630, 9.7], ['2017/04/25 2:25:58 p.m. EET', 'HA61', '22:30', 'Nee', '', '08:10', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Vrij actief', '3-mei', 'Minder dan een uur', 'Eten, hond uitlaten, tv, hotel verkennen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:30', 3214, 2.4], ['2017/04/26 3:08:57 p.m. EET', 'HA61', '01:30', 'Ja', 'Film op televisie', '08:00', 'na meer dan een uur', 'Ja', '2-3 keer', 'Ja', 'Een beetje', 'Slecht', 'Licht actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'hond uitlaten, koffie drinken in lounch, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:00', 2478, 10.4], ['2017/04/27 4:56:07 p.m. 
EET', 'HA61', '22:15', 'Nee', '', '07:45', 'binnen een half uur', 'Nee', '1 keer', 'Ja', 'Een beetje', 'Matig', 'Licht actief', 'Licht actief', '3-mei', 'Minder dan een half uur', 'Hond uitlaten, discussie met vriendin, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:30', 4266, 8.1], ['2017/04/28 3:31:35 p.m. EET', 'HA61', '22:30', 'Nee', '', '09:00', 'binnen een uur', 'Ja', '1 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Vrij actief', '3-mei', 'Minder dan een half uur', 'Hond uitlaten, tv, muziek luisteren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '22:00', 3697, 8.1], ['2017/04/10 9:17:41 p.m. EET', 'he46', '00:00', 'Ja', 'niet willen slapen', '07:00', 'binnen een half uur', 'Nee', '1 keer', 'Ja', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'tv kijken en computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 10193, 12.5], ['2017/04/11 11:09:55 p.m. 
EET', 'he46', '23:00', 'Nee', '', '07:45', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Minder dan een kwartier', 'uit eten en radio luisteren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 9424, 1.9], ['2017/04/12 9:50:50 a.m. EET', 'he46', '00:00', 'Ja', 'tv kijken', '07:00', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'tv kijken en studeren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 6061, 1.6], ['2017/04/13 9:50:20 a.m. EET', 'HE46', '23:30', 'Ja', 'voetbal kijken', '06:30', 'binnen een half uur', 'Ja', '1 keer', 'Nee', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Meer dan een uur', 'studeren, tv kijken, computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', '23:00', 9072, 0.1], ['2017/04/14 8:23:12 a.m. 
EET', 'he46', '23:15', 'Nee', '', '06:30', 'binnen een uur', 'Ja', '1 keer', 'Nee', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Meer dan een uur', 'voetbal kijken en computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 14043, 6.6], ['2017/04/15 12:03:31 a.m. EET', 'he46', '01:00', 'Ja', 'rusteloos', '07:15', 'binnen een uur', 'Nee', '0 keer', 'Nee', 'Enigszins', 'Matig', 'Zeer actief', 'Vrij actief', '10 of meer', 'Meer dan een uur', 'tv kijken en computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:30', 15292, 0.6], ['2017/04/16 10:40:24 a.m. EET', 'he46', '00:30', 'Nee', 'tv kijken', '09:00', 'binnen een half uur', 'Nee', '2-3 keer', 'Ja', 'Ja', 'Matig', 'Nauwelijks actief', 'Zeer actief', '6-sep', 'Meer dan een uur', 'tv kijken en computeren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 13163, 6.8], ['2017/04/17 9:37:56 a.m. 
EET', 'he46', '01:30', 'Ja', 'moest werk afmaken', '08:30', 'binnen een half uur', 'Ja', '0 keer', 'Ja', 'Helemaal niet', 'Goed', 'Licht actief', 'Zeer actief', '10 of meer', 'Meer dan een uur', 'werken achter de pc', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 5937, 8.6], ['2017/04/18 11:40:54 a.m. EET', 'he46', '00:15', 'Ja', 'tv kijken', '09:00', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'tv kijken en computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 7647, 7.6], ['2017/04/19 10:39:32 p.m. EET', 'he46', '00:15', 'Ja', 'tv kijken ', '08:30', 'binnen een half uur', 'Nee', '0 keer', 'Ja', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Meer dan een uur', 'tv kijken en computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 11946, 6.3], ['2017/04/20 1:33:26 p.m. 
EET', 'HE46', '02:00', 'Ja', 'borrelen met vrienden', '09:00', 'binnen een kwartier', 'Nee', '1 keer', 'Ja', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'kletsen met vrienden', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 7899, 5.1], ['2017/04/21 9:23:50 p.m. EET', 'he46', '01:00', 'Ja', 'gevierd dat ik mijn scriptie had ingeleverd', '06:30', 'binnen een uur', 'Ja', '1 keer', 'Nee', 'Een beetje', 'Matig', 'Licht actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'kletsen met vrienden', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3962, 9.8], ['2017/04/22 8:44:05 p.m. EET', 'he46', '00:00', 'Nee', 'werken', '07:20', 'binnen een uur', 'Ja', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Meer dan een uur', 'computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 9057, 1.1], ['2017/04/23 10:43:23 p.m. 
EET', 'he46', '00:00', 'Ja', 'werken', '06:30', 'binnen een uur', 'Ja', '0 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Vrij actief', '10 of meer', 'Minder dan een half uur', 'computeren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3200, 6.9], ['2017/04/24 7:30:34 p.m. EET', 'he46', '01:00', 'Nee', '', '08:00', 'binnen een half uur', 'Nee', '0 keer', 'Ja', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'feestje gevierd', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 6484, 9.7], ['2017/04/25 9:17:13 p.m. EET', 'he46', '23:30', 'Nee', '', '07:00', 'binnen een uur', 'Nee', '1 keer', 'Ja', 'Een beetje', 'Goed', 'Licht actief', 'Vrij actief', '10 of meer', 'Meer dan een uur', 'tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 5363, 2.4], ['2017/04/10 10:00:40 a.m. 
EET', 'MJ87', '23:45', 'Ja', 'Ik wilde nog een extra hoofdstuk lezen', '08:20', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'tv kijken, lezen', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 5244, 12.3], ['2017/04/11 9:56:16 a.m. EET', 'MJ87', '23:30', 'Ja', '', '08:00', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'kletsen, smartphone', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3687, 6.2], ['2017/04/12 3:47:02 p.m. EET', 'MJ87', '23:45', 'Ja', 'Nog even willen appen met iemand', '08:15', 'binnen een half uur', 'Ja', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'gamen, tv, lezen, appen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 14568, 10.3], ['2017/04/13 6:33:28 p.m. 
EET', 'MJ87', '00:45', 'Nee', '', '09:15', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Nauwelijks actief', '6-sep', 'Minder dan een uur', 'cafe, bus', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 5928, 0.9], ['2017/04/14 1:20:19 p.m. EET', 'MJ87', '00:15', 'Nee', '', '09:45', 'binnen een half uur', 'Nee', '1 keer', 'Ja', 'Enigszins', 'Goed', 'Nauwelijks actief', 'Vrij actief', '3-mei', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3953, 6.6], ['2017/04/15 10:45:45 a.m. EET', 'MJ87', '01:00', 'Ja', 'het was nog gezellig', '07:50', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'gamen, bankhangen', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 8599, 4.8], ['2017/04/16 3:37:49 p.m. 
EET', 'MJ87', '00:20', 'Ja', 'Was nog aan het overleggen over een online bestelling die wat langer duurde dan verwacht', '08:00', 'binnen een uur', 'Ja', '1 keer', 'Ja', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'tv kijken, telefoon gebruikt', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', '23:30', 4493, 4.5], ['2017/04/17 9:57:40 a.m. EET', 'MJ87', '23:30', 'Nee', '', '08:40', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'bank hangen, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 2708, 2.9], ['2017/04/18 12:15:40 p.m. EET', 'MJ87', '00:00', 'Ja', 'Nog even lezen en praten', '07:45', 'binnen een half uur', 'Ja', '0 keer', 'Ja', 'Enigszins', 'Matig', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'Passion kijken, lezen, appen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 6770, 9.9], ['2017/04/19 4:49:25 p.m. 
EET', 'MJ87', '02:00', 'Ja', 'er kwamen vrienden langs', '11:00', 'binnen een kwartier', 'Nee', '1 keer', 'Ja', 'Enigszins', 'Goed', 'Vrij actief', 'Licht actief', '10 of meer', 'Minder dan een kwartier', 'vrienden, drinken', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 8186, 10.2], ['2017/04/20 11:26:10 a.m. EET', 'MJ87', '01:00', 'Nee', '', '09:30', 'na meer dan een uur', 'Nee', '0 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 10586, 7.9], ['2017/04/21 1:41:35 p.m. EET', 'MJ87', '23:45', 'Nee', '', '10:15', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'verjaardag ,terug rijden naar huis', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:30', 6246, 11.5], ['2017/04/22 10:56:51 a.m. 
EET', 'MJ87', '00:45', 'Ja', 'Ik was iets later thuis dan verwacht', '08:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'borrel met vrienden, terugrit met metro naar huis', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 8213, 0.3], ['2017/04/23 9:51:38 a.m. EET', 'MJ87', '23:45', 'Ja', 'nog even lezen en door sociale media scrollen', '08:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Licht actief', '3-mei', 'Minder dan een half uur', 'sporten, douchen, tv kijken, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3673, 6.8], ['2017/04/24 10:53:56 a.m. EET', 'MJ87', '00:00', 'Ja', 'nog even lezen', '08:15', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '10 of meer', 'Minder dan een uur', 'bank hangen, lezen in bed', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 6731, 6.8], ['2017/04/25 12:02:42 p.m. 
EET', 'MJ87', '23:45', 'Ja', 'ik was nog in gesprek op de app', '08:30', 'binnen een kwartier', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'tv kijken, lezen, appen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:30', 5315, 1.9], ['2017/04/26 8:23:18 p.m. EET', 'MJ87', '01:30', 'Ja', 'Was langer gezellig dan verwacht ', '11:00', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Vrij actief', '6-sep', 'Minder dan een kwartier', 'Serie kijken met vriendin, naar huis rijden', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 3123, 7.6], ['2017/04/27 9:27:33 p.m. EET', 'MJ87', '01:00', 'Ja', 'zat nog langer te lezen', '11:30', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'cafe, naar huis met de bus, lezen', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 3325, 8.0], ['2017/04/28 10:02:58 a.m. 
EET', 'MJ87', '00:30', 'Ja', 'gamen liep uit en lag nog gezellig te kletsen', '08:00', 'binnen een kwartier', 'Nee', '0 keer', 'Ja', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Vrij actief', '1-feb', 'Meer dan een uur', 'gamen, kletsen', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3466, 7.5], ['2017/04/10 9:37:29 a.m. EET', 'PM61', '23:30', 'Nee', '', '07:15', 'binnen een uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Matig', 'Licht actief', 'Nauwelijks actief', '3-mei', 'Minder dan een uur', 'Metroreis, wandelen, televisie, gesprekken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 15943, 12.3], ['2017/04/11 3:33:28 p.m. EET', 'PM61', '23:30', 'Ja', 'Ik was later thuis dan ik had gehoopt', '07:30', 'na meer dan een uur', 'Ja', '0 keer', 'Nee', 'Ja', 'Matig', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Minder dan een uur', 'Bezoek bij schoonouders, autorijden, betalingen afhandelen, douchen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 4069, 6.2], ['2017/04/12 1:51:50 a.m. 
EET', 'PM61', '23:46', 'Ja', 'Ik was een game aan het uitspelen', '07:30', 'na meer dan een uur', 'Ja', '2-3 keer', 'Nee', 'Ja', 'Slecht', 'Nauwelijks actief', 'Vrij actief', '3-mei', 'Meer dan een uur', 'Gamen, conversatie via telefoon-app', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 2397, 10.3], ['2017/04/13 4:48:23 p.m. EET', 'PM61', '02:00', 'Ja', 'Er kwamen vrienden langs', '11:00', 'binnen een uur', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Zeer actief', 'Licht actief', '6-sep', 'Minder dan een kwartier', 'Met vrienden op de bank zitten', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 5646, 0.9], ['2017/04/14 12:49:48 p.m. EET', 'PM61', '00:00', 'Nee', '', '07:50', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Enigszins', 'Matig', 'Licht actief', 'Licht actief', '1-feb', 'Meer dan een uur', 'Afwassen, gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:00', 5918, 6.6], ['2017/04/15 1:22:06 p.m. 
EET', 'PM61', '00:15', 'Nee', '', '09:30', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Licht actief', '1-feb', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 5743, 4.8], ['2017/04/16 8:46:16 a.m. EET', 'PM61', '23:45', 'Ja', 'Het was maar een kwarter; maar ik was aan het gamen', '07:45', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Enigszins', 'Goed', 'Vrij actief', 'Vrij actief', '3-mei', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 5609, 4.5], ['2017/04/17 10:05:28 a.m. EET', 'PM61', '23:30', 'Nee', '', '07:30', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Meer dan een uur', "Gamen, Skype, ontspannings-video's kijken", 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 2499, 2.9], ['2017/04/18 10:41:26 p.m. 
EET', 'PM61', '00:30', 'Ja', 'Mijn serie (The OA) was interessanter dan ik van te voren had zien aankomen', '07:45', 'binnen een kwartier', 'Nee', '0 keer', 'Ja', 'Ja', 'Matig', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'Serie kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 11129, 9.9], ['2017/04/19 1:33:01 p.m. EET', 'PM61', '00:15', 'Ja', 'Ik was aan het gamen met een vriend en dat liep uit', '07:45', 'na meer dan een uur', 'Ja', '2-3 keer', 'Nee', 'Enigszins', 'Slecht', 'Nauwelijks actief', 'Vrij actief', '3-mei', 'Meer dan een uur', "Gamen, ontspanningsvideo's kijken", 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 7209, 10.2], ['2017/04/20 4:15:11 p.m. EET', 'PM61', '23:30', 'Nee', '', '07:50', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Enigszins', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', "Gamen, ontspanningsvideo's kijken", 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:30', 8042, 7.9], ['2017/04/21 11:25:00 a.m. 
EET', 'PM61', '01:00', 'Nee', '', '21:30', 'na meer dan een uur', 'Nee', '0 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Vrij actief', '3-mei', 'Minder dan een half uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 4918, 11.5], ['2017/04/22 2:10:20 p.m. EET', 'PM61', '00:00', 'Nee', '', '10:00', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'Eten, meerijden in auto, praten', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 10661, 0.3], ['2017/04/23 9:43:26 a.m. EET', 'PM61', '23:30', 'Nee', '', '08:00', 'binnen een uur', 'Ja', '1 keer', 'Nee', 'Enigszins', 'Matig', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Meer dan een uur', 'Reizen met metro, reizen met bus, gamen, op YouTube rondhangen', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 4582, 6.8], ['2017/04/24 12:02:32 a.m. 
EET', 'PM61', '23:50', 'Ja', 'Gamen liep uit', '08:05', 'binnen een half uur', 'Nee', '0 keer', 'Nee', 'Ja', 'Slecht', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Meer dan een uur', 'Naar huis fietsen, gamen', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3420, 6.8], ['2017/04/25 3:17:19 p.m. EET', 'PM61', '01:30', 'Ja', 'Het was gezellig op het terras', '13:00', 'binnen een half uur', 'Nee', '2-3 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Licht actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'Op terras zitten, fietsen, treinreizen, fietsen', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 3311, 1.9], ['2017/04/26 9:27:41 p.m. EET', 'PM61', '01:00', 'Nee', '', '11:30', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Nauwelijks actief', 'Zeer actief', '1-feb', 'Meer dan een uur', 'Gamen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '00:00', 3619, 7.6], ['2017/04/27 12:55:27 a.m. 
EET', 'PM61', '00:15', 'Ja', 'Gamen en douchen liepen uit', '07:40', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Een beetje', 'Goed', 'Licht actief', 'Licht actief', '10 of meer', 'Meer dan een uur', 'Gamen, douchen', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '01:00', 4158, 8.0], ['2017/04/10 1:52:48 p.m. EET', 'wh18', '23:00', 'Nee', '', '08:30', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Erg goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een kwartier', 'tv gekeken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Verleiding ervaren en besloten om er niet aan toe te geven', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 2653, 12.5], ['2017/04/11 8:35:07 a.m. EET', 'wh18', '00:30', 'Ja', "interessant tv programma over de panda's", '08:00', 'binnen een half uur', 'Ja', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'tv kijken, bellen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 2267, 1.9], ['2017/04/12 8:36:22 a.m. 
EET', 'wh18', '23:50', 'Nee', '', '08:20', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'tv kijken, bridgen op computer, bellen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 3345, 1.6], ['2017/04/13 9:00:33 a.m. EET', 'wh18', '23:30', 'Nee', 'ging niet later naar bed', '08:15', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Meer dan een uur', 'lang telefoongesprek vriendin, tv kijken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 3589, 0.1], ['2017/04/14 10:09:44 a.m. EET', 'wh18', '23:45', 'Nee', 'niet later', '09:30', 'binnen een uur', 'Ja', '2-3 keer', 'Nee', 'Een beetje', 'Matig', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een kwartier', 'vrienden te eten, gezellig', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 4121, 6.6], ['2017/04/15 9:05:26 a.m. 
EET', 'wh18', '23:45', 'Nee', 'ging niet later', '08:45', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '6-sep', 'Meer dan een uur', 'tv gekeken ', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 2284, 0.6], ['2017/04/16 10:07:17 a.m. EET', 'wh18', '23:30', 'Nee', 'niet later naar bed gegaan', '08:15', 'binnen een half uur', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '3-mei', 'Minder dan een half uur', 'tv gekeken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 5169, 6.8], ['2017/04/17 9:45:03 a.m. EET', 'wh18', '00:45', 'Ja', 'drukke avond met bridgen', '09:15', 'binnen een uur', 'Ja', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Licht actief', 'Vrij actief', '6-sep', 'Minder dan een half uur', 'bridge, computer, lezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 8694, 8.6], ['2017/04/18 9:13:42 a.m. 
EET', 'wh18', '00:15', 'Ja', 'gezellig', '08:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Meer dan een uur', 'even langs geweest bij moeder, daarna tv gekeken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 6612, 7.6], ['2017/04/19 8:14:31 a.m. EET', 'wh18', '23:45', 'Nee', 'niet later', '08:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '10 of meer', 'Minder dan een kwartier', 'tv gekeken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 3074, 6.3], ['2017/04/20 8:48:56 a.m. EET', 'wh18', '23:30', 'Nee', 'niet later naar bed', '08:30', 'binnen een kwartier', 'Nee', '2-3 keer', 'Nee', 'Enigszins', 'Matig', 'Licht actief', 'Licht actief', '10 of meer', 'Minder dan een half uur', 'verjaardag feestje, daarna tv gekeken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 10536, 5.1], ['2017/04/21 9:11:30 a.m. 
EET', 'wh18', '23:45', 'Nee', 'niet later', '08:30', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een kwartier', 'tv gekeken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 2671, 9.8], ['2017/04/22 9:48:41 a.m. EET', 'wh18', '23:45', 'Nee', 'niet later', '08:15', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '6-sep', 'Minder dan een uur', 'kaarten met vrienden, gezellig', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 2844, 1.1], ['2017/04/23 8:39:35 a.m. EET', 'wh18', '23:45', 'Nee', 'niet later', '08:15', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Minder dan een half uur', 'op bezoek geweest bij vrienden', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 2596, 6.9], ['2017/04/24 10:14:27 a.m. 
EET', 'wh18', '02:00', 'Ja', 'feestje', '09:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Matig', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Minder dan een kwartier', 'feestje', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 2933, 9.7], ['2017/04/25 9:16:09 a.m. EET', 'wh18', '23:45', 'Nee', 'niet later', '09:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'in cafe geweest en nog even tv gekeken', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 3577, 2.4], ['2017/04/26 12:46:40 a.m. EET', 'wh18', '23:30', 'Nee', 'niet later', '08:15', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Nauwelijks actief', '10 of meer', 'Minder dan een kwartier', 'bezoek dus druk', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:45', 3945, 10.4], ['2017/04/27 2:52:48 p.m. 
EET', 'wh18', '23:45', 'Nee', 'niet later', '10:00', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Vrij actief', '10 of meer', 'Minder dan een kwartier', 'klaverjassen met vrienden thuis', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 2557, 8.1], ['2017/04/28 12:31:21 p.m. EET', 'wh18', '00:15', 'Ja', 'gezellig', '08:15', 'binnen een kwartier', 'Nee', '1 keer', 'Nee', 'Helemaal niet', 'Goed', 'Nauwelijks actief', 'Licht actief', '6-sep', 'Minder dan een half uur', 'tv gekeken en gelezen', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Toegegeven aan verleiding', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', 'Geen verleiding ervaren', '23:30', 4651, 8.1]]
| 102,621
| 102,621
| 0.699711
| 14,386
| 102,621
| 4.988948
| 0.042889
| 0.419013
| 0.508241
| 0.477909
| 0.919006
| 0.889203
| 0.873055
| 0.859957
| 0.847766
| 0.834919
| 0
| 0.068209
| 0.124107
| 102,621
| 1
| 102,621
| 102,621
| 0.730266
| 0
| 0
| 0
| 0
| 0
| 0.778956
| 0.001111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 14
|
cb8c764b4267e557476ee312d6f1ad2a09ef54b6
| 116,122
|
py
|
Python
|
soumen1.0.py
|
gakeppuchi/nagashi_soumen
|
0ba381bf87044293863218dd872bfbc89dd85d58
|
[
"MIT"
] | null | null | null |
soumen1.0.py
|
gakeppuchi/nagashi_soumen
|
0ba381bf87044293863218dd872bfbc89dd85d58
|
[
"MIT"
] | null | null | null |
soumen1.0.py
|
gakeppuchi/nagashi_soumen
|
0ba381bf87044293863218dd872bfbc89dd85d58
|
[
"MIT"
] | null | null | null |
#全体のプログラム
#ウィンドウを作るモジュールを呼び出す
from tkinter import*
#ランダムモジュールを読み込む
import random
import copy
# Create the application window and a 720x480 canvas that all draw_* helpers paint on.
win = Tk()
cv = Canvas(win, width = 720, height = 480)
cv.pack()
# Window title: "Nagashi somen" (flowing somen noodles).
win.title(u"流しそうめん")
# Screen rendering
def draw_screen():
    """Wipe the canvas and repaint the plain white 720x480 background."""
    cv.delete('all')
    cv.create_rectangle(0, 0, 720, 480, width=0, fill="white")
# Background rendering
def haikei():
    """Draw the six horizontal lane lines and the SCORE / LIFE header.

    Reads the module globals ``score`` and ``life``; shows "GAME OVER"
    instead of "LIFE" once ``life`` has dropped to zero or below.
    """
    # Six gray lanes, 50px apart, at y = 175 .. 425.
    for lane_y in range(175, 426, 50):
        cv.create_line(0, lane_y, 800, lane_y, fill='gray', width=5)
    header_font = ("Helvetica", 35, "bold")
    cv.create_text(100, 30, text="SCORE", font=header_font)
    cv.create_text(300, 30, text=score, font=header_font)
    if life >= 1:
        cv.create_text(480, 30, text="LIFE", font=header_font)
    elif life <= 0:
        cv.create_text(540, 30, text="GAME OVER", font=header_font)
def life_draw():
    """Draw one heart icon per remaining life, left to right.

    Hearts sit at x = 550, 600, 650.  Exactly 1, 2 or 3 lives are shown;
    any other value of ``life`` draws nothing (as in the original
    per-value branches).
    """
    if life in (1, 2, 3):
        for slot in range(life):
            heart(550 + 50 * slot)
def heart(kokoro):
    """Draw the pixel-art heart icon with its left edge at x offset *kokoro*."""
    # (x1, y1, x2, y2) rectangles relative to the icon origin, top to bottom.
    pixels = (
        (8, 4, 16, 8), (28, 4, 36, 8),
        (4, 8, 20, 12), (24, 8, 40, 12),
        (0, 12, 44, 24),
        (4, 24, 40, 28),
        (8, 28, 36, 32),
        (12, 32, 32, 36),
        (16, 36, 28, 40),
        (20, 40, 24, 44),
    )
    # y coordinates carry a fixed +5 offset from the icon origin.
    for x1, y1, x2, y2 in pixels:
        cv.create_rectangle(x1 + kokoro, y1 + 5, x2 + kokoro, y2 + 5, fill='black')
# Stage layout tables.  Cell codes (as consumed by map() and touch()):
#   0 = empty, 1 = hurdle (bar), 2 = 5-point item (pt5), 3 = 10-point item (pt10).
# Amap: 30 pre-made rows of 25 cells each.  NOTE(review): how Amap rows are
# fed into Zmap is not visible in this chunk — presumably elsewhere in the file.
Amap =[ [ 0, 1, 2, 1, 0, 1, 0, 2, 0, 2, 3, 1, 0, 1, 2, 0, 0, 2, 0, 1, 0, 0, 0, 1, 2, ],
[ 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 2, 1, 0, 0, 0, 2, 2, 0, 0, 1, 3, 1, 0, ],
[ 2, 1, 2, 3, 2, 1, 1, 2, 3, 2, 3, 1, 3, 2, 2, 3, 1, 0, 0, 0, 0, 0, 1, 1, 0, ],
[ 0, 0, 1, 2, 0, 0, 1, 2, 0, 0, 1, 0, 1, 0, 1, 2, 1, 0, 2, 1, 1, 3, 0, 1, 3, ],
[ 2, 0, 0, 1, 2, 1, 3, 2, 3, 2, 1, 1, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 2, ],
[ 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 1, 3, 0, 2, 0, 0, 1, ],
[ 0, 1, 2, 1, 0, 1, 0, 2, 0, 2, 3, 1, 0, 1, 2, 0, 0, 2, 0, 1, 0, 0, 0, 1, 2, ],
[ 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 2, 1, 0, 0, 0, 1, 2, 0, 0, 1, 3, 1, 0, ],
[ 2, 1, 2, 3, 2, 1, 1, 2, 3, 2, 3, 1, 1, 2, 1, 3, 1, 0, 0, 0, 0, 0, 1, 1, 0, ],
[ 0, 0, 1, 2, 0, 0, 1, 2, 0, 0, 1, 0, 1, 0, 1, 2, 1, 0, 2, 1, 1, 3, 0, 1, 3, ],
[ 2, 2, 2, 2, 2, 1, 1, 0, 1, 1, 2, 0, 0, 0, 2, 1, 1, 0, 1, 1, 2, 2, 2, 2, 2, ], #10
[ 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 3, 1, 1, 0, 0, 1, 0, 3, 0, 2, 1, 0, 1, ],
[ 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, ],
[ 0, 1, 1, 0, 0, 0, 1, 0, 0, 2, 1, 0, 2, 1, 0, 1, 0, 2, 2, 0, 0, 1, 3, 1, 3, ],
[ 2, 0, 2, 0, 2, 1, 0, 1, 3, 2, 3, 1, 1, 0, 2, 3, 1, 0, 0, 1, 0, 1, 0, 1, 0, ],
[ 3, 0, 1, 2, 0, 0, 3, 2, 0, 0, 1, 1, 0, 0, 1, 2, 1, 0, 2, 1, 0, 3, 1, 1, 0, ],
[ 1, 0, 0, 0, 1, 1, 0, 2, 3, 2, 0, 1, 3, 2, 2, 1, 2, 2, 2, 2, 2, 0, 2, 1, 2, ],
[ 0, 0, 1, 0, 0, 1, 3, 0, 0, 0, 0, 0, 3, 1, 1, 2, 0, 1, 1, 3, 0, 1, 0, 0, 1, ],
[ 0, 3, 2, 1, 0, 3, 0, 2, 0, 1, 1, 2, 0, 1, 2, 1, 0, 2, 0, 1, 0, 3, 0, 1, 2, ],
[ 0, 1, 1, 0, 2, 2, 1, 0, 1, 0, 3, 1, 2, 1, 0, 2, 0, 2, 2, 0, 0, 1, 3, 1, 0, ],
[ 2, 1, 2, 1, 2, 0, 1, 2, 1, 2, 1, 1, 3, 2, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, ], #20
[ 1, 0, 1, 2, 1, 0, 1, 2, 1, 0, 1, 0, 1, 0, 1, 2, 1, 0, 1, 2, 1, 3, 0, 1, 3, ],
[ 2, 3, 3, 1, 2, 1, 3, 2, 3, 2, 3, 1, 1, 2, 1, 3, 2, 2, 2, 2, 2, 2, 1, 1, 2, ],
[ 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 3, 0, 0, 1, 0, 0, 1, 3, 1, 2, 0, 0, 3, ],
[ 2, 1, 0, 1, 0, 2, 0, 2, 1, 2, 3, 1, 0, 1, 2, 3, 0, 2, 0, 1, 3, 0, 0, 1, 2, ],
[ 0, 2, 1, 0, 3, 1, 2, 0, 0, 0, 0, 3, 1, 1, 0, 0, 0, 2, 1, 0, 0, 1, 3, 1, 0, ],
[ 2, 1, 2, 0, 0, 0, 1, 2, 3, 2, 0, 1, 0, 2, 2, 3, 1, 0, 0, 2, 0, 0, 1, 1, 0, ],
[ 1, 0, 1, 2, 1, 1, 1, 2, 0, 0, 1, 0, 1, 0, 1, 2, 1, 0, 1, 1, 1, 3, 0, 1, 1, ],
[ 1, 0, 0, 1, 2, 1, 3, 2, 3, 1, 3, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, ],
[ 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 3, 1, 0, 2, 0, 0, 1, 3, 0, 2, 1, 0, 1, ],]
# Zmap: the 25 cells currently on screen (5 columns x 5 lanes; cell i is at
# column i % 5, lane i // 5 — see map()).
Zmap = [ 0, 2, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 3, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, ]
# Map rendering
def map():  # NOTE: shadows the builtin map(); name kept for existing callers
    """Draw every non-empty Zmap cell at its on-screen position.

    Cell codes: 1 = hurdle, 2 = 5-point item, 3 = 10-point item;
    0 (or anything else) draws nothing.
    """
    # Cell i lives at column i % 5 (120px pitch, +50) and row i // 5 (50px pitch, +150).
    for i, cell in enumerate(Zmap):
        x = (i % 5) * 120 + 50
        y = (i // 5) * 50 + 150
        if cell == 1:
            bar(x, y)
        elif cell == 2:
            pt5(x, y)
        elif cell == 3:
            pt10(x, y)
# Collision handling
def touch():
    """Resolve collisions between the runner and the Zmap cell it overlaps.

    Reads the module globals ``line`` (which lane the runner is on, 1-5)
    and ``a_yoko`` (the runner's horizontal position).  When the runner
    is inside a column's hit window:

    * cell == 1 (hurdle) while ``mode == 1``: set ``mode = 3`` and reset
      the animation counter ``tekuteku`` to 21;
    * cell == 2: add 5 to ``score`` and clear the cell;
    * cell == 3: add 10 to ``score`` and clear the cell.

    This replaces the original 25 hand-unrolled line/column branches with
    one table-driven lookup; the checks and their order are unchanged.
    """
    global mode
    global tekuteku
    global score
    global Zmap
    # Open horizontal hit windows for map columns 0-4 (exactly the bounds
    # used by the original unrolled code; at most one band can match).
    bands = ((-30, 10), (90, 130), (210, 250), (330, 370), (450, 490))
    if line not in (1, 2, 3, 4, 5):
        return
    for col, (left, right) in enumerate(bands):
        if left < a_yoko < right:
            i = (line - 1) * 5 + col
            if Zmap[i] == 1 and mode == 1:
                mode = 3
                tekuteku = 21
            if Zmap[i] == 2:
                score = score + 5
                Zmap[i] = 0
            if Zmap[i] == 3:
                score = score + 10
                Zmap[i] = 0
# Hurdle rendering
def bar(map_yoko, map_tate):
    """Draw the hurdle obstacle at map offset (map_yoko, map_tate)."""
    # (x1, y1, x2, y2, width) line segments relative to the map offset.
    segments = (
        (10, 40, 10, 75, 3),
        (40, -10, 40, 25, 3),
        (10, 50, 40, 0, 5),
        (10, 45, 40, -5, 5),
    )
    for x1, y1, x2, y2, w in segments:
        cv.create_line(x1 + map_yoko, y1 + map_tate, x2 + map_yoko, y2 + map_tate,
                       fill='black', width=w)
# Item 2 (5-point) rendering
def pt5(map_yoko, map_tate):
    """Draw the 5-point item label at map offset (map_yoko, map_tate)."""
    # Line segments relative to the map offset; the first five form the "5"
    # glyph, the last four the "p" (per the original inline comments).
    strokes = (
        (26, 2, 40, 2),
        (26, 2, 19, 14),
        (19, 14, 33, 14),
        (33, 14, 26, 26),
        (26, 26, 12, 26),
        (43, 14, 29, 38),
        (43, 14, 57, 14),
        (57, 14, 50, 26),
        (50, 26, 34, 26),
    )
    # All y coordinates carry a fixed +32 offset from the cell origin.
    for x1, y1, x2, y2 in strokes:
        cv.create_line(x1 + map_yoko, y1 + 32 + map_tate,
                       x2 + map_yoko, y2 + 32 + map_tate,
                       fill='black', width=5)
#item 3 (10-point pickup) drawing
def pt10(map_yoko, map_tate):
    """Draw the '10P' glyph (10-point item) on the module-level canvas `cv`,
    offset by (map_yoko, map_tate); all strokes sit 32px below the origin."""
    # (x1, y1, x2, y2) relative to the glyph origin, before the +32 y shift
    strokes = (
        # digits "10"
        (16,  2,  2, 25),
        (26,  2, 12, 25),
        (26,  2, 40,  2),
        (40,  2, 26, 25),
        (12, 25, 26, 25),
        # letter "P"
        (43, 14, 29, 38),
        (43, 14, 57, 14),
        (57, 14, 50, 26),
        (50, 26, 34, 26),
    )
    for x1, y1, x2, y2 in strokes:
        cv.create_line(x1 + map_yoko, y1 + 32 + map_tate,
                       x2 + map_yoko, y2 + 32 + map_tate,
                       fill='black', width=5)
#ランニング アニメーション
def runman():
if tekuteku == 1:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (20)*3 +a_yoko, (17)*3 +a_tate, (23)*3 +a_yoko, (44)*3 +a_tate, (32)*3 +a_yoko, (41)*3 +a_tate ) #体
cv.create_line ( (25)*3 +a_yoko, (31)*3 +a_tate, (19)*3 +a_yoko, (37)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (19)*3 +a_yoko, (37)*3 +a_tate, (13)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (11-3)*3 +a_yoko, (40-3)*3 +a_tate, (11+3)*3 +a_yoko, (40+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (23)*3 +a_yoko, (30)*3 +a_tate, (30)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (30)*3 +a_yoko, (35)*3 +a_tate, (30)*3 +a_yoko, (41)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (43-3)*3 +a_tate, (30+3)*3 +a_yoko, (43+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (26)*3 +a_yoko, (40)*3 +a_tate, (32)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (32)*3 +a_yoko, (52)*3 +a_tate, (39)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (39-3)*3 +a_yoko, (62-3)*3 +a_tate, (39+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (26)*3 +a_yoko, (41)*3 +a_tate, (14)*3 +a_yoko, (47)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (14)*3 +a_yoko, (47)*3 +a_tate, (21)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (20-3)*3 +a_yoko, (56-3)*3 +a_tate, (20+3)*3 +a_yoko, (56+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (20-12)*3 +a_yoko, (17-12)*3 +a_tate, (20+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (20-10)*3 +a_yoko, (17-10)*3 +a_tate, (20+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (20 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 2:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (76-55)*3 +a_yoko, (21)*3 +a_tate, (77-55)*3 +a_yoko, (48)*3 +a_tate, (87-55)*3 +a_yoko, (46)*3 +a_tate ) #体
cv.create_line ( (78-55)*3 +a_yoko, (34)*3 +a_tate, (73-55)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (73-55)*3 +a_yoko, (39)*3 +a_tate, (68-55)*3 +a_yoko, (37)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (69-55-3)*3 +a_yoko, (37-3)*3 +a_tate, (69-55+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (80-55)*3 +a_yoko, (32)*3 +a_tate, (88-55)*3 +a_yoko, (33)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (88-55)*3 +a_yoko, (33)*3 +a_tate, (88-55)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (89-55-3)*3 +a_yoko, (42-3)*3 +a_tate, (89-55+3)*3 +a_yoko, (42+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (82-55)*3 +a_yoko, (44)*3 +a_tate, (87-55)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (87-55)*3 +a_yoko, (51)*3 +a_tate, (95-55)*3 +a_yoko, (47)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (95-55-3)*3 +a_yoko, (47-3)*3 +a_tate, (95-55+3)*3 +a_yoko, (47+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (80-55)*3 +a_yoko, (42)*3 +a_tate, (76-55)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (76-55)*3 +a_yoko, (55)*3 +a_tate, (74-55)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (72-55-3)*3 +a_yoko, (64-3)*3 +a_tate, (72-55+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (74-55-12)*3 +a_yoko, (19-12)*3 +a_tate, (74-55+12)*3 +a_yoko, (19+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (74-55-10)*3 +a_yoko, (19-10)*3 +a_tate, (74-55+10)*3 +a_yoko, (19+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (19 )*3 +a_yoko, (19 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 3:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (63-36)*3 +a_yoko, (24)*3 +a_tate, (65-36)*3 +a_yoko, (51)*3 +a_tate, (75-36)*3 +a_yoko, (49)*3 +a_tate ) #体
cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (66-36)*3 +a_yoko, (41)*3 +a_tate, (62-36)*3 +a_yoko, (44)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (62-36-3)*3 +a_yoko, (44-3)*3 +a_tate, (62-36+3)*3 +a_yoko, (44+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (65-36)*3 +a_yoko, (34)*3 +a_tate, (71-36)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (71-36)*3 +a_yoko, (40)*3 +a_tate, (68-36)*3 +a_yoko, (48)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (66-36-3)*3 +a_yoko, (49-3)*3 +a_tate, (66-36+3)*3 +a_yoko, (49+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (69-36)*3 +a_yoko, (48)*3 +a_tate, (60-36)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (60-36)*3 +a_yoko, (54)*3 +a_tate, (72-36)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (73-36-3)*3 +a_yoko, (56-3)*3 +a_tate, (73-36+3)*3 +a_yoko, (56+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (71-36)*3 +a_yoko, (48)*3 +a_tate, (66-36)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (66-36)*3 +a_yoko, (55)*3 +a_tate, (73-36)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (72-36-3)*3 +a_yoko, (61-3)*3 +a_tate, (72-36+3)*3 +a_yoko, (61+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (61-36-12)*3 +a_yoko, (21-12)*3 +a_tate, (61-36+12)*3 +a_yoko, (21+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (61-36-10)*3 +a_yoko, (21-10)*3 +a_tate, (61-36+10)*3 +a_yoko, (21+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (21 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 4:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (54-27)*3 +a_yoko, (20)*3 +a_tate, (58-27)*3 +a_yoko, (48)*3 +a_tate, (66-27)*3 +a_yoko, (44)*3 +a_tate ) #体
cv.create_line ( (56-27)*3 +a_yoko, (31)*3 +a_tate, (65-27)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (65-27)*3 +a_yoko, (35)*3 +a_tate, (65-27)*3 +a_yoko, (41)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (65-27-3)*3 +a_yoko, (43-3)*3 +a_tate, (65-27+3)*3 +a_yoko, (43+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (56-27)*3 +a_yoko, (33)*3 +a_tate, (52-27)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (52-27)*3 +a_yoko, (40)*3 +a_tate, (49-27)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (49-27-3)*3 +a_yoko, (41-3)*3 +a_tate, (49-27+3)*3 +a_yoko, (41+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (62-27)*3 +a_yoko, (42)*3 +a_tate, (50-27)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (50-27)*3 +a_yoko, (50)*3 +a_tate, (57-27)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (56-27-3)*3 +a_yoko, (56-3)*3 +a_tate, (56-27+3)*3 +a_yoko, (56+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (61-27)*3 +a_yoko, (42)*3 +a_tate, (66-27)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (66-27)*3 +a_yoko, (54)*3 +a_tate, (73-27)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (72-27-3)*3 +a_yoko, (61-3)*3 +a_tate, (72-27+3)*3 +a_yoko, (61+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (54-27-12)*3 +a_yoko, (19-12)*3 +a_tate, (54-27+12)*3 +a_yoko, (19+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (54-27-10)*3 +a_yoko, (19-10)*3 +a_tate, (54-27+10)*3 +a_yoko, (19+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (27 )*3 +a_yoko, (19 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 5:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (43-18)*3 +a_yoko, (21)*3 +a_tate, (44-18)*3 +a_yoko, (49)*3 +a_tate, (53-18)*3 +a_yoko, (47)*3 +a_tate ) #体
cv.create_line ( (45-18)*3 +a_yoko, (33)*3 +a_tate, (55-18)*3 +a_yoko, (33)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (55-18)*3 +a_yoko, (33)*3 +a_tate, (58-18)*3 +a_yoko, (41)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (57-18-3)*3 +a_yoko, (42-3)*3 +a_tate, (57-18+3)*3 +a_yoko, (42+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (44-18)*3 +a_yoko, (33)*3 +a_tate, (38-18)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (38-18)*3 +a_yoko, (40)*3 +a_tate, (40-18)*3 +a_yoko, (38)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (41-18-3)*3 +a_yoko, (40-3)*3 +a_tate, (41-18+3)*3 +a_yoko, (40+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (49-18)*3 +a_yoko, (43)*3 +a_tate, (43-18)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (43-18)*3 +a_yoko, (55)*3 +a_tate, (40-18)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (39-18-3)*3 +a_yoko, (64-3)*3 +a_tate, (39-18+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (46-18)*3 +a_yoko, (45)*3 +a_tate, (52-18)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (52-18)*3 +a_yoko, (54)*3 +a_tate, (62-18)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (62-18-3)*3 +a_yoko, (52-3)*3 +a_tate, (62-18+3)*3 +a_yoko, (52+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (41-18-12)*3 +a_yoko, (20-12)*3 +a_tate, (41-18+12)*3 +a_yoko, (20+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (41-18-10)*3 +a_yoko, (20-10)*3 +a_tate, (41-18+10)*3 +a_yoko, (20+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (23 )*3 +a_yoko, (20 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 6:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (32-9)*3 +a_yoko, (23)*3 +a_tate, (33-9)*3 +a_yoko, (50)*3 +a_tate, (42-9)*3 +a_yoko, (49)*3 +a_tate ) #体
cv.create_line ( (33-9)*3 +a_yoko, (34)*3 +a_tate, (39-9)*3 +a_yoko, (42)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (39-9)*3 +a_yoko, (42)*3 +a_tate, (35-9)*3 +a_yoko, (47)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (34-9-3)*3 +a_yoko, (48-3)*3 +a_tate, (34-9+3)*3 +a_yoko, (48+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (34-9)*3 +a_yoko, (34)*3 +a_tate, (36-9)*3 +a_yoko, (37)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (36-9)*3 +a_yoko, (37)*3 +a_tate, (31-9)*3 +a_yoko, (41)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-9-3)*3 +a_yoko, (44-3)*3 +a_tate, (30-9+3)*3 +a_yoko, (44+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (40-9)*3 +a_yoko, (46)*3 +a_tate, (33-9)*3 +a_yoko, (57)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (33-9)*3 +a_yoko, (57)*3 +a_tate, (41-9)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (39-9-3)*3 +a_yoko, (64-3)*3 +a_tate, (39-9+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (37-9)*3 +a_yoko, (46)*3 +a_tate, (28-9)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (28-9)*3 +a_yoko, (53)*3 +a_tate, (38-9)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (40-9-3)*3 +a_yoko, (56-3)*3 +a_tate, (40-9+3)*3 +a_yoko, (56+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (30-9-12)*3 +a_yoko, (22-12)*3 +a_tate, (30-9+12)*3 +a_yoko, (22+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (30-9-10)*3 +a_yoko, (22-10)*3 +a_tate, (30-9+10)*3 +a_yoko, (22+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (21 )*3 +a_yoko, (22 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
#ジャンプ アニメーション
if tekuteku == 11:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (19)*3 +a_yoko, (22)*3 +a_tate, (33)*3 +a_yoko, (46)*3 +a_tate, (40)*3 +a_yoko, (39)*3 +a_tate ) #体
cv.create_line ( (26)*3 +a_yoko, (30)*3 +a_tate, (22)*3 +a_yoko, (38)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (22)*3 +a_yoko, (38)*3 +a_tate, (16)*3 +a_yoko, (38)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (15-3)*3 +a_yoko, (37-3)*3 +a_tate, (15+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (26)*3 +a_yoko, (30)*3 +a_tate, (27)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (27)*3 +a_yoko, (39)*3 +a_tate, (20)*3 +a_yoko, (41)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (19-3)*3 +a_yoko, (41-3)*3 +a_tate, (19+3)*3 +a_yoko, (41+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (35)*3 +a_yoko, (40)*3 +a_tate, (26)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (26)*3 +a_yoko, (50)*3 +a_tate, (35)*3 +a_yoko, (56)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (34-3)*3 +a_yoko, (56-3)*3 +a_tate, (34+3)*3 +a_yoko, (56+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (35)*3 +a_yoko, (40)*3 +a_tate, (23)*3 +a_yoko, (47)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (23)*3 +a_yoko, (47)*3 +a_tate, (30)*3 +a_yoko, (56)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (28-3)*3 +a_yoko, (56-3)*3 +a_tate, (28+3)*3 +a_yoko, (56+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (17-12)*3 +a_yoko, (20-12)*3 +a_tate, (17+12)*3 +a_yoko, (20+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (17-10)*3 +a_yoko, (20-10)*3 +a_tate, (17+10)*3 +a_yoko, (20+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (17 )*3 +a_yoko, (20 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 12:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (19)*3 +a_yoko, (24)*3 +a_tate, (46)*3 +a_yoko, (28)*3 +a_tate, (46)*3 +a_yoko, (18)*3 +a_tate ) #体
cv.create_line ( (31)*3 +a_yoko, (26)*3 +a_tate, (26)*3 +a_yoko, (34)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (26)*3 +a_yoko, (34)*3 +a_tate, (20)*3 +a_yoko, (34)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (20-3)*3 +a_yoko, (33-3)*3 +a_tate, (20+3)*3 +a_yoko, (33+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (30)*3 +a_yoko, (26)*3 +a_tate, (31)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (31)*3 +a_yoko, (35)*3 +a_tate, (25)*3 +a_yoko, (38)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (23-3)*3 +a_yoko, (37-3)*3 +a_tate, (23+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (45)*3 +a_yoko, (24)*3 +a_tate, (35)*3 +a_yoko, (34)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (35)*3 +a_yoko, (34)*3 +a_tate, (46)*3 +a_yoko, (33)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (46-3)*3 +a_yoko, (34-3)*3 +a_tate, (46+3)*3 +a_yoko, (34+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (44)*3 +a_yoko, (23)*3 +a_tate, (34)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (34)*3 +a_yoko, (32)*3 +a_tate, (45)*3 +a_yoko, (37)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (45-3)*3 +a_yoko, (36-3)*3 +a_tate, (45+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (15-12)*3 +a_yoko, (27-12)*3 +a_tate, (15+12)*3 +a_yoko, (27+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (15-10)*3 +a_yoko, (27-10)*3 +a_tate, (15+10)*3 +a_yoko, (27+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (15 )*3 +a_yoko, (27 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 13:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (12)*3 +a_yoko, (29)*3 +a_tate, (25)*3 +a_yoko, (6)*3 +a_tate, (16)*3 +a_yoko, (3)*3 +a_tate ) #体
cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (16)*3 +a_yoko, (18)*3 +a_tate, (22)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (23-3)*3 +a_yoko, (21-3)*3 +a_tate, (23+3)*3 +a_yoko, (21+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (16)*3 +a_yoko, (18)*3 +a_tate, (22)*3 +a_yoko, (21)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (22-3)*3 +a_yoko, (23-3)*3 +a_tate, (22+3)*3 +a_yoko, (23+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (19)*3 +a_yoko, (5)*3 +a_tate, (22)*3 +a_yoko, (18)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (22)*3 +a_yoko, (18)*3 +a_tate, (26)*3 +a_yoko, (7)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (26-3)*3 +a_yoko, (8-3)*3 +a_tate, (26+3)*3 +a_yoko, (8+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (19)*3 +a_yoko, (5)*3 +a_tate, (23)*3 +a_yoko, (18)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (23)*3 +a_yoko, (18)*3 +a_tate, (30)*3 +a_yoko, (10)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (31-3)*3 +a_yoko, (11-3)*3 +a_tate, (31+3)*3 +a_yoko, (11+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (19-12)*3 +a_yoko, (31-12)*3 +a_tate, (19+12)*3 +a_yoko, (31+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (19-10)*3 +a_yoko, (31-10)*3 +a_tate, (19+10)*3 +a_yoko, (31+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (19 )*3 +a_yoko, (31 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 14:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (27)*3 +a_yoko, (31)*3 +a_tate, (12)*3 +a_yoko, (9)*3 +a_tate, (5)*3 +a_yoko, (16)*3 +a_tate ) #体
cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (16)*3 +a_yoko, (18)*3 +a_tate, (22)*3 +a_yoko, (21)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (23-3)*3 +a_yoko, (21-3)*3 +a_tate, (23+3)*3 +a_yoko, (21+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (16)*3 +a_yoko, (19)*3 +a_tate, (22)*3 +a_yoko, (22)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (22-3)*3 +a_yoko, (23-3)*3 +a_tate, (22+3)*3 +a_yoko, (23+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (9)*3 +a_yoko, (14)*3 +a_tate, (22)*3 +a_yoko, (12)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (22)*3 +a_yoko, (12)*3 +a_tate, (14)*3 +a_yoko, (5)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (14-3)*3 +a_yoko, (5-3)*3 +a_tate, (14+3)*3 +a_yoko, (5+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (10)*3 +a_yoko, (12)*3 +a_tate, (23)*3 +a_yoko, (8)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (23)*3 +a_yoko, (8)*3 +a_tate, (13)*3 +a_yoko, (5)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (13-3)*3 +a_yoko, (6-3)*3 +a_tate, (13+3)*3 +a_yoko, (6+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (19-12)*3 +a_yoko, (31-12)*3 +a_tate, (19+12)*3 +a_yoko, (31+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (19-10)*3 +a_yoko, (31-10)*3 +a_tate, (19+10)*3 +a_yoko, (31+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (19 )*3 +a_yoko, (31 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 15:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (36)*3 +a_yoko, (25)*3 +a_tate, (12)*3 +a_yoko, (39)*3 +a_tate, (19)*3 +a_yoko, (46)*3 +a_tate ) #体
cv.create_line ( (25)*3 +a_yoko, (32)*3 +a_tate, (19)*3 +a_yoko, (33)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (19)*3 +a_yoko, (33)*3 +a_tate, (16)*3 +a_yoko, (27)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (17-3)*3 +a_yoko, (27-3)*3 +a_tate, (17+3)*3 +a_yoko, (27+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (24)*3 +a_yoko, (34)*3 +a_tate, (31)*3 +a_yoko, (37)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (31)*3 +a_yoko, (37)*3 +a_tate, (29)*3 +a_yoko, (43)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (29-3)*3 +a_yoko, (43-3)*3 +a_tate, (29+3)*3 +a_yoko, (43+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (18)*3 +a_yoko, (42)*3 +a_tate, (10)*3 +a_yoko, (29)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (10)*3 +a_yoko, (29)*3 +a_tate, (6)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (5-3)*3 +a_yoko, (39-3)*3 +a_tate, (5+3)*3 +a_yoko, (39+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (19)*3 +a_yoko, (42)*3 +a_tate, (7)*3 +a_yoko, (49)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (7)*3 +a_yoko, (49)*3 +a_tate, (12)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (10-3)*3 +a_yoko, (60-3)*3 +a_tate, (10+3)*3 +a_yoko, (60+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (34-12)*3 +a_yoko, (20-12)*3 +a_tate, (34+12)*3 +a_yoko, (20+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (34-10)*3 +a_yoko, (20-10)*3 +a_tate, (34+10)*3 +a_yoko, (20+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (34 )*3 +a_yoko, (20 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 16:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (18)*3 +a_yoko, (36)*3 +a_tate, (32)*3 +a_yoko, (59)*3 +a_tate, (39)*3 +a_yoko, (52)*3 +a_tate ) #体
cv.create_line ( (24)*3 +a_yoko, (43)*3 +a_tate, (34)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (34)*3 +a_yoko, (40)*3 +a_tate, (36)*3 +a_yoko, (46)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (37-3)*3 +a_yoko, (48-3)*3 +a_tate, (37+3)*3 +a_yoko, (48+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (24)*3 +a_yoko, (44)*3 +a_tate, (19)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (19)*3 +a_yoko, (53)*3 +a_tate, (11)*3 +a_yoko, (57)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (10-3)*3 +a_yoko, (58-3)*3 +a_tate, (10+3)*3 +a_yoko, (58+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (35)*3 +a_yoko, (54)*3 +a_tate, (21)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (21)*3 +a_yoko, (53)*3 +a_tate, (20)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (18-3)*3 +a_yoko, (64-3)*3 +a_tate, (18+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (35)*3 +a_yoko, (54)*3 +a_tate, (31)*3 +a_yoko, (65)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (31)*3 +a_yoko, (65)*3 +a_tate, (42)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (42-3)*3 +a_yoko, (64-3)*3 +a_tate, (42+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (14-12)*3 +a_yoko, (34-12)*3 +a_tate, (14+12)*3 +a_yoko, (34+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (14-10)*3 +a_yoko, (34-10)*3 +a_tate, (14+10)*3 +a_yoko, (34+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (14 )*3 +a_yoko, (34 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
#スリップ アニメーション
if tekuteku == 21:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (14)*3 +a_yoko, (23)*3 +a_tate, (27)*3 +a_yoko, (47)*3 +a_tate, (35)*3 +a_yoko, (41)*3 +a_tate ) #体
cv.create_line ( (24)*3 +a_yoko, (35)*3 +a_tate, (18)*3 +a_yoko, (42)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (18)*3 +a_yoko, (42)*3 +a_tate, (12)*3 +a_yoko, (42)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (12-3)*3 +a_yoko, (42-3)*3 +a_tate, (12+3)*3 +a_yoko, (42+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (23)*3 +a_yoko, (33)*3 +a_tate, (25)*3 +a_yoko, (42)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (25)*3 +a_yoko, (42)*3 +a_tate, (21)*3 +a_yoko, (46)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (20-3)*3 +a_yoko, (46-3)*3 +a_tate, (20+3)*3 +a_yoko, (46+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (30)*3 +a_yoko, (43)*3 +a_tate, (38)*3 +a_yoko, (56)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (38)*3 +a_yoko, (56)*3 +a_tate, (39)*3 +a_yoko, (56)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (38-3)*3 +a_yoko, (57-3)*3 +a_tate, (38+3)*3 +a_yoko, (57+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (30)*3 +a_yoko, (43)*3 +a_tate, (41)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (41)*3 +a_yoko, (51)*3 +a_tate, (51)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (51-3)*3 +a_yoko, (53-3)*3 +a_tate, (51+3)*3 +a_yoko, (53+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (15-12)*3 +a_yoko, (21-12)*3 +a_tate, (15+12)*3 +a_yoko, (21+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (15-10)*3 +a_yoko, (21-10)*3 +a_tate, (15+10)*3 +a_yoko, (21+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (15 )*3 +a_yoko, (21 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 22:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (10)*3 +a_yoko, (30)*3 +a_tate, (29)*3 +a_yoko, (49)*3 +a_tate, (34)*3 +a_yoko, (41)*3 +a_tate ) #体
cv.create_line ( (20)*3 +a_yoko, (38)*3 +a_tate, (15)*3 +a_yoko, (45)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (15)*3 +a_yoko, (45)*3 +a_tate, (8)*3 +a_yoko, (46)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (9-3)*3 +a_yoko, (45-3)*3 +a_tate, (9+3)*3 +a_yoko, (45+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (20)*3 +a_yoko, (37)*3 +a_tate, (22)*3 +a_yoko, (46)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (22)*3 +a_yoko, (46)*3 +a_tate, (18)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (16-3)*3 +a_yoko, (50-3)*3 +a_tate, (16+3)*3 +a_yoko, (50+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (29)*3 +a_yoko, (43)*3 +a_tate, (41)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (41)*3 +a_yoko, (50)*3 +a_tate, (50)*3 +a_yoko, (43)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (50-3)*3 +a_yoko, (45-3)*3 +a_tate, (50+3)*3 +a_yoko, (45+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (29)*3 +a_yoko, (43)*3 +a_tate, (45)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (45)*3 +a_yoko, (55)*3 +a_tate, (46)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (46-3)*3 +a_yoko, (56-3)*3 +a_tate, (46+3)*3 +a_yoko, (56+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (14-12)*3 +a_yoko, (24-12)*3 +a_tate, (14+12)*3 +a_yoko, (24+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (14-10)*3 +a_yoko, (24-10)*3 +a_tate, (14+10)*3 +a_yoko, (24+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (14 )*3 +a_yoko, (24 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 23:
cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (12)*3 +a_yoko, (44)*3 +a_tate, (38)*3 +a_yoko, (53)*3 +a_tate, (40)*3 +a_yoko, (43)*3 +a_tate ) #体
cv.create_line ( (22)*3 +a_yoko, (46)*3 +a_tate, (15)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (15)*3 +a_yoko, (52)*3 +a_tate, (5)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (4-3)*3 +a_yoko, (52-3)*3 +a_tate, (4+3)*3 +a_yoko, (52+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (23)*3 +a_yoko, (46)*3 +a_tate, (20)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (20)*3 +a_yoko, (54)*3 +a_tate, (13)*3 +a_yoko, (58)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (12-3)*3 +a_yoko, (58-3)*3 +a_tate, (12+3)*3 +a_yoko, (58+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (36)*3 +a_yoko, (47)*3 +a_tate, (50)*3 +a_yoko, (49)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (50)*3 +a_yoko, (49)*3 +a_tate, (58)*3 +a_yoko, (41)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (57-3)*3 +a_yoko, (42-3)*3 +a_tate, (57+3)*3 +a_yoko, (42+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (36)*3 +a_yoko, (48)*3 +a_tate, (48)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (48)*3 +a_yoko, (55)*3 +a_tate, (59)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (59-3)*3 +a_yoko, (53-3)*3 +a_tate, (59+3)*3 +a_yoko, (53+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (15-12)*3 +a_yoko, (38-12)*3 +a_tate, (15+12)*3 +a_yoko, (38+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (15-10)*3 +a_yoko, (38-10)*3 +a_tate, (15+10)*3 +a_yoko, (38+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (15 )*3 +a_yoko, (38 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
# Walk-cycle frames 24-26. Each branch draws one complete animation frame of the
# stick figure on Canvas `cv`. All coordinates are on a 3x-scaled grid offset by
# the figure's origin (a_yoko = x offset, a_tate = y offset).
if tekuteku == 24:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (16)*3 +a_yoko, (58)*3 +a_tate, (43)*3 +a_yoko, (62)*3 +a_tate, (43)*3 +a_yoko, (52)*3 +a_tate ) # torso
    cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) # right upper arm -- NOTE(review): degenerate (0,0)->(0,0) line; presumably the arm is hidden this frame -- confirm
    cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) # right lower arm (degenerate, see above)
    cv.create_oval ( (5-3)*3 +a_yoko, (58-3)*3 +a_tate, (5+3)*3 +a_yoko, (58+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (26)*3 +a_yoko, (56)*3 +a_tate, (18)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (18)*3 +a_yoko, (63)*3 +a_tate, (9)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (7-3)*3 +a_yoko, (61-3)*3 +a_tate, (7+3)*3 +a_yoko, (61+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (41)*3 +a_yoko, (57)*3 +a_tate, (55)*3 +a_yoko, (58)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (55)*3 +a_yoko, (58)*3 +a_tate, (63)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (63-3)*3 +a_yoko, (51-3)*3 +a_tate, (63+3)*3 +a_yoko, (51+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (41)*3 +a_yoko, (58)*3 +a_tate, (55)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (55)*3 +a_yoko, (60)*3 +a_tate, (67)*3 +a_yoko, (58)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (67-3)*3 +a_yoko, (59-3)*3 +a_tate, (67+3)*3 +a_yoko, (59+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (16-12)*3 +a_yoko, (50-12)*3 +a_tate, (16+12)*3 +a_yoko, (50+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (16-10)*3 +a_yoko, (50-10)*3 +a_tate, (16+10)*3 +a_yoko, (50+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (16 )*3 +a_yoko, (50 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 25:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (16)*3 +a_yoko, (62)*3 +a_tate, (43)*3 +a_yoko, (66)*3 +a_tate, (43)*3 +a_yoko, (56)*3 +a_tate ) # torso
    cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) # right upper arm (degenerate -- hidden this frame)
    cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) # right lower arm (degenerate)
    cv.create_oval ( (5-3)*3 +a_yoko, (62-3)*3 +a_tate, (5+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (26)*3 +a_yoko, (60)*3 +a_tate, (18)*3 +a_yoko, (67)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (18)*3 +a_yoko, (67)*3 +a_tate, (9)*3 +a_yoko, (66)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (7-3)*3 +a_yoko, (65-3)*3 +a_tate, (7+3)*3 +a_yoko, (65+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (41)*3 +a_yoko, (61)*3 +a_tate, (55)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (55)*3 +a_yoko, (62)*3 +a_tate, (63)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (63-3)*3 +a_yoko, (55-3)*3 +a_tate, (63+3)*3 +a_yoko, (55+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (41)*3 +a_yoko, (62)*3 +a_tate, (55)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (55)*3 +a_yoko, (64)*3 +a_tate, (67)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (67-3)*3 +a_yoko, (63-3)*3 +a_tate, (67+3)*3 +a_yoko, (63+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (16-12)*3 +a_yoko, (54-12)*3 +a_tate, (16+12)*3 +a_yoko, (54+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (16-10)*3 +a_yoko, (54-10)*3 +a_tate, (16+10)*3 +a_yoko, (54+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (16 )*3 +a_yoko, (54 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
# Frame 26 is byte-identical to frame 25 -- presumably a deliberate two-tick hold of the pose.
if tekuteku == 26:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (16)*3 +a_yoko, (62)*3 +a_tate, (43)*3 +a_yoko, (66)*3 +a_tate, (43)*3 +a_yoko, (56)*3 +a_tate ) # torso
    cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) # right upper arm (degenerate -- hidden this frame)
    cv.create_line ( (0)*3 +a_yoko, (0)*3 +a_tate, (0)*3 +a_yoko, (0)*3 +a_tate, fill='black', width = 5 ) # right lower arm (degenerate)
    cv.create_oval ( (5-3)*3 +a_yoko, (62-3)*3 +a_tate, (5+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (26)*3 +a_yoko, (60)*3 +a_tate, (18)*3 +a_yoko, (67)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (18)*3 +a_yoko, (67)*3 +a_tate, (9)*3 +a_yoko, (66)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (7-3)*3 +a_yoko, (65-3)*3 +a_tate, (7+3)*3 +a_yoko, (65+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (41)*3 +a_yoko, (61)*3 +a_tate, (55)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (55)*3 +a_yoko, (62)*3 +a_tate, (63)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (63-3)*3 +a_yoko, (55-3)*3 +a_tate, (63+3)*3 +a_yoko, (55+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (41)*3 +a_yoko, (62)*3 +a_tate, (55)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (55)*3 +a_yoko, (64)*3 +a_tate, (67)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (67-3)*3 +a_yoko, (63-3)*3 +a_tate, (67+3)*3 +a_yoko, (63+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (16-12)*3 +a_yoko, (54-12)*3 +a_tate, (16+12)*3 +a_yoko, (54+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (16-10)*3 +a_yoko, (54-10)*3 +a_tate, (16+10)*3 +a_yoko, (54+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (16 )*3 +a_yoko, (54 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
# Getting-up animation, frames 31-35: the figure rises from lying down to standing.
# Same coordinate scheme as the walk frames: grid coords * 3, offset by (a_yoko, a_tate).
if tekuteku == 31:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (18)*3 +a_yoko, (55)*3 +a_tate, (44)*3 +a_yoko, (59)*3 +a_tate, (44)*3 +a_yoko, (50)*3 +a_tate ) # torso
    cv.create_line ( (29)*3 +a_yoko, (53)*3 +a_tate, (22)*3 +a_yoko, (61)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (61)*3 +a_tate, (14)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (13-3)*3 +a_yoko, (61-3)*3 +a_tate, (13+3)*3 +a_yoko, (61+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (28)*3 +a_yoko, (54)*3 +a_tate, (34)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (34)*3 +a_yoko, (60)*3 +a_tate, (29)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (28-3)*3 +a_yoko, (62-3)*3 +a_tate, (28+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (43)*3 +a_yoko, (54)*3 +a_tate, (34)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (34)*3 +a_yoko, (63)*3 +a_tate, (45)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (62-3)*3 +a_yoko, (63-3)*3 +a_tate, (62+3)*3 +a_yoko, (63+3)*3 +a_tate, fill='black') # right foot -- NOTE(review): centered at x=62 though the right shin ends near x=45; the two foot ovals look swapped -- confirm against rendered frame
    cv.create_line ( (43)*3 +a_yoko, (54)*3 +a_tate, (50)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (50)*3 +a_yoko, (64)*3 +a_tate, (61)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (45-3)*3 +a_yoko, (62-3)*3 +a_tate, (45+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') # left foot -- NOTE(review): x=45 matches the RIGHT shin end; see note above
    cv.create_oval ( (13-12)*3 +a_yoko, (50-12)*3 +a_tate, (13+12)*3 +a_yoko, (50+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (13-10)*3 +a_yoko, (50-10)*3 +a_tate, (13+10)*3 +a_yoko, (50+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (13 )*3 +a_yoko, (50 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 32:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (18)*3 +a_yoko, (52)*3 +a_tate, (45)*3 +a_yoko, (57)*3 +a_tate, (45)*3 +a_yoko, (47)*3 +a_tate ) # torso
    cv.create_line ( (29)*3 +a_yoko, (52)*3 +a_tate, (23)*3 +a_yoko, (59)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (23)*3 +a_yoko, (59)*3 +a_tate, (16)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (15-3)*3 +a_yoko, (64-3)*3 +a_tate, (15+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (29)*3 +a_yoko, (52)*3 +a_tate, (34)*3 +a_yoko, (58)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (34)*3 +a_yoko, (58)*3 +a_tate, (30)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (30-3)*3 +a_yoko, (64-3)*3 +a_tate, (30+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (43)*3 +a_yoko, (52)*3 +a_tate, (37)*3 +a_yoko, (65)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (37)*3 +a_yoko, (65)*3 +a_tate, (48)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (49-3)*3 +a_yoko, (64-3)*3 +a_tate, (49+3)*3 +a_yoko, (60+3)*3 +a_tate, fill='black') # right foot -- NOTE(review): y-corners are (64-3)..(60+3), asymmetric; every other foot oval uses matching centers, so (64+3) was probably intended -- confirm before changing
    cv.create_line ( (43)*3 +a_yoko, (53)*3 +a_tate, (49)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (49)*3 +a_yoko, (64)*3 +a_tate, (60)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (60-3)*3 +a_yoko, (64-3)*3 +a_tate, (60+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (15-12)*3 +a_yoko, (43-12)*3 +a_tate, (15+12)*3 +a_yoko, (43+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (15-10)*3 +a_yoko, (43-10)*3 +a_tate, (15+10)*3 +a_yoko, (43+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (15 )*3 +a_yoko, (43 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 33:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (23)*3 +a_yoko, (37)*3 +a_tate, (41)*3 +a_yoko, (57)*3 +a_tate, (47)*3 +a_yoko, (50)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (44)*3 +a_tate, (30)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (30)*3 +a_yoko, (54)*3 +a_tate, (25)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (24-3)*3 +a_yoko, (63-3)*3 +a_tate, (24+3)*3 +a_yoko, (63+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (44)*3 +a_tate, (36)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (36)*3 +a_yoko, (53)*3 +a_tate, (37)*3 +a_yoko, (57)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (37-3)*3 +a_yoko, (59-3)*3 +a_tate, (37+3)*3 +a_yoko, (59+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (43)*3 +a_yoko, (52)*3 +a_tate, (36)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (36)*3 +a_yoko, (64)*3 +a_tate, (48)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (60-3)*3 +a_yoko, (64-3)*3 +a_tate, (60+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot -- NOTE(review): x=60 matches the LEFT shin end (right shin ends near x=48); foot ovals may be swapped, as in frame 31 -- confirm
    cv.create_line ( (43)*3 +a_yoko, (52)*3 +a_tate, (49)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (49)*3 +a_yoko, (64)*3 +a_tate, (60)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (49-3)*3 +a_yoko, (64-3)*3 +a_tate, (49+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (23-12)*3 +a_yoko, (35-12)*3 +a_tate, (23+12)*3 +a_yoko, (35+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (23-10)*3 +a_yoko, (35-10)*3 +a_tate, (23+10)*3 +a_yoko, (35+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (23 )*3 +a_yoko, (35 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 34:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (23)*3 +a_yoko, (37)*3 +a_tate, (41)*3 +a_yoko, (57)*3 +a_tate, (47)*3 +a_yoko, (50)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (44)*3 +a_tate, (30)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (30)*3 +a_yoko, (51)*3 +a_tate, (25)*3 +a_yoko, (59)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (24-3)*3 +a_yoko, (60-3)*3 +a_tate, (24+3)*3 +a_yoko, (60+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (34)*3 +a_yoko, (44)*3 +a_tate, (38)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (38)*3 +a_yoko, (51)*3 +a_tate, (37)*3 +a_yoko, (56)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (57-3)*3 +a_tate, (36+3)*3 +a_yoko, (57+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (43)*3 +a_yoko, (52)*3 +a_tate, (29)*3 +a_yoko, (54)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (29)*3 +a_yoko, (54)*3 +a_tate, (36)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (57-3)*3 +a_yoko, (64-3)*3 +a_tate, (57+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot -- NOTE(review): x=57 matches the LEFT shin end; foot ovals may be swapped, as in frames 31/33 -- confirm
    cv.create_line ( (43)*3 +a_yoko, (52)*3 +a_tate, (45)*3 +a_yoko, (64)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (45)*3 +a_yoko, (64)*3 +a_tate, (56)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (36-3)*3 +a_yoko, (64-3)*3 +a_tate, (36+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (23-12)*3 +a_yoko, (35-12)*3 +a_tate, (23+12)*3 +a_yoko, (35+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (23-10)*3 +a_yoko, (35-10)*3 +a_tate, (23+10)*3 +a_yoko, (35+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (23 )*3 +a_yoko, (35 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 35:
    cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow
    cv.create_polygon ( (18)*3 +a_yoko, (36)*3 +a_tate, (32)*3 +a_yoko, (59)*3 +a_tate, (39)*3 +a_yoko, (52)*3 +a_tate ) # torso
    cv.create_line ( (24)*3 +a_yoko, (43)*3 +a_tate, (34)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (34)*3 +a_yoko, (40)*3 +a_tate, (36)*3 +a_yoko, (46)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (37-3)*3 +a_yoko, (48-3)*3 +a_tate, (37+3)*3 +a_yoko, (48+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (24)*3 +a_yoko, (44)*3 +a_tate, (19)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (19)*3 +a_yoko, (53)*3 +a_tate, (11)*3 +a_yoko, (57)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (10-3)*3 +a_yoko, (58-3)*3 +a_tate, (10+3)*3 +a_yoko, (58+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (35)*3 +a_yoko, (54)*3 +a_tate, (21)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (21)*3 +a_yoko, (53)*3 +a_tate, (20)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (18-3)*3 +a_yoko, (64-3)*3 +a_tate, (18+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (35)*3 +a_yoko, (54)*3 +a_tate, (31)*3 +a_yoko, (65)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (31)*3 +a_yoko, (65)*3 +a_tate, (42)*3 +a_yoko, (63)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (42-3)*3 +a_yoko, (64-3)*3 +a_tate, (42+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (14-12)*3 +a_yoko, (34-12)*3 +a_tate, (14+12)*3 +a_yoko, (34+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (14-10)*3 +a_yoko, (34-10)*3 +a_tate, (14+10)*3 +a_yoko, (34+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (14 )*3 +a_yoko, (34 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
# Opening animation, frames 41-50. The shadow is commented out in every frame here --
# presumably because the figure is airborne/off the ground during the opening; confirm.
# Frames 42/45/49, 43/47 and 44/48 are byte-identical -- the pose cycle is repeated.
if tekuteku == 41: # moves up 2 rows (-2)
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (11)*3 +a_tate, (22)*3 +a_yoko, (34)*3 +a_tate, (31)*3 +a_yoko, (38)*3 +a_tate ) # torso
    cv.create_line ( (31)*3 +a_yoko, (24)*3 +a_tate, (21)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (21)*3 +a_yoko, (20)*3 +a_tate, (12)*3 +a_yoko, (17)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (10-3)*3 +a_yoko, (15-3)*3 +a_tate, (10+3)*3 +a_yoko, (15+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (26)*3 +a_tate, (39)*3 +a_yoko, (29)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (39)*3 +a_yoko, (29)*3 +a_tate, (37)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (37-3)*3 +a_yoko, (35-3)*3 +a_tate, (39+3)*3 +a_yoko, (44+3)*3 +a_tate, fill='black') # left fist -- NOTE(review): corners (37-3,35-3)..(39+3,44+3) don't share a center like every other fist oval; possible typo -- confirm
    cv.create_line ( (39)*3 +a_yoko, (35)*3 +a_tate, (39)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # right thigh -- NOTE(review): zero-length line (both ends at (39,35)); draws a dot, possibly unintended -- confirm
    cv.create_line ( (39)*3 +a_yoko, (31)*3 +a_tate, (35)*3 +a_yoko, (43)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (34-3)*3 +a_yoko, (34-3)*3 +a_tate, (34+3)*3 +a_yoko, (34+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (28)*3 +a_yoko, (35)*3 +a_tate, (42)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (42)*3 +a_yoko, (35)*3 +a_tate, (37)*3 +a_yoko, (45)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (36-3)*3 +a_yoko, (46-3)*3 +a_tate, (36+3)*3 +a_yoko, (46+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (41-12)*3 +a_yoko, (14-12)*3 +a_tate, (41+12)*3 +a_yoko, (14+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (41-10)*3 +a_yoko, (14-10)*3 +a_tate, (41+10)*3 +a_yoko, (14+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (41 )*3 +a_yoko, (14 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 42:
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (16)*3 +a_tate, (22)*3 +a_yoko, (39)*3 +a_tate, (31)*3 +a_yoko, (43)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (28)*3 +a_tate, (22)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (24)*3 +a_tate, (13)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (19-3)*3 +a_tate, (12+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (40)*3 +a_yoko, (31)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (31)*3 +a_tate, (37)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (36-3)*3 +a_tate, (36+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (28)*3 +a_yoko, (39)*3 +a_tate, (33)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (33)*3 +a_yoko, (51)*3 +a_tate, (31)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (31-3)*3 +a_yoko, (64-3)*3 +a_tate, (31+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (24)*3 +a_yoko, (38)*3 +a_tate, (29)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (29)*3 +a_yoko, (51)*3 +a_tate, (28)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (27-3)*3 +a_yoko, (64-3)*3 +a_tate, (27+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (17-12)*3 +a_tate, (42+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (17-10)*3 +a_tate, (42+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 43:
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (16)*3 +a_tate, (22)*3 +a_yoko, (39)*3 +a_tate, (31)*3 +a_yoko, (43)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (28)*3 +a_tate, (22)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (24)*3 +a_tate, (13)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (19-3)*3 +a_tate, (12+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (40)*3 +a_yoko, (31)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (31)*3 +a_tate, (37)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (36-3)*3 +a_tate, (36+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (28)*3 +a_yoko, (39)*3 +a_tate, (42)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (42)*3 +a_yoko, (39)*3 +a_tate, (39)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (40-3)*3 +a_yoko, (51-3)*3 +a_tate, (40+3)*3 +a_yoko, (51+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (24)*3 +a_yoko, (38)*3 +a_tate, (29)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (29)*3 +a_yoko, (51)*3 +a_tate, (28)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (27-3)*3 +a_yoko, (64-3)*3 +a_tate, (27+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (17-12)*3 +a_tate, (42+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (17-10)*3 +a_tate, (42+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 44:
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (17)*3 +a_tate, (22)*3 +a_yoko, (40)*3 +a_tate, (31)*3 +a_yoko, (44)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (22)*3 +a_yoko, (25)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (25)*3 +a_tate, (13)*3 +a_yoko, (21)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (20-3)*3 +a_tate, (12+3)*3 +a_yoko, (20+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (30)*3 +a_tate, (40)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (32)*3 +a_tate, (37)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (37-3)*3 +a_tate, (36+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (29)*3 +a_yoko, (39)*3 +a_tate, (38)*3 +a_yoko, (49)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (38)*3 +a_yoko, (49)*3 +a_tate, (35)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (34-3)*3 +a_yoko, (60-3)*3 +a_tate, (34+3)*3 +a_yoko, (60+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (24)*3 +a_yoko, (39)*3 +a_tate, (19)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (19)*3 +a_yoko, (51)*3 +a_tate, (13)*3 +a_yoko, (61)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (12-3)*3 +a_yoko, (64-3)*3 +a_tate, (12+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (18-12)*3 +a_tate, (42+12)*3 +a_yoko, (18+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (18-10)*3 +a_tate, (42+10)*3 +a_yoko, (18+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (18 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 45: # identical to frame 42 (pose held)
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (16)*3 +a_tate, (22)*3 +a_yoko, (39)*3 +a_tate, (31)*3 +a_yoko, (43)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (28)*3 +a_tate, (22)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (24)*3 +a_tate, (13)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (19-3)*3 +a_tate, (12+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (40)*3 +a_yoko, (31)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (31)*3 +a_tate, (37)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (36-3)*3 +a_tate, (36+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (28)*3 +a_yoko, (39)*3 +a_tate, (33)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (33)*3 +a_yoko, (51)*3 +a_tate, (31)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (31-3)*3 +a_yoko, (64-3)*3 +a_tate, (31+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (24)*3 +a_yoko, (38)*3 +a_tate, (29)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (29)*3 +a_yoko, (51)*3 +a_tate, (28)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (27-3)*3 +a_yoko, (64-3)*3 +a_tate, (27+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (17-12)*3 +a_tate, (42+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (17-10)*3 +a_tate, (42+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 46:
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (16)*3 +a_tate, (22)*3 +a_yoko, (39)*3 +a_tate, (31)*3 +a_yoko, (43)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (28)*3 +a_tate, (22)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (24)*3 +a_tate, (13)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (19-3)*3 +a_tate, (12+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (40)*3 +a_yoko, (31)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (31)*3 +a_tate, (37)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (36-3)*3 +a_tate, (36+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (26)*3 +a_yoko, (41)*3 +a_tate, (20)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (20)*3 +a_yoko, (52)*3 +a_tate, (14)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (13-3)*3 +a_yoko, (64-3)*3 +a_tate, (13+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (29)*3 +a_yoko, (41)*3 +a_tate, (39)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (39)*3 +a_yoko, (50)*3 +a_tate, (32)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (32-3)*3 +a_yoko, (62-3)*3 +a_tate, (32+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (17-12)*3 +a_tate, (42+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (17-10)*3 +a_tate, (42+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 47: # identical to frame 43 (pose held)
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (16)*3 +a_tate, (22)*3 +a_yoko, (39)*3 +a_tate, (31)*3 +a_yoko, (43)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (28)*3 +a_tate, (22)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (24)*3 +a_tate, (13)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (19-3)*3 +a_tate, (12+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (40)*3 +a_yoko, (31)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (31)*3 +a_tate, (37)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (36-3)*3 +a_tate, (36+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (28)*3 +a_yoko, (39)*3 +a_tate, (42)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (42)*3 +a_yoko, (39)*3 +a_tate, (39)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (40-3)*3 +a_yoko, (51-3)*3 +a_tate, (40+3)*3 +a_yoko, (51+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (24)*3 +a_yoko, (38)*3 +a_tate, (29)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (29)*3 +a_yoko, (51)*3 +a_tate, (28)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (27-3)*3 +a_yoko, (64-3)*3 +a_tate, (27+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (17-12)*3 +a_tate, (42+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (17-10)*3 +a_tate, (42+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 48: # identical to frame 44 (pose held)
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (17)*3 +a_tate, (22)*3 +a_yoko, (40)*3 +a_tate, (31)*3 +a_yoko, (44)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (22)*3 +a_yoko, (25)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (25)*3 +a_tate, (13)*3 +a_yoko, (21)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (20-3)*3 +a_tate, (12+3)*3 +a_yoko, (20+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (30)*3 +a_tate, (40)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (32)*3 +a_tate, (37)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (37-3)*3 +a_tate, (36+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (29)*3 +a_yoko, (39)*3 +a_tate, (38)*3 +a_yoko, (49)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (38)*3 +a_yoko, (49)*3 +a_tate, (35)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (34-3)*3 +a_yoko, (60-3)*3 +a_tate, (34+3)*3 +a_yoko, (60+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (24)*3 +a_yoko, (39)*3 +a_tate, (19)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (19)*3 +a_yoko, (51)*3 +a_tate, (13)*3 +a_yoko, (61)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (12-3)*3 +a_yoko, (64-3)*3 +a_tate, (12+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (18-12)*3 +a_tate, (42+12)*3 +a_yoko, (18+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (18-10)*3 +a_tate, (42+10)*3 +a_yoko, (18+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (18 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 49: # identical to frames 42/45 (pose held)
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (16)*3 +a_tate, (22)*3 +a_yoko, (39)*3 +a_tate, (31)*3 +a_yoko, (43)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (28)*3 +a_tate, (22)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (24)*3 +a_tate, (13)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (19-3)*3 +a_tate, (12+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (40)*3 +a_yoko, (31)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (31)*3 +a_tate, (37)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (36-3)*3 +a_tate, (36+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (28)*3 +a_yoko, (39)*3 +a_tate, (33)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (33)*3 +a_yoko, (51)*3 +a_tate, (31)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (31-3)*3 +a_yoko, (64-3)*3 +a_tate, (31+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (24)*3 +a_yoko, (38)*3 +a_tate, (29)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (29)*3 +a_yoko, (51)*3 +a_tate, (28)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (27-3)*3 +a_yoko, (64-3)*3 +a_tate, (27+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (17-12)*3 +a_tate, (42+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (17-10)*3 +a_tate, (42+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
if tekuteku == 50: # identical to frame 46 (pose held)
    # cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) # shadow (disabled)
    cv.create_polygon ( (37)*3 +a_yoko, (16)*3 +a_tate, (22)*3 +a_yoko, (39)*3 +a_tate, (31)*3 +a_yoko, (43)*3 +a_tate ) # torso
    cv.create_line ( (33)*3 +a_yoko, (28)*3 +a_tate, (22)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) # right upper arm
    cv.create_line ( (22)*3 +a_yoko, (24)*3 +a_tate, (13)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) # right lower arm
    cv.create_oval ( (12-3)*3 +a_yoko, (19-3)*3 +a_tate, (12+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') # right fist
    cv.create_line ( (33)*3 +a_yoko, (29)*3 +a_tate, (40)*3 +a_yoko, (31)*3 +a_tate, fill='black', width = 5 ) # left upper arm
    cv.create_line ( (40)*3 +a_yoko, (31)*3 +a_tate, (37)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) # left lower arm
    cv.create_oval ( (36-3)*3 +a_yoko, (36-3)*3 +a_tate, (36+3)*3 +a_yoko, (36+3)*3 +a_tate, fill='black') # left fist
    cv.create_line ( (26)*3 +a_yoko, (41)*3 +a_tate, (20)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) # right thigh
    cv.create_line ( (20)*3 +a_yoko, (52)*3 +a_tate, (14)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) # right shin
    cv.create_oval ( (13-3)*3 +a_yoko, (64-3)*3 +a_tate, (13+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') # right foot
    cv.create_line ( (29)*3 +a_yoko, (41)*3 +a_tate, (39)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) # left thigh
    cv.create_line ( (39)*3 +a_yoko, (50)*3 +a_tate, (32)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) # left shin
    cv.create_oval ( (32-3)*3 +a_yoko, (62-3)*3 +a_tate, (32+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') # left foot
    cv.create_oval ( (42-12)*3 +a_yoko, (17-12)*3 +a_tate, (42+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') # head (outline)
    cv.create_oval ( (42-10)*3 +a_yoko, (17-10)*3 +a_tate, (42+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') # head (face)
    cv.create_text ( (42 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) # face character "so"
#オープニング アニメーション
if tekuteku == 51: #上に2行く(-2)
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (11)*3 +a_tate, (35)*3 +a_yoko, (40)*3 +a_tate, (43)*3 +a_yoko, (35)*3 +a_tate ) #体
cv.create_line ( (35)*3 +a_yoko, (25)*3 +a_tate, (45)*3 +a_yoko, (21)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (45)*3 +a_yoko, (21)*3 +a_tate, (54)*3 +a_yoko, (14)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (54-3)*3 +a_yoko, (14-3)*3 +a_tate, (54+3)*3 +a_yoko, (16+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (33)*3 +a_yoko, (26)*3 +a_tate, (26)*3 +a_yoko, (29)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (29)*3 +a_tate, (24)*3 +a_yoko, (34)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (24-3)*3 +a_yoko, (35-3)*3 +a_tate, (24+3)*3 +a_yoko, (35+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (39)*3 +a_yoko, (35)*3 +a_tate, (25)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (25)*3 +a_yoko, (35)*3 +a_tate, (30)*3 +a_yoko, (45)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (31-3)*3 +a_yoko, (46-3)*3 +a_tate, (31+3)*3 +a_yoko, (46+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (41)*3 +a_yoko, (33)*3 +a_tate, (27)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (27)*3 +a_yoko, (32)*3 +a_tate, (33)*3 +a_yoko, (43)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (34-3)*3 +a_yoko, (44-3)*3 +a_tate, (34+3)*3 +a_yoko, (44+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (14-12)*3 +a_tate, (25+12)*3 +a_yoko, (14+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (14-10)*3 +a_tate, (25+10)*3 +a_yoko, (14+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (14 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 52:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (16)*3 +a_tate, (35)*3 +a_yoko, (43)*3 +a_tate, (44)*3 +a_yoko, (38)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (28)*3 +a_tate, (44)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (24)*3 +a_tate, (52)*3 +a_yoko, (19)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (19-3)*3 +a_tate, (53+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (29)*3 +a_tate, (26)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (32)*3 +a_tate, (29)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (37-3)*3 +a_tate, (30+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (36)*3 +a_yoko, (39)*3 +a_tate, (32)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (32)*3 +a_yoko, (52)*3 +a_tate, (34)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (34-3)*3 +a_yoko, (64-3)*3 +a_tate, (34+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (41)*3 +a_yoko, (38)*3 +a_tate, (37)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (37)*3 +a_yoko, (50)*3 +a_tate, (39)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (39-3)*3 +a_yoko, (64-3)*3 +a_tate, (39+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (17-12)*3 +a_tate, (25+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (17-10)*3 +a_tate, (25+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 53:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (16)*3 +a_tate, (35)*3 +a_yoko, (43)*3 +a_tate, (44)*3 +a_yoko, (38)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (28)*3 +a_tate, (44)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (24)*3 +a_tate, (52)*3 +a_yoko, (19)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (19-3)*3 +a_tate, (53+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (29)*3 +a_tate, (26)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (32)*3 +a_tate, (29)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (37-3)*3 +a_tate, (30+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (37)*3 +a_yoko, (39)*3 +a_tate, (23)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (23)*3 +a_yoko, (39)*3 +a_tate, (28)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (27-3)*3 +a_yoko, (50-3)*3 +a_tate, (27+3)*3 +a_yoko, (50+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (41)*3 +a_yoko, (38)*3 +a_tate, (37)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (37)*3 +a_yoko, (50)*3 +a_tate, (39)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (39-3)*3 +a_yoko, (64-3)*3 +a_tate, (39+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (17-12)*3 +a_tate, (25+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (17-10)*3 +a_tate, (25+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 54:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (17)*3 +a_tate, (35)*3 +a_yoko, (44)*3 +a_tate, (44)*3 +a_yoko, (39)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (29)*3 +a_tate, (44)*3 +a_yoko, (25)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (25)*3 +a_tate, (52)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (20-3)*3 +a_tate, (53+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (30)*3 +a_tate, (26)*3 +a_yoko, (33)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (33)*3 +a_tate, (29)*3 +a_yoko, (37)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (38-3)*3 +a_tate, (30+3)*3 +a_yoko, (38+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (38)*3 +a_yoko, (41)*3 +a_tate, (26)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (26)*3 +a_yoko, (50)*3 +a_tate, (30)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (31-3)*3 +a_yoko, (62-3)*3 +a_tate, (31+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (40)*3 +a_yoko, (40)*3 +a_tate, (47)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (47)*3 +a_yoko, (52)*3 +a_tate, (53)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (54-3)*3 +a_yoko, (64-3)*3 +a_tate, (54+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (18-12)*3 +a_tate, (25+12)*3 +a_yoko, (18+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (18-10)*3 +a_tate, (25+10)*3 +a_yoko, (18+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (18 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 55:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (16)*3 +a_tate, (35)*3 +a_yoko, (43)*3 +a_tate, (44)*3 +a_yoko, (38)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (28)*3 +a_tate, (44)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (24)*3 +a_tate, (52)*3 +a_yoko, (19)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (19-3)*3 +a_tate, (53+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (29)*3 +a_tate, (26)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (32)*3 +a_tate, (29)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (37-3)*3 +a_tate, (30+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (37)*3 +a_yoko, (38)*3 +a_tate, (37)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (37)*3 +a_yoko, (52)*3 +a_tate, (40)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (34-3)*3 +a_yoko, (50-3)*3 +a_tate, (34+3)*3 +a_yoko, (50+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (41)*3 +a_yoko, (38)*3 +a_tate, (28)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (28)*3 +a_yoko, (40)*3 +a_tate, (33)*3 +a_yoko, (49)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (40-3)*3 +a_yoko, (64-3)*3 +a_tate, (40+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (17-12)*3 +a_tate, (25+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (17-10)*3 +a_tate, (25+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 56:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (19)*3 +a_tate, (35)*3 +a_yoko, (46)*3 +a_tate, (44)*3 +a_yoko, (41)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (31)*3 +a_tate, (44)*3 +a_yoko, (27)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (27)*3 +a_tate, (52)*3 +a_yoko, (22)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (22-3)*3 +a_tate, (53+3)*3 +a_yoko, (22+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (32)*3 +a_tate, (26)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (35)*3 +a_tate, (29)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (40-3)*3 +a_tate, (30+3)*3 +a_yoko, (40+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (38)*3 +a_yoko, (41)*3 +a_tate, (45)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (45)*3 +a_yoko, (53)*3 +a_tate, (50)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (51-3)*3 +a_yoko, (64-3)*3 +a_tate, (51+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (44)*3 +a_yoko, (41)*3 +a_tate, (25)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (25)*3 +a_yoko, (51)*3 +a_tate, (31)*3 +a_yoko, (61)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (31-3)*3 +a_yoko, (63-3)*3 +a_tate, (31+3)*3 +a_yoko, (63+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (20-12)*3 +a_tate, (25+12)*3 +a_yoko, (20+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (20-10)*3 +a_tate, (25+10)*3 +a_yoko, (20+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (20 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 57:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (16)*3 +a_tate, (35)*3 +a_yoko, (43)*3 +a_tate, (44)*3 +a_yoko, (38)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (28)*3 +a_tate, (44)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (24)*3 +a_tate, (52)*3 +a_yoko, (19)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (19-3)*3 +a_tate, (53+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (29)*3 +a_tate, (26)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (32)*3 +a_tate, (29)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (37-3)*3 +a_tate, (30+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (37)*3 +a_yoko, (39)*3 +a_tate, (23)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (23)*3 +a_yoko, (39)*3 +a_tate, (28)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (27-3)*3 +a_yoko, (50-3)*3 +a_tate, (27+3)*3 +a_yoko, (50+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (41)*3 +a_yoko, (38)*3 +a_tate, (37)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (37)*3 +a_yoko, (50)*3 +a_tate, (39)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (39-3)*3 +a_yoko, (64-3)*3 +a_tate, (39+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (17-12)*3 +a_tate, (25+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (17-10)*3 +a_tate, (25+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 58:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (17)*3 +a_tate, (35)*3 +a_yoko, (44)*3 +a_tate, (44)*3 +a_yoko, (39)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (29)*3 +a_tate, (44)*3 +a_yoko, (25)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (25)*3 +a_tate, (52)*3 +a_yoko, (20)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (20-3)*3 +a_tate, (53+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (30)*3 +a_tate, (26)*3 +a_yoko, (33)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (33)*3 +a_tate, (29)*3 +a_yoko, (37)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (38-3)*3 +a_tate, (30+3)*3 +a_yoko, (38+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (38)*3 +a_yoko, (41)*3 +a_tate, (26)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (26)*3 +a_yoko, (50)*3 +a_tate, (30)*3 +a_yoko, (60)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (31-3)*3 +a_yoko, (62-3)*3 +a_tate, (31+3)*3 +a_yoko, (62+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (40)*3 +a_yoko, (40)*3 +a_tate, (47)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (47)*3 +a_yoko, (52)*3 +a_tate, (53)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (54-3)*3 +a_yoko, (64-3)*3 +a_tate, (54+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (18-12)*3 +a_tate, (25+12)*3 +a_yoko, (18+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (18-10)*3 +a_tate, (25+10)*3 +a_yoko, (18+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (18 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 59:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (16)*3 +a_tate, (35)*3 +a_yoko, (43)*3 +a_tate, (44)*3 +a_yoko, (38)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (28)*3 +a_tate, (44)*3 +a_yoko, (24)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (24)*3 +a_tate, (52)*3 +a_yoko, (19)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (19-3)*3 +a_tate, (53+3)*3 +a_yoko, (19+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (29)*3 +a_tate, (26)*3 +a_yoko, (32)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (32)*3 +a_tate, (29)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (37-3)*3 +a_tate, (30+3)*3 +a_yoko, (37+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (37)*3 +a_yoko, (38)*3 +a_tate, (37)*3 +a_yoko, (52)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (37)*3 +a_yoko, (52)*3 +a_tate, (40)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (34-3)*3 +a_yoko, (50-3)*3 +a_tate, (34+3)*3 +a_yoko, (50+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (41)*3 +a_yoko, (38)*3 +a_tate, (28)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (28)*3 +a_yoko, (40)*3 +a_tate, (33)*3 +a_yoko, (49)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (40-3)*3 +a_yoko, (64-3)*3 +a_tate, (40+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (17-12)*3 +a_tate, (25+12)*3 +a_yoko, (17+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (17-10)*3 +a_tate, (25+10)*3 +a_yoko, (17+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 60:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (28)*3 +a_yoko, (19)*3 +a_tate, (35)*3 +a_yoko, (46)*3 +a_tate, (44)*3 +a_yoko, (41)*3 +a_tate ) #体
cv.create_line ( (34)*3 +a_yoko, (31)*3 +a_tate, (44)*3 +a_yoko, (27)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (44)*3 +a_yoko, (27)*3 +a_tate, (52)*3 +a_yoko, (22)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (53-3)*3 +a_yoko, (22-3)*3 +a_tate, (53+3)*3 +a_yoko, (22+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (32)*3 +a_yoko, (32)*3 +a_tate, (26)*3 +a_yoko, (35)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (26)*3 +a_yoko, (35)*3 +a_tate, (29)*3 +a_yoko, (39)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (30-3)*3 +a_yoko, (40-3)*3 +a_tate, (30+3)*3 +a_yoko, (40+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (38)*3 +a_yoko, (41)*3 +a_tate, (45)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (45)*3 +a_yoko, (53)*3 +a_tate, (50)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (51-3)*3 +a_yoko, (64-3)*3 +a_tate, (51+3)*3 +a_yoko, (64+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (44)*3 +a_yoko, (41)*3 +a_tate, (25)*3 +a_yoko, (51)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (25)*3 +a_yoko, (51)*3 +a_tate, (31)*3 +a_yoko, (61)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (31-3)*3 +a_yoko, (63-3)*3 +a_tate, (31+3)*3 +a_yoko, (63+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (20-12)*3 +a_tate, (25+12)*3 +a_yoko, (20+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (20-10)*3 +a_tate, (25+10)*3 +a_yoko, (20+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (20 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 61:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (25)*3 +a_yoko, (16)*3 +a_tate, (23)*3 +a_yoko, (44)*3 +a_tate, (33)*3 +a_yoko, (43)*3 +a_tate ) #体
cv.create_line ( (25)*3 +a_yoko, (31)*3 +a_tate, (15)*3 +a_yoko, (29)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (15)*3 +a_yoko, (29)*3 +a_tate, ( 8)*3 +a_yoko, (25)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( ( 7-3)*3 +a_yoko, (24-3)*3 +a_tate, ( 7+3)*3 +a_yoko, (24+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (28)*3 +a_yoko, (31)*3 +a_tate, (36)*3 +a_yoko, (27)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (36)*3 +a_yoko, (27)*3 +a_tate, (43)*3 +a_yoko, (21)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (44-3)*3 +a_yoko, (20-3)*3 +a_tate, (44+3)*3 +a_yoko, (20+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (29)*3 +a_yoko, (40)*3 +a_tate, (31)*3 +a_yoko, (53)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (31)*3 +a_yoko, (53)*3 +a_tate, (37)*3 +a_yoko, (62)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (37-3)*3 +a_yoko, (63-3)*3 +a_tate, (37+3)*3 +a_yoko, (63+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (26)*3 +a_yoko, (41)*3 +a_tate, (14)*3 +a_yoko, (47)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (14)*3 +a_yoko, (47)*3 +a_tate, (11)*3 +a_yoko, (55)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (22-3)*3 +a_yoko, (55-3)*3 +a_tate, (22+3)*3 +a_yoko, (55+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (25-12)*3 +a_yoko, (16-12)*3 +a_tate, (25+12)*3 +a_yoko, (16+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (25-10)*3 +a_yoko, (16-10)*3 +a_tate, (25+10)*3 +a_yoko, (16+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (25 )*3 +a_yoko, (16 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
if tekuteku == 71:
# cv.create_oval ( (15)*3 +a_yoko, (66)*3 +a_tate, (45)*3 +a_yoko, (70)*3 +a_tate, fill='black', width=0 ) #影
cv.create_polygon ( (25)*3 +a_yoko, (18)*3 +a_tate, (26)*3 +a_yoko, (44)*3 +a_tate, (35)*3 +a_yoko, (42)*3 +a_tate ) #体
cv.create_line ( (27)*3 +a_yoko, (28)*3 +a_tate, (28)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) #右腕上
cv.create_line ( (28)*3 +a_yoko, (36)*3 +a_tate, (24)*3 +a_yoko, (41)*3 +a_tate, fill='black', width = 5 ) #右腕下
cv.create_oval ( (14-3)*3 +a_yoko, (42-3)*3 +a_tate, (14+3)*3 +a_yoko, (42+3)*3 +a_tate, fill='black') #右拳
cv.create_line ( (29)*3 +a_yoko, (28)*3 +a_tate, (29)*3 +a_yoko, (36)*3 +a_tate, fill='black', width = 5 ) #左腕上
cv.create_line ( (29)*3 +a_yoko, (36)*3 +a_tate, (29)*3 +a_yoko, (40)*3 +a_tate, fill='black', width = 5 ) #左腕下
cv.create_oval ( (29-3)*3 +a_yoko, (40-3)*3 +a_tate, (29+3)*3 +a_yoko, (40+3)*3 +a_tate, fill='black') #左拳
cv.create_line ( (29)*3 +a_yoko, (40)*3 +a_tate, (27)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #右脚上
cv.create_line ( (27)*3 +a_yoko, (50)*3 +a_tate, (27)*3 +a_yoko, (61)*3 +a_tate, fill='black', width = 5 ) #右脚下
cv.create_oval ( (32-3)*3 +a_yoko, (60-3)*3 +a_tate, (32+3)*3 +a_yoko, (60+3)*3 +a_tate, fill='black') #右甲
cv.create_line ( (33)*3 +a_yoko, (40)*3 +a_tate, (32)*3 +a_yoko, (50)*3 +a_tate, fill='black', width = 5 ) #左脚上
cv.create_line ( (32)*3 +a_yoko, (50)*3 +a_tate, (32)*3 +a_yoko, (59)*3 +a_tate, fill='black', width = 5 ) #左脚下
cv.create_oval ( (28-3)*3 +a_yoko, (61-3)*3 +a_tate, (28+3)*3 +a_yoko, (61+3)*3 +a_tate, fill='black') #左甲
cv.create_oval ( (21-12)*3 +a_yoko, (17-12)*3 +a_tate, (21+12)*3 +a_yoko, (27+12)*3 +a_tate, fill='black') #頭
cv.create_oval ( (21-10)*3 +a_yoko, (17-10)*3 +a_tate, (21+10)*3 +a_yoko, (27+10)*3 +a_tate, fill='white') #頭
cv.create_text ( (21 )*3 +a_yoko, (17 )*3 +a_tate, text="そ", font=("Helvetica", 45,"bold" ) ) #そ
# --- Game state (module-level variables) ---
tekuteku = 1      # animation frame counter for the runner sprite
a_tate = 100      # runner's vertical pixel offset on the canvas ("tate" = vertical)
a_yoko = 610      # runner's horizontal pixel offset on the canvas ("yoko" = horizontal)
a_iti = 1         # runner position index -- usage not visible in this chunk; TODO confirm
mode = 1          # movement mode: 1 = normal run, 2 = action started by key 1,
                  # 4 = hit state (scrolling stops, set after a life is lost)
                  # -- mode 2/3 semantics presumed from mode_change()/hasiru(); verify
move = -30        # horizontal scroll step applied per tick (negative = leftward)
line = 3          # current lane (1..5); mapped to a_tate inside hasiru()
hiscore = 0       # best score across runs (shown on the title screen)
score = 0         # current run score
life = 3          # remaining lives
runspeed = 50     # frame delay in ms passed to win.after(); smaller = faster/harder
scene = 1         # 1 = title/opening screen, 2 = in-game
op_count = 1      # frame counter used after game over before returning to the title
# --- Restart ---
def re_start():
    """Reset every piece of mutable game state and begin a fresh run.

    Leaves the persistent high score untouched and switches to the
    gameplay scene (scene 2).
    """
    global tekuteku, a_tate, a_yoko, a_iti, mode, move, line
    global score, life, runspeed, scene, op_count, Zmap

    tekuteku = 1
    a_tate = 100
    a_yoko = 610
    a_iti = 1
    mode = 1
    move = -30
    line = 3
    score = 0
    life = 3
    runspeed = 1000   # long initial delay; nanido() tightens it as score grows
    scene = 2
    # Obstacle layout for the first screen. 0 = empty cell; the values
    # 2 and 3 are presumably distinct obstacle kinds -- confirm against map().
    Zmap = [0, 2, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 3,
            0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0]
# --- Advance the runner's animation and switch movement modes ---
def mode_change():
    """Step the frame counter and handle mode transitions at frame
    boundaries.

    Frames 7, 17 and 36 all mark the end of an animation sequence and
    drop back to normal running (mode 1, frame 1). Frame 27 marks the
    end of the collision sequence: enter hit mode (4), jump to frame 31
    and take one life away.
    """
    global tekuteku, mode, life

    tekuteku += 1
    if tekuteku in (7, 17, 36):
        # End of a run/action/knock-down cycle: back to normal running.
        mode = 1
        tekuteku = 1
    elif tekuteku == 27:
        # Collision sequence finished: freeze scrolling and lose a life.
        mode = 4
        tekuteku = 31
        life = life - 1
# --- Keyboard handlers ---
def input_key1(event):
    """Key '1': start the mode-2 animation (frames 11..16), but only
    when the runner is currently in normal running mode."""
    global tekuteku, mode, life
    if mode != 1:
        return
    mode = 2
    tekuteku = 11
def input_key2(event):
    """Key '2': move the runner up one lane (lower line number = higher
    on the canvas); lane 1 is the upper limit."""
    global line
    line = line - 1 if line >= 2 else line
def input_key3(event):
    """Key '3': move the runner down one lane (higher line number =
    lower on the canvas); lane 5 is the lower limit."""
    global line
    line = line + 1 if line <= 4 else line
def input_key4(event):  # reset button
    """Key '4': restart the game via re_start()."""
    re_start()

# Keyboard bindings:
#   1 = start the mode-2 action, 2 = lane up, 3 = lane down, 4 = restart.
win.bind('<Key-1>', input_key1)
win.bind('<Key-2>', input_key2)
win.bind('<Key-3>', input_key3)
win.bind('<Key-4>', input_key4)
# --- Running-motion update ---
def hasiru():
    """Advance the runner one tick.

    Chooses the horizontal scroll step from the current mode, applies
    it, wraps the runner back to the right edge once it has scrolled
    far enough off the left side (loading a random obstacle row from
    Amap), and pins the vertical position to the current lane.
    """
    global a_yoko, a_tate, mode, move, line, Zmap

    # Mode 4 (hit) freezes the scroll; modes 1-3 all scroll 30 px left.
    if mode == 4:
        move = 0
    elif mode in (1, 2, 3):
        move = -30

    a_yoko = a_yoko + move

    # Wrap around at the left edge and pick a fresh obstacle row.
    if -400 <= a_yoko <= -300:
        a_yoko = 700
        Zmap = copy.deepcopy(Amap[random.randint(0, 29)])

    # Lane number -> vertical canvas offset; unknown lanes leave a_tate alone.
    lane_to_tate = {1: 0, 2: 50, 3: 100, 4: 150, 5: 200}
    a_tate = lane_to_tate.get(line, a_tate)
# --- Difficulty control ---
def nanido():
    """Tighten the frame delay (runspeed) as the score grows.

    Each score band maps to a fixed delay in ms; scores of 1201 and
    above reach the fastest setting (50 ms). Scores below 0 fall into
    no band and leave runspeed unchanged.
    """
    global score, runspeed

    # ((low, high) inclusive score band, delay in ms)
    tiers = (
        ((0, 200), 130),
        ((201, 400), 110),
        ((401, 600), 90),
        ((601, 800), 80),
        ((801, 1000), 70),
        ((1001, 1200), 60),
    )
    for (low, high), delay in tiers:
        if low <= score <= high:
            runspeed = delay
            return
    if score >= 1201:
        runspeed = 50
# --- Opening / title screen ---
def OP():
    """Draw one frame of the title screen.

    Pins the runner at (250, 250), cycles the animation frame counter
    through the 41..60 title-animation range, draws the animated runner
    and then paints the title, start prompt and high score.
    """
    global runspeed, tekuteku, a_yoko, a_tate, hiscore

    runspeed = 200          # slow, fixed frame delay on the title screen
    tekuteku = tekuteku + 1
    a_tate = 250
    a_yoko = 250
    # Clamp the frame counter into the title-animation range 41..60.
    if tekuteku <= 40 or tekuteku == 61:
        tekuteku = 41
    runman()

    # Title glyphs drawn one at a time on alternating baselines.
    title_glyphs = ((110, 120, "流"), (210, 150, "し"), (310, 120, "そ"),
                    (410, 150, "う"), (510, 120, "め"), (610, 150, "ん"))
    for gx, gy, glyph in title_glyphs:
        cv.create_text(gx, gy, text=glyph, font=("Helvetica", 60, "bold"))
    cv.create_text(350, 220, text="PRESS START", font=("Helvetica", 30, "bold"))
    cv.create_text(550, 25, text="HI SCORE", font=("Helvetica", 20, "bold"))
    cv.create_text(670, 25, text=hiscore, font=("Helvetica", 25, "bold"))
# --- Post-game-over countdown ---
def OP_COUNT():
    """After the player runs out of lives, count frames and return to
    the title scene (scene 1) once 50 frames have elapsed; also keep
    the persistent high score up to date."""
    global life, op_count, scene, score, hiscore
    if life == 0:
        op_count = op_count + 1
        if op_count == 50:
            # Game-over state has been shown long enough: back to title.
            scene = 1
            op_count = 1
    # NOTE(review): the indentation of the two lines below is ambiguous in
    # this copy; placed at function level so the high score is refreshed on
    # every tick -- confirm against the original source.
    if hiscore <= score:
        hiscore = score
# --- Main game loop ---
def game_loop():
    """Render and advance the current scene, then reschedule itself
    after `runspeed` ms; shrinking runspeed raises the difficulty."""
    global scene
    if scene == 1:
        # Title screen: backdrop plus opening animation.
        draw_screen()
        OP()
    if scene == 2:
        # Gameplay: backdrop, background art, life HUD, obstacle map.
        draw_screen()
        haikei()
        life_draw()
        map()
        if life >= 1:
            # The runner only animates and moves while still alive.
            runman()
            hasiru()
            mode_change()
            nanido()
        # NOTE(review): OP_COUNT() must run when life == 0 (it drives the
        # return-to-title countdown), so it sits outside the life check;
        # the placement of touch() is inferred -- confirm against original.
        OP_COUNT()
        touch()
    win.after(runspeed, game_loop)  # runspeed is the delay; it controls difficulty
# Kick off the recurring draw/update loop (reschedules itself via win.after).
game_loop()
# Enter the Tk event loop and show the window.
win.mainloop()
| 77.569806
| 138
| 0.517387
| 22,354
| 116,122
| 2.506487
| 0.01266
| 0.109263
| 0.163948
| 0.120471
| 0.938997
| 0.913689
| 0.906265
| 0.869998
| 0.846047
| 0.83532
| 0
| 0.157827
| 0.254715
| 116,122
| 1,496
| 139
| 77.621658
| 0.489589
| 0.040526
| 0
| 0.496629
| 0
| 0
| 0.039721
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01573
| false
| 0
| 0.002247
| 0
| 0.017978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb97990ed3704f69a46d65c8dc98861538e9d411
| 25,358
|
py
|
Python
|
swagger_client/api/teams_api.py
|
swat5421/swagger_portainer
|
e18b287dc906e171077912677515469ee3f4e5c2
|
[
"RSA-MD"
] | null | null | null |
swagger_client/api/teams_api.py
|
swat5421/swagger_portainer
|
e18b287dc906e171077912677515469ee3f4e5c2
|
[
"RSA-MD"
] | null | null | null |
swagger_client/api/teams_api.py
|
swat5421/swagger_portainer
|
e18b287dc906e171077912677515469ee3f4e5c2
|
[
"RSA-MD"
] | null | null | null |
# coding: utf-8
"""
Portainer API
Portainer API is an HTTP API served by Portainer. It is used by the Portainer UI and everything you can do with the UI can be done using the HTTP API. Examples are available at https://gist.github.com/deviantony/77026d402366b4b43fa5918d41bc42f8 You can find out more about Portainer at [http://portainer.io](http://portainer.io) and get some support on [Slack](http://portainer.io/slack/). # Authentication Most of the API endpoints require to be authenticated as well as some level of authorization to be used. Portainer API uses JSON Web Token to manage authentication and thus requires you to provide a token in the **Authorization** header of each request with the **Bearer** authentication mechanism. Example: ``` Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6MSwidXNlcm5hbWUiOiJhZG1pbiIsInJvbGUiOjEsImV4cCI6MTQ5OTM3NjE1NH0.NJ6vE8FY1WG6jsRQzfMqeatJ4vh2TWAeeYfDhP71YEE ``` # Security Each API endpoint has an associated access policy, it is documented in the description of each endpoint. Different access policies are available: * Public access * Authenticated access * Restricted access * Administrator access ### Public access No authentication is required to access the endpoints with this access policy. ### Authenticated access Authentication is required to access the endpoints with this access policy. ### Restricted access Authentication is required to access the endpoints with this access policy. Extra-checks might be added to ensure access to the resource is granted. Returned data might also be filtered. ### Administrator access Authentication as well as an administrator role are required to access the endpoints with this access policy. # Execute Docker requests Portainer **DO NOT** expose specific endpoints to manage your Docker resources (create a container, remove a volume, etc...). Instead, it acts as a reverse-proxy to the Docker HTTP API. This means that you can execute Docker requests **via** the Portainer HTTP API. 
To do so, you can use the `/endpoints/{id}/docker` Portainer API endpoint (which is not documented below due to Swagger limitations). This endpoint has a restricted access policy so you still need to be authenticated to be able to query this endpoint. Any query on this endpoint will be proxied to the Docker API of the associated endpoint (requests and responses objects are the same as documented in the Docker API). **NOTE**: You can find more information on how to query the Docker API in the [Docker official documentation](https://docs.docker.com/engine/api/v1.30/) as well as in [this Portainer example](https://gist.github.com/deviantony/77026d402366b4b43fa5918d41bc42f8). # noqa: E501
OpenAPI spec version: 1.24.1
Contact: info@portainer.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class TeamsApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is supplied.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def team_create(self, body, **kwargs):  # noqa: E501
        """Create a new team  # noqa: E501

        Create a new team. **Access policy**: administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_create(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TeamCreateRequest body: Team details (required)
        :return: Team
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper always unwraps the HTTP envelope and returns data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.team_create_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.team_create_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def team_create_with_http_info(self, body, **kwargs):  # noqa: E501
        """Create a new team  # noqa: E501

        Create a new team. **Access policy**: administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_create_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TeamCreateRequest body: Team details (required)
        :return: Team
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument that is not declared for this endpoint.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method team_create" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `team_create`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwt']  # noqa: E501

        return self.api_client.call_api(
            '/teams', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Team',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def team_delete(self, id, **kwargs):  # noqa: E501
        """Remove a team  # noqa: E501

        Remove a team. **Access policy**: administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_delete(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.team_delete_with_http_info(id, **kwargs)  # noqa: E501
        else:
            (data) = self.team_delete_with_http_info(id, **kwargs)  # noqa: E501
            return data

    def team_delete_with_http_info(self, id, **kwargs):  # noqa: E501
        """Remove a team  # noqa: E501

        Remove a team. **Access policy**: administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_delete_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method team_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `team_delete`")  # noqa: E501

        collection_formats = {}

        # 'id' is substituted into the '/teams/{id}' path template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['jwt']  # noqa: E501

        return self.api_client.call_api(
            '/teams/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def team_inspect(self, id, **kwargs):  # noqa: E501
        """Inspect a team  # noqa: E501

        Retrieve details about a team. Access is only available for administrator and leaders of that team. **Access policy**: restricted  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_inspect(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :return: Team
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.team_inspect_with_http_info(id, **kwargs)  # noqa: E501
        else:
            (data) = self.team_inspect_with_http_info(id, **kwargs)  # noqa: E501
            return data

    def team_inspect_with_http_info(self, id, **kwargs):  # noqa: E501
        """Inspect a team  # noqa: E501

        Retrieve details about a team. Access is only available for administrator and leaders of that team. **Access policy**: restricted  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_inspect_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :return: Team
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method team_inspect" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `team_inspect`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwt']  # noqa: E501

        return self.api_client.call_api(
            '/teams/{id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Team',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def team_list(self, **kwargs):  # noqa: E501
        """List teams  # noqa: E501

        List teams. For non-administrator users, will only list the teams they are member of. **Access policy**: restricted  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_list(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: TeamListResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.team_list_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.team_list_with_http_info(**kwargs)  # noqa: E501
            return data

    def team_list_with_http_info(self, **kwargs):  # noqa: E501
        """List teams  # noqa: E501

        List teams. For non-administrator users, will only list the teams they are member of. **Access policy**: restricted  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_list_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: TeamListResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # No endpoint-specific parameters; only the common request options.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method team_list" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwt']  # noqa: E501

        return self.api_client.call_api(
            '/teams', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TeamListResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def team_memberships_inspect(self, id, **kwargs):  # noqa: E501
        """Inspect a team memberships  # noqa: E501

        Inspect a team memberships. Access is only available for administrator and leaders of that team. **Access policy**: restricted  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_memberships_inspect(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :return: TeamMembershipsResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.team_memberships_inspect_with_http_info(id, **kwargs)  # noqa: E501
        else:
            (data) = self.team_memberships_inspect_with_http_info(id, **kwargs)  # noqa: E501
            return data

    def team_memberships_inspect_with_http_info(self, id, **kwargs):  # noqa: E501
        """Inspect a team memberships  # noqa: E501

        Inspect a team memberships. Access is only available for administrator and leaders of that team. **Access policy**: restricted  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_memberships_inspect_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :return: TeamMembershipsResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method team_memberships_inspect" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `team_memberships_inspect`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwt']  # noqa: E501

        return self.api_client.call_api(
            '/teams/{id}/memberships', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TeamMembershipsResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def team_update(self, id, body, **kwargs):  # noqa: E501
        """Update a team  # noqa: E501

        Update a team. **Access policy**: administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_update(id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :param TeamUpdateRequest body: Team details (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.team_update_with_http_info(id, body, **kwargs)  # noqa: E501
        else:
            (data) = self.team_update_with_http_info(id, body, **kwargs)  # noqa: E501
            return data

    def team_update_with_http_info(self, id, body, **kwargs):  # noqa: E501
        """Update a team  # noqa: E501

        Update a team. **Access policy**: administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.team_update_with_http_info(id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: Team identifier (required)
        :param TeamUpdateRequest body: Team details (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method team_update" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `team_update`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `team_update`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwt']  # noqa: E501

        return self.api_client.call_api(
            '/teams/{id}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 41.638752
| 2,674
| 0.616689
| 2,990
| 25,358
| 5.033445
| 0.093311
| 0.045714
| 0.022326
| 0.028704
| 0.845648
| 0.840997
| 0.829635
| 0.820133
| 0.816013
| 0.807375
| 0
| 0.018897
| 0.29466
| 25,358
| 608
| 2,675
| 41.707237
| 0.822543
| 0.411704
| 0
| 0.80805
| 0
| 0
| 0.160372
| 0.036012
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040248
| false
| 0
| 0.012384
| 0
| 0.111455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cbb676d914e2a261191a1bc521491068091eb4ea
| 1,092
|
py
|
Python
|
essays/models.py
|
janzaheer/tarot_juicer
|
bec9d81ea1a9ebc3cac57ed681779c7890979fe6
|
[
"MIT"
] | null | null | null |
essays/models.py
|
janzaheer/tarot_juicer
|
bec9d81ea1a9ebc3cac57ed681779c7890979fe6
|
[
"MIT"
] | null | null | null |
essays/models.py
|
janzaheer/tarot_juicer
|
bec9d81ea1a9ebc3cac57ed681779c7890979fe6
|
[
"MIT"
] | null | null | null |
from django.db import models
'''from generators.models import Generator'''
class EssayArticle(models.Model):
    """A standalone essay with a title and free-form body text."""

    # Headline shown wherever the model is rendered (e.g. the admin).
    title = models.CharField(max_length=256)
    # Main essay body; may be left empty.
    content = models.TextField(blank=True)

    def __str__(self):
        """Return the title as the human-readable representation."""
        return self.title
class CuratedSlashdot(models.Model):
    """Curated Slashdot-style article: local intro/conclusion text.

    The middle sections (description, tarot card image, galileo and
    f_loss content) are intended to come from the generators app —
    presumably via the commented-out fields below; confirm against
    that app before re-enabling them.
    """

    title = models.CharField(max_length=256)
    # Opening paragraph; may be left empty.
    introduction = models.TextField(blank=True)
    # description = # shared with generators Model
    # tarot_card_image = # shared with generators Model
    # galileo_content = # shared with generators Model
    # f_loss_content = # shared with generators Model
    # Closing paragraph; may be left empty.
    conclusion = models.TextField(blank=True)

    def __str__(self):
        """Return the title as the human-readable representation."""
        return self.title
class CuratedWatchtower(models.Model):
    """Curated Watchtower-style article: local intro/conclusion text.

    The middle sections (description, tarot card image, St. Paul
    content) are intended to come from the generators app — presumably
    via the commented-out fields below; confirm against that app
    before re-enabling them.
    """

    title = models.CharField(max_length=256)
    # Opening paragraph; may be left empty.
    introduction = models.TextField(blank=True)
    # description = # shared with generators Model
    # tarot_card_image = # shared with generators Model
    # st_paul_content = # shared with generators Model
    # Closing paragraph; may be left empty.
    conclusion = models.TextField(blank=True)

    def __str__(self):
        """Return the title as the human-readable representation."""
        return self.title
| 30.333333
| 56
| 0.715201
| 127
| 1,092
| 5.96063
| 0.299213
| 0.09247
| 0.184941
| 0.231176
| 0.824306
| 0.782034
| 0.782034
| 0.782034
| 0.725231
| 0.725231
| 0
| 0.010297
| 0.199634
| 1,092
| 35
| 57
| 31.2
| 0.855835
| 0.306777
| 0
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.055556
| 0.166667
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 11
|
1df38caac70adbe263c13f5bc9560fbc99a40a42
| 75
|
py
|
Python
|
lock.py
|
lastbulletbender/srm-rmp-bot
|
bc358a57b577e8669a0aa1d7dff91656a316545a
|
[
"MIT"
] | null | null | null |
lock.py
|
lastbulletbender/srm-rmp-bot
|
bc358a57b577e8669a0aa1d7dff91656a316545a
|
[
"MIT"
] | null | null | null |
lock.py
|
lastbulletbender/srm-rmp-bot
|
bc358a57b577e8669a0aa1d7dff91656a316545a
|
[
"MIT"
] | null | null | null |
def enc(password):
    """Return *password* unchanged.

    NOTE(review): despite the name, this performs no encryption — any
    password passed through enc() is handled in plaintext. Confirm
    whether a real transformation was intended before relying on this.
    """
    return password
def dec(password):
    """Return *password* unchanged.

    NOTE(review): no-op counterpart of enc(); dec(enc(p)) == p holds
    trivially because neither function transforms its input.
    """
    return password
| 15
| 19
| 0.746667
| 10
| 75
| 5.6
| 0.5
| 0.5
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 75
| 4
| 20
| 18.75
| 0.903226
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 1
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
1dfa7d5f9d0c6bcd1e0b5a5fbc599f30dfb05aeb
| 6,815
|
py
|
Python
|
bp_unit_tests/level_1/unit_test_dot.py
|
nicholas-moreles/blaspy
|
c4af6258e17dd996c4b6d90bbaae15b31b8702b4
|
[
"BSD-3-Clause"
] | 4
|
2015-01-25T12:44:44.000Z
|
2022-03-19T08:36:19.000Z
|
bp_unit_tests/level_1/unit_test_dot.py
|
nicholas-moreles/blaspy
|
c4af6258e17dd996c4b6d90bbaae15b31b8702b4
|
[
"BSD-3-Clause"
] | 7
|
2015-01-20T13:35:39.000Z
|
2015-05-31T17:11:50.000Z
|
bp_unit_tests/level_1/unit_test_dot.py
|
nicholas-moreles/blaspy
|
c4af6258e17dd996c4b6d90bbaae15b31b8702b4
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright (c) 2014-2015, The University of Texas at Austin.
All rights reserved.
This file is part of BLASpy and is available under the 3-Clause
BSD License, which can be found in the LICENSE file at the top-level
directory or at http://opensource.org/licenses/BSD-3-Clause
"""
from blaspy import dot
from numpy import array, array_equal, asmatrix
from unittest import TestCase
class TestDot(TestCase):
    """Unit tests for blaspy's dot product wrapper.

    Covers: happy paths over row/column vector layouts and ndarray vs
    matrix types, stride (inc_x/inc_y) handling, accepted float dtypes,
    and the ValueError contract for every rejected input category
    (non-numpy, wrong dimensionality, non-vector, length mismatch,
    mixed/int/complex dtypes).
    """

    # --- happy paths: layouts and container types -------------------------

    def test_scalar_as_ndarray(self):
        x = array([[1.]])
        y = array([[2.]])
        self.assertEqual(dot(x, y), 2)

    def test_two_row_vectors_as_ndarrays(self):
        x = array([[1., 2., 3.]])
        y = array([[3., 2., 1.]])
        self.assertEqual(dot(x, y), 10)

    def test_two_column_vectors_as_ndarrays(self):
        x = array([[1.], [2.], [3.]])
        y = array([[3.], [2.], [1.]])
        self.assertEqual(dot(x, y), 10)

    def test_col_and_row_vectors_as_ndarrays(self):
        # Mixed orientations are accepted; only the element sequence matters.
        x = array([[1.], [2.], [3.]])
        y = array([[3., 2., 1.]])
        self.assertEqual(dot(x, y), 10)

    def test_row_and_col_vectors_as_ndarrays(self):
        x = array([[1., 2., 3.]])
        y = array([[3.], [2.], [1.]])
        self.assertEqual(dot(x, y), 10)

    def test_vectors_with_negatives_in_values(self):
        x = array([[-1., -2., 3.]])
        y = array([[3., 2., 1.]])
        self.assertEqual(dot(x, y), -4)

    def test_vectors_as_matrices(self):
        x = asmatrix(array([[1., 2., 3.]]))
        y = asmatrix(array([[3., 2., 1.]]))
        self.assertEqual(dot(x, y), 10)

    def test_vectors_as_mixed_matrices_and_ndarrays(self):
        x = asmatrix(array([[1., 2., 3.]]))
        y = array([[3., 2., 1.]])
        self.assertEqual(dot(x, y), 10)

    # --- stride (inc_x / inc_y) handling ----------------------------------

    def test_strides_less_than_length(self):
        # stride 2 over [1,2,3] selects elements 1 and 3 -> 1*3 + 3*1 = 6
        x = array([[1., 2., 3.]])
        y = array([[3., 2., 1.]])
        self.assertEqual(dot(x, y, inc_x=2, inc_y=2), 6)

    def test_strides_greater_than_length(self):
        # stride 3 selects only the first element of each vector -> 1*3 = 3
        x = array([[1., 2., 3.]])
        y = array([[3., 2., 1.]])
        self.assertEqual(dot(x, y, inc_x=3, inc_y=3), 3)

    def test_unequal_strides(self):
        # x strided by 3 -> [1, 4]; y strided by 2 -> [3, 1]; dot = 7
        x = array([[1., 2., 3., 4., 5., 6.]])
        y = array([[3., 2., 1.]])
        self.assertEqual(dot(x, y, inc_x=3, inc_y=2), 7)

    # --- accepted dtypes --------------------------------------------------

    def test_float32_dtype(self):
        x = array([[1., 2., 3.]], dtype='float32')
        y = array([[3., 2., 1.]], dtype='float32')
        self.assertEqual(x.dtype, 'float32')
        self.assertEqual(y.dtype, 'float32')
        self.assertEqual(dot(x, y), 10)

    def test_float64_dtype(self):
        x = array([[1., 2., 3.]], dtype='float64')
        y = array([[3., 2., 1.]], dtype='float64')
        self.assertEqual(x.dtype, 'float64')
        self.assertEqual(y.dtype, 'float64')
        self.assertEqual(dot(x, y), 10)

    # --- rejected inputs: each category must raise ValueError -------------

    def test_not_numpy_with_list_for_x_raises_ValueError(self):
        x = [[1., 2., 3.]]
        y = array([[3., 2., 1.]])
        self.assertRaises(ValueError, dot, x, y)

    def test_not_numpy_with_list_for_y_raises_ValueError(self):
        x = array([[1., 2., 3.]])
        y = [[3., 2., 1.]]
        self.assertRaises(ValueError, dot, x, y)

    def test_not_numpy_with_scalar_for_x_raises_ValueError(self):
        x = 1.
        y = array([[3.]])
        self.assertRaises(ValueError, dot, x, y)

    def test_not_numpy_with_scalar_for_y_raises_ValueError(self):
        x = array([[1.]])
        y = 2.
        self.assertRaises(ValueError, dot, x, y)

    def test_not_2d_numpy_with_1d_for_x_raises_ValueError(self):
        x = array([1., 2., 3.])
        y = array([[3., 2., 1.]])
        self.assertRaises(ValueError, dot, x, y)

    def test_not_2d_numpy_with_1d_for_y_raises_ValueError(self):
        x = array([[1., 2., 3.]])
        y = array([3., 2., 1.])
        self.assertRaises(ValueError, dot, x, y)

    def test_not_2d_numpy_with_3d_for_x_raises_ValueError(self):
        x = array([[[1.], [2.], [3.]]], ndmin=3)
        y = array([[3., 2., 1.]])
        self.assertRaises(ValueError, dot, x, y)

    def test_not_2d_numpy_with_3d_for_y_raises_ValueError(self):
        x = array([[1., 2., 3.]])
        y = array([[[3.], [2.], [1.]]], ndmin=3)
        self.assertRaises(ValueError, dot, x, y)

    def test_not_vector_raises_ValueError(self):
        # 2x2 matrices are not vectors.
        x = array([[1., 2.], [3., 4.]])
        y = array([[1., 2.], [3., 4.]])
        self.assertRaises(ValueError, dot, x, y)

    def test_unequal_vector_length_raises_ValueError(self):
        x = array([[1., 2., 3.]])
        y = array([[3., 2.]])
        self.assertRaises(ValueError, dot, x, y)

    def test_unequal_vector_length_with_strides_raises_ValueError(self):
        x = array([[1., 2., 3.]])
        y = array([[3., 2.]])
        self.assertRaises(ValueError, dot, x, y, 2, 3)

    def test_mixed_dtypes_raises_ValueError(self):
        x = array([[1., 2., 3.]], dtype='float32')
        y = array([[3., 2., 1.]], dtype='float64')
        self.assertEqual(x.dtype, 'float32')
        self.assertEqual(y.dtype, 'float64')
        self.assertRaises(ValueError, dot, x, y)

    def test_integer_dtype_for_both_raises_ValueError(self):
        x = array([[1., 2., 3.]], dtype='int')
        y = array([[3., 2., 1.]], dtype='int')
        self.assertEqual(x.dtype, 'int')
        self.assertEqual(y.dtype, 'int')
        self.assertRaises(ValueError, dot, x, y)

    def test_integer_dtype_for_x_raises_ValueError(self):
        x = array([[1., 2., 3.]], dtype='int')
        y = array([[3., 2., 1.]], dtype='float64')
        self.assertEqual(x.dtype, 'int')
        self.assertEqual(y.dtype, 'float64')
        self.assertRaises(ValueError, dot, x, y)

    def test_integer_dtype_for_y_raises_ValueError(self):
        x = array([[1., 2., 3.]], dtype='float64')
        y = array([[3, 2., 1.]], dtype='int')
        self.assertEqual(x.dtype, 'float64')
        self.assertEqual(y.dtype, 'int')
        self.assertRaises(ValueError, dot, x, y)

    def test_complex_dtype_for_both_raises_ValueError(self):
        x = array([[1., 2., 3.]], dtype='complex')
        y = array([[3., 2., 1.]], dtype='complex')
        self.assertEqual(x.dtype, 'complex')
        self.assertEqual(y.dtype, 'complex')
        self.assertRaises(ValueError, dot, x, y)

    def test_complex_dtype_for_x_raises_ValueError(self):
        x = array([[1., 2., 3.]], dtype='complex')
        y = array([[3., 2., 1.]], dtype='float64')
        self.assertEqual(x.dtype, 'complex')
        self.assertEqual(y.dtype, 'float64')
        self.assertRaises(ValueError, dot, x, y)

    def test_complex_dtype_for_y_raises_ValueError(self):
        x = array([[1., 2., 3.]], dtype='float64')
        y = array([[3., 2., 1.]], dtype='complex')
        self.assertEqual(x.dtype, 'float64')
        self.assertEqual(y.dtype, 'complex')
        self.assertRaises(ValueError, dot, x, y)
| 35.868421
| 72
| 0.567572
| 974
| 6,815
| 3.782341
| 0.104723
| 0.058903
| 0.042074
| 0.060803
| 0.832519
| 0.821118
| 0.819218
| 0.796688
| 0.753257
| 0.716612
| 0
| 0.052427
| 0.244314
| 6,815
| 190
| 73
| 35.868421
| 0.662913
| 0.040205
| 0
| 0.589041
| 0
| 0
| 0.033784
| 0
| 0
| 0
| 0
| 0
| 0.335616
| 1
| 0.212329
| false
| 0
| 0.020548
| 0
| 0.239726
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
383127c7c86fa9aed0026bc35b67ea9ce2f3a429
| 31
|
py
|
Python
|
tlc59711/__init__.py
|
davidjun/tlc59711_spidev_driver
|
c1edaf92c899731961a43fcfe3b8d4435821c00b
|
[
"MIT"
] | null | null | null |
tlc59711/__init__.py
|
davidjun/tlc59711_spidev_driver
|
c1edaf92c899731961a43fcfe3b8d4435821c00b
|
[
"MIT"
] | null | null | null |
tlc59711/__init__.py
|
davidjun/tlc59711_spidev_driver
|
c1edaf92c899731961a43fcfe3b8d4435821c00b
|
[
"MIT"
] | null | null | null |
from .tlc59711 import TLC59711
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.37037
| 0.129032
| 31
| 1
| 31
| 31
| 0.592593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
69766b99240debdd8372379482b031439f8b7984
| 399,078
|
py
|
Python
|
test/python/sample/stdlib/test_configparser.py
|
esoma/woosh
|
4cd58eb93c69c48040ff1e25159e48def54c3111
|
[
"MIT"
] | null | null | null |
test/python/sample/stdlib/test_configparser.py
|
esoma/woosh
|
4cd58eb93c69c48040ff1e25159e48def54c3111
|
[
"MIT"
] | null | null | null |
test/python/sample/stdlib/test_configparser.py
|
esoma/woosh
|
4cd58eb93c69c48040ff1e25159e48def54c3111
|
[
"MIT"
] | null | null | null |
# this file was generated using test/python/sample/generate.py
# python
import io
import pathlib
# pytest
import pytest
# woosh
import woosh
def tokenize_file_like(source):
    """Tokenize *source* bytes through the file-like input path.

    Wraps the raw bytes in an in-memory binary stream before handing
    them to woosh, exercising its file-object code path.
    """
    stream = io.BytesIO(source)
    return list(woosh.tokenize(stream))
def tokenize_bytes(source):
    """Tokenize *source* bytes directly and return the tokens as a list."""
    token_stream = woosh.tokenize(source)
    return list(token_stream)
SAMPLE_DIR = pathlib.Path(__file__).parent.absolute() / '../../' / '../../' / 'sample'
@pytest.mark.parametrize('tokenize', [tokenize_file_like, tokenize_bytes])
def test(tokenize):
    """Tokenize the sample configparser module via both input paths and
    compare against the pre-generated EXPECTED token list."""
    with open(SAMPLE_DIR / 'stdlib/configparser.py', 'rb') as f:
        tokens = tokenize(f.read())
    # NOTE(review): zip stops at the shorter sequence, so extra or missing
    # trailing tokens would go undetected — presumably lengths always match.
    for token, expected in zip(tokens, EXPECTED):
        assert token == expected
EXPECTED = [
woosh.Token(woosh.ENCODING, 'utf-8', 1, 0, 1, 0),
woosh.Token(woosh.STRING, '"""Configuration file parser.\r\n\r\nA configuration file consists of sections, lead by a "[section]" header,\r\nand followed by "name: value" entries, with continuations and such in\r\nthe style of RFC 822.\r\n\r\nIntrinsic defaults can be specified by passing them into the\r\nConfigParser constructor as a dictionary.\r\n\r\nclass:\r\n\r\nConfigParser -- responsible for parsing a list of\r\n configuration files, and managing the parsed database.\r\n\r\n methods:\r\n\r\n __init__(defaults=None, dict_type=_default_dict, allow_no_value=False,\r\n delimiters=(\'=\', \':\'), comment_prefixes=(\'#\', \';\'),\r\n inline_comment_prefixes=None, strict=True,\r\n empty_lines_in_values=True, default_section=\'DEFAULT\',\r\n interpolation=<unset>, converters=<unset>):\r\n Create the parser. When `defaults\' is given, it is initialized into the\r\n dictionary or intrinsic defaults. The keys must be strings, the values\r\n must be appropriate for %()s string interpolation.\r\n\r\n When `dict_type\' is given, it will be used to create the dictionary\r\n objects for the list of sections, for the options within a section, and\r\n for the default values.\r\n\r\n When `delimiters\' is given, it will be used as the set of substrings\r\n that divide keys from values.\r\n\r\n When `comment_prefixes\' is given, it will be used as the set of\r\n substrings that prefix comments in empty lines. Comments can be\r\n indented.\r\n\r\n When `inline_comment_prefixes\' is given, it will be used as the set of\r\n substrings that prefix comments in non-empty lines.\r\n\r\n When `strict` is True, the parser won\'t allow for any section or option\r\n duplicates while reading from a single source (file, string or\r\n dictionary). Default is True.\r\n\r\n When `empty_lines_in_values\' is False (default: True), each empty line\r\n marks the end of an option. 
Otherwise, internal empty lines of\r\n a multiline option are kept as part of the value.\r\n\r\n When `allow_no_value\' is True (default: False), options without\r\n values are accepted; the value presented for these is None.\r\n\r\n When `default_section\' is given, the name of the special section is\r\n named accordingly. By default it is called ``"DEFAULT"`` but this can\r\n be customized to point to any other valid section name. Its current\r\n value can be retrieved using the ``parser_instance.default_section``\r\n attribute and may be modified at runtime.\r\n\r\n When `interpolation` is given, it should be an Interpolation subclass\r\n instance. It will be used as the handler for option value\r\n pre-processing when using getters. RawConfigParser objects don\'t do\r\n any sort of interpolation, whereas ConfigParser uses an instance of\r\n BasicInterpolation. The library also provides a ``zc.buildbot``\r\n inspired ExtendedInterpolation implementation.\r\n\r\n When `converters` is given, it should be a dictionary where each key\r\n represents the name of a type converter and each value is a callable\r\n implementing the conversion from string to the desired datatype. Every\r\n converter gets its corresponding get*() method on the parser object and\r\n section proxies.\r\n\r\n sections()\r\n Return all the configuration section names, sans DEFAULT.\r\n\r\n has_section(section)\r\n Return whether the given section exists.\r\n\r\n has_option(section, option)\r\n Return whether the given option exists in the given section.\r\n\r\n options(section)\r\n Return list of configuration options for the named section.\r\n\r\n read(filenames, encoding=None)\r\n Read and parse the iterable of named configuration files, given by\r\n name. A single filename is also allowed. Non-existing files\r\n are ignored. 
Return list of successfully read files.\r\n\r\n read_file(f, filename=None)\r\n Read and parse one configuration file, given as a file object.\r\n The filename defaults to f.name; it is only used in error\r\n messages (if f has no `name\' attribute, the string `<???>\' is used).\r\n\r\n read_string(string)\r\n Read configuration from a given string.\r\n\r\n read_dict(dictionary)\r\n Read configuration from a dictionary. Keys are section names,\r\n values are dictionaries with keys and values that should be present\r\n in the section. If the used dictionary type preserves order, sections\r\n and their keys will be added in order. Values are automatically\r\n converted to strings.\r\n\r\n get(section, option, raw=False, vars=None, fallback=_UNSET)\r\n Return a string value for the named option. All % interpolations are\r\n expanded in the return values, based on the defaults passed into the\r\n constructor and the DEFAULT section. Additional substitutions may be\r\n provided using the `vars\' argument, which must be a dictionary whose\r\n contents override any pre-existing defaults. If `option\' is a key in\r\n `vars\', the value from `vars\' is used.\r\n\r\n getint(section, options, raw=False, vars=None, fallback=_UNSET)\r\n Like get(), but convert value to an integer.\r\n\r\n getfloat(section, options, raw=False, vars=None, fallback=_UNSET)\r\n Like get(), but convert value to a float.\r\n\r\n getboolean(section, options, raw=False, vars=None, fallback=_UNSET)\r\n Like get(), but convert value to a boolean (currently case\r\n insensitively defined as 0, false, no, off for False, and 1, true,\r\n yes, on for True). Returns False or True.\r\n\r\n items(section=_UNSET, raw=False, vars=None)\r\n If section is given, return a list of tuples with (name, value) for\r\n each option in the section. 
Otherwise, return a list of tuples with\r\n (section_name, section_proxy) for each section, including DEFAULTSECT.\r\n\r\n remove_section(section)\r\n Remove the given file section and all its options.\r\n\r\n remove_option(section, option)\r\n Remove the given option from the given section.\r\n\r\n set(section, option, value)\r\n Set the given option.\r\n\r\n write(fp, space_around_delimiters=True)\r\n Write the configuration state in .ini format. If\r\n `space_around_delimiters\' is True (the default), delimiters\r\n between keys and values are surrounded by spaces.\r\n"""', 1, 0, 139, 3),
woosh.Token(woosh.NEWLINE, '\r\n', 139, 3, 140, 0),
woosh.Token(woosh.NAME, 'from', 141, 0, 141, 4),
woosh.Token(woosh.NAME, 'collections', 141, 5, 141, 16),
woosh.Token(woosh.OP, '.', 141, 16, 141, 17),
woosh.Token(woosh.NAME, 'abc', 141, 17, 141, 20),
woosh.Token(woosh.NAME, 'import', 141, 21, 141, 27),
woosh.Token(woosh.NAME, 'MutableMapping', 141, 28, 141, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 141, 42, 142, 0),
woosh.Token(woosh.NAME, 'from', 142, 0, 142, 4),
woosh.Token(woosh.NAME, 'collections', 142, 5, 142, 16),
woosh.Token(woosh.NAME, 'import', 142, 17, 142, 23),
woosh.Token(woosh.NAME, 'ChainMap', 142, 24, 142, 32),
woosh.Token(woosh.NAME, 'as', 142, 33, 142, 35),
woosh.Token(woosh.NAME, '_ChainMap', 142, 36, 142, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 142, 45, 143, 0),
woosh.Token(woosh.NAME, 'import', 143, 0, 143, 6),
woosh.Token(woosh.NAME, 'functools', 143, 7, 143, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 143, 16, 144, 0),
woosh.Token(woosh.NAME, 'import', 144, 0, 144, 6),
woosh.Token(woosh.NAME, 'io', 144, 7, 144, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 144, 9, 145, 0),
woosh.Token(woosh.NAME, 'import', 145, 0, 145, 6),
woosh.Token(woosh.NAME, 'itertools', 145, 7, 145, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 145, 16, 146, 0),
woosh.Token(woosh.NAME, 'import', 146, 0, 146, 6),
woosh.Token(woosh.NAME, 'os', 146, 7, 146, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 146, 9, 147, 0),
woosh.Token(woosh.NAME, 'import', 147, 0, 147, 6),
woosh.Token(woosh.NAME, 're', 147, 7, 147, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 147, 9, 148, 0),
woosh.Token(woosh.NAME, 'import', 148, 0, 148, 6),
woosh.Token(woosh.NAME, 'sys', 148, 7, 148, 10),
woosh.Token(woosh.NEWLINE, '\r\n', 148, 10, 149, 0),
woosh.Token(woosh.NAME, 'import', 149, 0, 149, 6),
woosh.Token(woosh.NAME, 'warnings', 149, 7, 149, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 149, 15, 150, 0),
woosh.Token(woosh.NAME, '__all__', 151, 0, 151, 7),
woosh.Token(woosh.OP, '=', 151, 8, 151, 9),
woosh.Token(woosh.OP, '[', 151, 10, 151, 11),
woosh.Token(woosh.STRING, '"NoSectionError"', 151, 11, 151, 27),
woosh.Token(woosh.OP, ',', 151, 27, 151, 28),
woosh.Token(woosh.STRING, '"DuplicateOptionError"', 151, 29, 151, 51),
woosh.Token(woosh.OP, ',', 151, 51, 151, 52),
woosh.Token(woosh.STRING, '"DuplicateSectionError"', 151, 53, 151, 76),
woosh.Token(woosh.OP, ',', 151, 76, 151, 77),
woosh.Token(woosh.STRING, '"NoOptionError"', 152, 11, 152, 26),
woosh.Token(woosh.OP, ',', 152, 26, 152, 27),
woosh.Token(woosh.STRING, '"InterpolationError"', 152, 28, 152, 48),
woosh.Token(woosh.OP, ',', 152, 48, 152, 49),
woosh.Token(woosh.STRING, '"InterpolationDepthError"', 152, 50, 152, 75),
woosh.Token(woosh.OP, ',', 152, 75, 152, 76),
woosh.Token(woosh.STRING, '"InterpolationMissingOptionError"', 153, 11, 153, 44),
woosh.Token(woosh.OP, ',', 153, 44, 153, 45),
woosh.Token(woosh.STRING, '"InterpolationSyntaxError"', 153, 46, 153, 72),
woosh.Token(woosh.OP, ',', 153, 72, 153, 73),
woosh.Token(woosh.STRING, '"ParsingError"', 154, 11, 154, 25),
woosh.Token(woosh.OP, ',', 154, 25, 154, 26),
woosh.Token(woosh.STRING, '"MissingSectionHeaderError"', 154, 27, 154, 54),
woosh.Token(woosh.OP, ',', 154, 54, 154, 55),
woosh.Token(woosh.STRING, '"ConfigParser"', 155, 11, 155, 25),
woosh.Token(woosh.OP, ',', 155, 25, 155, 26),
woosh.Token(woosh.STRING, '"SafeConfigParser"', 155, 27, 155, 45),
woosh.Token(woosh.OP, ',', 155, 45, 155, 46),
woosh.Token(woosh.STRING, '"RawConfigParser"', 155, 47, 155, 64),
woosh.Token(woosh.OP, ',', 155, 64, 155, 65),
woosh.Token(woosh.STRING, '"Interpolation"', 156, 11, 156, 26),
woosh.Token(woosh.OP, ',', 156, 26, 156, 27),
woosh.Token(woosh.STRING, '"BasicInterpolation"', 156, 28, 156, 48),
woosh.Token(woosh.OP, ',', 156, 48, 156, 49),
woosh.Token(woosh.STRING, '"ExtendedInterpolation"', 156, 51, 156, 74),
woosh.Token(woosh.OP, ',', 156, 74, 156, 75),
woosh.Token(woosh.STRING, '"LegacyInterpolation"', 157, 11, 157, 32),
woosh.Token(woosh.OP, ',', 157, 32, 157, 33),
woosh.Token(woosh.STRING, '"SectionProxy"', 157, 34, 157, 48),
woosh.Token(woosh.OP, ',', 157, 48, 157, 49),
woosh.Token(woosh.STRING, '"ConverterMapping"', 157, 50, 157, 68),
woosh.Token(woosh.OP, ',', 157, 68, 157, 69),
woosh.Token(woosh.STRING, '"DEFAULTSECT"', 158, 11, 158, 24),
woosh.Token(woosh.OP, ',', 158, 24, 158, 25),
woosh.Token(woosh.STRING, '"MAX_INTERPOLATION_DEPTH"', 158, 26, 158, 51),
woosh.Token(woosh.OP, ']', 158, 51, 158, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 158, 52, 159, 0),
woosh.Token(woosh.NAME, '_default_dict', 160, 0, 160, 13),
woosh.Token(woosh.OP, '=', 160, 14, 160, 15),
woosh.Token(woosh.NAME, 'dict', 160, 16, 160, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 160, 20, 161, 0),
woosh.Token(woosh.NAME, 'DEFAULTSECT', 161, 0, 161, 11),
woosh.Token(woosh.OP, '=', 161, 12, 161, 13),
woosh.Token(woosh.STRING, '"DEFAULT"', 161, 14, 161, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 161, 23, 162, 0),
woosh.Token(woosh.NAME, 'MAX_INTERPOLATION_DEPTH', 163, 0, 163, 23),
woosh.Token(woosh.OP, '=', 163, 24, 163, 25),
woosh.Token(woosh.NUMBER, '10', 163, 26, 163, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 163, 28, 164, 0),
woosh.Token(woosh.COMMENT, '# exception classes', 167, 0, 167, 19),
woosh.Token(woosh.NAME, 'class', 168, 0, 168, 5),
woosh.Token(woosh.NAME, 'Error', 168, 6, 168, 11),
woosh.Token(woosh.OP, '(', 168, 11, 168, 12),
woosh.Token(woosh.NAME, 'Exception', 168, 12, 168, 21),
woosh.Token(woosh.OP, ')', 168, 21, 168, 22),
woosh.Token(woosh.OP, ':', 168, 22, 168, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 168, 23, 169, 0),
woosh.Token(woosh.INDENT, ' ', 169, 0, 169, 4),
woosh.Token(woosh.STRING, '"""Base class for ConfigParser exceptions."""', 169, 4, 169, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 169, 49, 170, 0),
woosh.Token(woosh.NAME, 'def', 171, 4, 171, 7),
woosh.Token(woosh.NAME, '__init__', 171, 8, 171, 16),
woosh.Token(woosh.OP, '(', 171, 16, 171, 17),
woosh.Token(woosh.NAME, 'self', 171, 17, 171, 21),
woosh.Token(woosh.OP, ',', 171, 21, 171, 22),
woosh.Token(woosh.NAME, 'msg', 171, 23, 171, 26),
woosh.Token(woosh.OP, '=', 171, 26, 171, 27),
woosh.Token(woosh.STRING, "''", 171, 27, 171, 29),
woosh.Token(woosh.OP, ')', 171, 29, 171, 30),
woosh.Token(woosh.OP, ':', 171, 30, 171, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 171, 31, 172, 0),
woosh.Token(woosh.INDENT, ' ', 172, 0, 172, 8),
woosh.Token(woosh.NAME, 'self', 172, 8, 172, 12),
woosh.Token(woosh.OP, '.', 172, 12, 172, 13),
woosh.Token(woosh.NAME, 'message', 172, 13, 172, 20),
woosh.Token(woosh.OP, '=', 172, 21, 172, 22),
woosh.Token(woosh.NAME, 'msg', 172, 23, 172, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 172, 26, 173, 0),
woosh.Token(woosh.NAME, 'Exception', 173, 8, 173, 17),
woosh.Token(woosh.OP, '.', 173, 17, 173, 18),
woosh.Token(woosh.NAME, '__init__', 173, 18, 173, 26),
woosh.Token(woosh.OP, '(', 173, 26, 173, 27),
woosh.Token(woosh.NAME, 'self', 173, 27, 173, 31),
woosh.Token(woosh.OP, ',', 173, 31, 173, 32),
woosh.Token(woosh.NAME, 'msg', 173, 33, 173, 36),
woosh.Token(woosh.OP, ')', 173, 36, 173, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 173, 37, 174, 0),
woosh.Token(woosh.DEDENT, ' ', 175, 0, 175, 4),
woosh.Token(woosh.NAME, 'def', 175, 4, 175, 7),
woosh.Token(woosh.NAME, '__repr__', 175, 8, 175, 16),
woosh.Token(woosh.OP, '(', 175, 16, 175, 17),
woosh.Token(woosh.NAME, 'self', 175, 17, 175, 21),
woosh.Token(woosh.OP, ')', 175, 21, 175, 22),
woosh.Token(woosh.OP, ':', 175, 22, 175, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 175, 23, 176, 0),
woosh.Token(woosh.INDENT, ' ', 176, 0, 176, 8),
woosh.Token(woosh.NAME, 'return', 176, 8, 176, 14),
woosh.Token(woosh.NAME, 'self', 176, 15, 176, 19),
woosh.Token(woosh.OP, '.', 176, 19, 176, 20),
woosh.Token(woosh.NAME, 'message', 176, 20, 176, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 176, 27, 177, 0),
woosh.Token(woosh.DEDENT, ' ', 178, 0, 178, 4),
woosh.Token(woosh.NAME, '__str__', 178, 4, 178, 11),
woosh.Token(woosh.OP, '=', 178, 12, 178, 13),
woosh.Token(woosh.NAME, '__repr__', 178, 14, 178, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 178, 22, 179, 0),
woosh.Token(woosh.DEDENT, '', 181, 0, 181, 0),
woosh.Token(woosh.NAME, 'class', 181, 0, 181, 5),
woosh.Token(woosh.NAME, 'NoSectionError', 181, 6, 181, 20),
woosh.Token(woosh.OP, '(', 181, 20, 181, 21),
woosh.Token(woosh.NAME, 'Error', 181, 21, 181, 26),
woosh.Token(woosh.OP, ')', 181, 26, 181, 27),
woosh.Token(woosh.OP, ':', 181, 27, 181, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 181, 28, 182, 0),
woosh.Token(woosh.INDENT, ' ', 182, 0, 182, 4),
woosh.Token(woosh.STRING, '"""Raised when no section matches a requested option."""', 182, 4, 182, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 182, 60, 183, 0),
woosh.Token(woosh.NAME, 'def', 184, 4, 184, 7),
woosh.Token(woosh.NAME, '__init__', 184, 8, 184, 16),
woosh.Token(woosh.OP, '(', 184, 16, 184, 17),
woosh.Token(woosh.NAME, 'self', 184, 17, 184, 21),
woosh.Token(woosh.OP, ',', 184, 21, 184, 22),
woosh.Token(woosh.NAME, 'section', 184, 23, 184, 30),
woosh.Token(woosh.OP, ')', 184, 30, 184, 31),
woosh.Token(woosh.OP, ':', 184, 31, 184, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 184, 32, 185, 0),
woosh.Token(woosh.INDENT, ' ', 185, 0, 185, 8),
woosh.Token(woosh.NAME, 'Error', 185, 8, 185, 13),
woosh.Token(woosh.OP, '.', 185, 13, 185, 14),
woosh.Token(woosh.NAME, '__init__', 185, 14, 185, 22),
woosh.Token(woosh.OP, '(', 185, 22, 185, 23),
woosh.Token(woosh.NAME, 'self', 185, 23, 185, 27),
woosh.Token(woosh.OP, ',', 185, 27, 185, 28),
woosh.Token(woosh.STRING, "'No section: %r'", 185, 29, 185, 45),
woosh.Token(woosh.OP, '%', 185, 46, 185, 47),
woosh.Token(woosh.OP, '(', 185, 48, 185, 49),
woosh.Token(woosh.NAME, 'section', 185, 49, 185, 56),
woosh.Token(woosh.OP, ',', 185, 56, 185, 57),
woosh.Token(woosh.OP, ')', 185, 57, 185, 58),
woosh.Token(woosh.OP, ')', 185, 58, 185, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 185, 59, 186, 0),
woosh.Token(woosh.NAME, 'self', 186, 8, 186, 12),
woosh.Token(woosh.OP, '.', 186, 12, 186, 13),
woosh.Token(woosh.NAME, 'section', 186, 13, 186, 20),
woosh.Token(woosh.OP, '=', 186, 21, 186, 22),
woosh.Token(woosh.NAME, 'section', 186, 23, 186, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 186, 30, 187, 0),
woosh.Token(woosh.NAME, 'self', 187, 8, 187, 12),
woosh.Token(woosh.OP, '.', 187, 12, 187, 13),
woosh.Token(woosh.NAME, 'args', 187, 13, 187, 17),
woosh.Token(woosh.OP, '=', 187, 18, 187, 19),
woosh.Token(woosh.OP, '(', 187, 20, 187, 21),
woosh.Token(woosh.NAME, 'section', 187, 21, 187, 28),
woosh.Token(woosh.OP, ',', 187, 28, 187, 29),
woosh.Token(woosh.OP, ')', 187, 30, 187, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 187, 31, 188, 0),
woosh.Token(woosh.DEDENT, '', 190, 0, 190, 0),
woosh.Token(woosh.DEDENT, '', 190, 0, 190, 0),
woosh.Token(woosh.NAME, 'class', 190, 0, 190, 5),
woosh.Token(woosh.NAME, 'DuplicateSectionError', 190, 6, 190, 27),
woosh.Token(woosh.OP, '(', 190, 27, 190, 28),
woosh.Token(woosh.NAME, 'Error', 190, 28, 190, 33),
woosh.Token(woosh.OP, ')', 190, 33, 190, 34),
woosh.Token(woosh.OP, ':', 190, 34, 190, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 190, 35, 191, 0),
woosh.Token(woosh.INDENT, ' ', 191, 0, 191, 4),
woosh.Token(woosh.STRING, '"""Raised when a section is repeated in an input source.\r\n\r\n Possible repetitions that raise this exception are: multiple creation\r\n using the API or in strict parsers when a section is found more than once\r\n in a single input file, string or dictionary.\r\n """', 191, 4, 196, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 196, 7, 197, 0),
woosh.Token(woosh.NAME, 'def', 198, 4, 198, 7),
woosh.Token(woosh.NAME, '__init__', 198, 8, 198, 16),
woosh.Token(woosh.OP, '(', 198, 16, 198, 17),
woosh.Token(woosh.NAME, 'self', 198, 17, 198, 21),
woosh.Token(woosh.OP, ',', 198, 21, 198, 22),
woosh.Token(woosh.NAME, 'section', 198, 23, 198, 30),
woosh.Token(woosh.OP, ',', 198, 30, 198, 31),
woosh.Token(woosh.NAME, 'source', 198, 32, 198, 38),
woosh.Token(woosh.OP, '=', 198, 38, 198, 39),
woosh.Token(woosh.NAME, 'None', 198, 39, 198, 43),
woosh.Token(woosh.OP, ',', 198, 43, 198, 44),
woosh.Token(woosh.NAME, 'lineno', 198, 45, 198, 51),
woosh.Token(woosh.OP, '=', 198, 51, 198, 52),
woosh.Token(woosh.NAME, 'None', 198, 52, 198, 56),
woosh.Token(woosh.OP, ')', 198, 56, 198, 57),
woosh.Token(woosh.OP, ':', 198, 57, 198, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 198, 58, 199, 0),
woosh.Token(woosh.INDENT, ' ', 199, 0, 199, 8),
woosh.Token(woosh.NAME, 'msg', 199, 8, 199, 11),
woosh.Token(woosh.OP, '=', 199, 12, 199, 13),
woosh.Token(woosh.OP, '[', 199, 14, 199, 15),
woosh.Token(woosh.NAME, 'repr', 199, 15, 199, 19),
woosh.Token(woosh.OP, '(', 199, 19, 199, 20),
woosh.Token(woosh.NAME, 'section', 199, 20, 199, 27),
woosh.Token(woosh.OP, ')', 199, 27, 199, 28),
woosh.Token(woosh.OP, ',', 199, 28, 199, 29),
woosh.Token(woosh.STRING, '" already exists"', 199, 30, 199, 47),
woosh.Token(woosh.OP, ']', 199, 47, 199, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 199, 48, 200, 0),
woosh.Token(woosh.NAME, 'if', 200, 8, 200, 10),
woosh.Token(woosh.NAME, 'source', 200, 11, 200, 17),
woosh.Token(woosh.NAME, 'is', 200, 18, 200, 20),
woosh.Token(woosh.NAME, 'not', 200, 21, 200, 24),
woosh.Token(woosh.NAME, 'None', 200, 25, 200, 29),
woosh.Token(woosh.OP, ':', 200, 29, 200, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 200, 30, 201, 0),
woosh.Token(woosh.INDENT, ' ', 201, 0, 201, 12),
woosh.Token(woosh.NAME, 'message', 201, 12, 201, 19),
woosh.Token(woosh.OP, '=', 201, 20, 201, 21),
woosh.Token(woosh.OP, '[', 201, 22, 201, 23),
woosh.Token(woosh.STRING, '"While reading from "', 201, 23, 201, 44),
woosh.Token(woosh.OP, ',', 201, 44, 201, 45),
woosh.Token(woosh.NAME, 'repr', 201, 46, 201, 50),
woosh.Token(woosh.OP, '(', 201, 50, 201, 51),
woosh.Token(woosh.NAME, 'source', 201, 51, 201, 57),
woosh.Token(woosh.OP, ')', 201, 57, 201, 58),
woosh.Token(woosh.OP, ']', 201, 58, 201, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 201, 59, 202, 0),
woosh.Token(woosh.NAME, 'if', 202, 12, 202, 14),
woosh.Token(woosh.NAME, 'lineno', 202, 15, 202, 21),
woosh.Token(woosh.NAME, 'is', 202, 22, 202, 24),
woosh.Token(woosh.NAME, 'not', 202, 25, 202, 28),
woosh.Token(woosh.NAME, 'None', 202, 29, 202, 33),
woosh.Token(woosh.OP, ':', 202, 33, 202, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 202, 34, 203, 0),
woosh.Token(woosh.INDENT, ' ', 203, 0, 203, 16),
woosh.Token(woosh.NAME, 'message', 203, 16, 203, 23),
woosh.Token(woosh.OP, '.', 203, 23, 203, 24),
woosh.Token(woosh.NAME, 'append', 203, 24, 203, 30),
woosh.Token(woosh.OP, '(', 203, 30, 203, 31),
woosh.Token(woosh.STRING, '" [line {0:2d}]"', 203, 31, 203, 47),
woosh.Token(woosh.OP, '.', 203, 47, 203, 48),
woosh.Token(woosh.NAME, 'format', 203, 48, 203, 54),
woosh.Token(woosh.OP, '(', 203, 54, 203, 55),
woosh.Token(woosh.NAME, 'lineno', 203, 55, 203, 61),
woosh.Token(woosh.OP, ')', 203, 61, 203, 62),
woosh.Token(woosh.OP, ')', 203, 62, 203, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 203, 63, 204, 0),
woosh.Token(woosh.DEDENT, ' ', 204, 0, 204, 12),
woosh.Token(woosh.NAME, 'message', 204, 12, 204, 19),
woosh.Token(woosh.OP, '.', 204, 19, 204, 20),
woosh.Token(woosh.NAME, 'append', 204, 20, 204, 26),
woosh.Token(woosh.OP, '(', 204, 26, 204, 27),
woosh.Token(woosh.STRING, '": section "', 204, 27, 204, 39),
woosh.Token(woosh.OP, ')', 204, 39, 204, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 204, 40, 205, 0),
woosh.Token(woosh.NAME, 'message', 205, 12, 205, 19),
woosh.Token(woosh.OP, '.', 205, 19, 205, 20),
woosh.Token(woosh.NAME, 'extend', 205, 20, 205, 26),
woosh.Token(woosh.OP, '(', 205, 26, 205, 27),
woosh.Token(woosh.NAME, 'msg', 205, 27, 205, 30),
woosh.Token(woosh.OP, ')', 205, 30, 205, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 205, 31, 206, 0),
woosh.Token(woosh.NAME, 'msg', 206, 12, 206, 15),
woosh.Token(woosh.OP, '=', 206, 16, 206, 17),
woosh.Token(woosh.NAME, 'message', 206, 18, 206, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 206, 25, 207, 0),
woosh.Token(woosh.DEDENT, ' ', 207, 0, 207, 8),
woosh.Token(woosh.NAME, 'else', 207, 8, 207, 12),
woosh.Token(woosh.OP, ':', 207, 12, 207, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 207, 13, 208, 0),
woosh.Token(woosh.INDENT, ' ', 208, 0, 208, 12),
woosh.Token(woosh.NAME, 'msg', 208, 12, 208, 15),
woosh.Token(woosh.OP, '.', 208, 15, 208, 16),
woosh.Token(woosh.NAME, 'insert', 208, 16, 208, 22),
woosh.Token(woosh.OP, '(', 208, 22, 208, 23),
woosh.Token(woosh.NUMBER, '0', 208, 23, 208, 24),
woosh.Token(woosh.OP, ',', 208, 24, 208, 25),
woosh.Token(woosh.STRING, '"Section "', 208, 26, 208, 36),
woosh.Token(woosh.OP, ')', 208, 36, 208, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 208, 37, 209, 0),
woosh.Token(woosh.DEDENT, ' ', 209, 0, 209, 8),
woosh.Token(woosh.NAME, 'Error', 209, 8, 209, 13),
woosh.Token(woosh.OP, '.', 209, 13, 209, 14),
woosh.Token(woosh.NAME, '__init__', 209, 14, 209, 22),
woosh.Token(woosh.OP, '(', 209, 22, 209, 23),
woosh.Token(woosh.NAME, 'self', 209, 23, 209, 27),
woosh.Token(woosh.OP, ',', 209, 27, 209, 28),
woosh.Token(woosh.STRING, '""', 209, 29, 209, 31),
woosh.Token(woosh.OP, '.', 209, 31, 209, 32),
woosh.Token(woosh.NAME, 'join', 209, 32, 209, 36),
woosh.Token(woosh.OP, '(', 209, 36, 209, 37),
woosh.Token(woosh.NAME, 'msg', 209, 37, 209, 40),
woosh.Token(woosh.OP, ')', 209, 40, 209, 41),
woosh.Token(woosh.OP, ')', 209, 41, 209, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 209, 42, 210, 0),
woosh.Token(woosh.NAME, 'self', 210, 8, 210, 12),
woosh.Token(woosh.OP, '.', 210, 12, 210, 13),
woosh.Token(woosh.NAME, 'section', 210, 13, 210, 20),
woosh.Token(woosh.OP, '=', 210, 21, 210, 22),
woosh.Token(woosh.NAME, 'section', 210, 23, 210, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 210, 30, 211, 0),
woosh.Token(woosh.NAME, 'self', 211, 8, 211, 12),
woosh.Token(woosh.OP, '.', 211, 12, 211, 13),
woosh.Token(woosh.NAME, 'source', 211, 13, 211, 19),
woosh.Token(woosh.OP, '=', 211, 20, 211, 21),
woosh.Token(woosh.NAME, 'source', 211, 22, 211, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 211, 28, 212, 0),
woosh.Token(woosh.NAME, 'self', 212, 8, 212, 12),
woosh.Token(woosh.OP, '.', 212, 12, 212, 13),
woosh.Token(woosh.NAME, 'lineno', 212, 13, 212, 19),
woosh.Token(woosh.OP, '=', 212, 20, 212, 21),
woosh.Token(woosh.NAME, 'lineno', 212, 22, 212, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 212, 28, 213, 0),
woosh.Token(woosh.NAME, 'self', 213, 8, 213, 12),
woosh.Token(woosh.OP, '.', 213, 12, 213, 13),
woosh.Token(woosh.NAME, 'args', 213, 13, 213, 17),
woosh.Token(woosh.OP, '=', 213, 18, 213, 19),
woosh.Token(woosh.OP, '(', 213, 20, 213, 21),
woosh.Token(woosh.NAME, 'section', 213, 21, 213, 28),
woosh.Token(woosh.OP, ',', 213, 28, 213, 29),
woosh.Token(woosh.NAME, 'source', 213, 30, 213, 36),
woosh.Token(woosh.OP, ',', 213, 36, 213, 37),
woosh.Token(woosh.NAME, 'lineno', 213, 38, 213, 44),
woosh.Token(woosh.OP, ')', 213, 44, 213, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 213, 45, 214, 0),
woosh.Token(woosh.DEDENT, '', 216, 0, 216, 0),
woosh.Token(woosh.DEDENT, '', 216, 0, 216, 0),
woosh.Token(woosh.NAME, 'class', 216, 0, 216, 5),
woosh.Token(woosh.NAME, 'DuplicateOptionError', 216, 6, 216, 26),
woosh.Token(woosh.OP, '(', 216, 26, 216, 27),
woosh.Token(woosh.NAME, 'Error', 216, 27, 216, 32),
woosh.Token(woosh.OP, ')', 216, 32, 216, 33),
woosh.Token(woosh.OP, ':', 216, 33, 216, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 216, 34, 217, 0),
woosh.Token(woosh.INDENT, ' ', 217, 0, 217, 4),
woosh.Token(woosh.STRING, '"""Raised by strict parsers when an option is repeated in an input source.\r\n\r\n Current implementation raises this exception only when an option is found\r\n more than once in a single file, string or dictionary.\r\n """', 217, 4, 221, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 221, 7, 222, 0),
woosh.Token(woosh.NAME, 'def', 223, 4, 223, 7),
woosh.Token(woosh.NAME, '__init__', 223, 8, 223, 16),
woosh.Token(woosh.OP, '(', 223, 16, 223, 17),
woosh.Token(woosh.NAME, 'self', 223, 17, 223, 21),
woosh.Token(woosh.OP, ',', 223, 21, 223, 22),
woosh.Token(woosh.NAME, 'section', 223, 23, 223, 30),
woosh.Token(woosh.OP, ',', 223, 30, 223, 31),
woosh.Token(woosh.NAME, 'option', 223, 32, 223, 38),
woosh.Token(woosh.OP, ',', 223, 38, 223, 39),
woosh.Token(woosh.NAME, 'source', 223, 40, 223, 46),
woosh.Token(woosh.OP, '=', 223, 46, 223, 47),
woosh.Token(woosh.NAME, 'None', 223, 47, 223, 51),
woosh.Token(woosh.OP, ',', 223, 51, 223, 52),
woosh.Token(woosh.NAME, 'lineno', 223, 53, 223, 59),
woosh.Token(woosh.OP, '=', 223, 59, 223, 60),
woosh.Token(woosh.NAME, 'None', 223, 60, 223, 64),
woosh.Token(woosh.OP, ')', 223, 64, 223, 65),
woosh.Token(woosh.OP, ':', 223, 65, 223, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 223, 66, 224, 0),
woosh.Token(woosh.INDENT, ' ', 224, 0, 224, 8),
woosh.Token(woosh.NAME, 'msg', 224, 8, 224, 11),
woosh.Token(woosh.OP, '=', 224, 12, 224, 13),
woosh.Token(woosh.OP, '[', 224, 14, 224, 15),
woosh.Token(woosh.NAME, 'repr', 224, 15, 224, 19),
woosh.Token(woosh.OP, '(', 224, 19, 224, 20),
woosh.Token(woosh.NAME, 'option', 224, 20, 224, 26),
woosh.Token(woosh.OP, ')', 224, 26, 224, 27),
woosh.Token(woosh.OP, ',', 224, 27, 224, 28),
woosh.Token(woosh.STRING, '" in section "', 224, 29, 224, 43),
woosh.Token(woosh.OP, ',', 224, 43, 224, 44),
woosh.Token(woosh.NAME, 'repr', 224, 45, 224, 49),
woosh.Token(woosh.OP, '(', 224, 49, 224, 50),
woosh.Token(woosh.NAME, 'section', 224, 50, 224, 57),
woosh.Token(woosh.OP, ')', 224, 57, 224, 58),
woosh.Token(woosh.OP, ',', 224, 58, 224, 59),
woosh.Token(woosh.STRING, '" already exists"', 225, 15, 225, 32),
woosh.Token(woosh.OP, ']', 225, 32, 225, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 225, 33, 226, 0),
woosh.Token(woosh.NAME, 'if', 226, 8, 226, 10),
woosh.Token(woosh.NAME, 'source', 226, 11, 226, 17),
woosh.Token(woosh.NAME, 'is', 226, 18, 226, 20),
woosh.Token(woosh.NAME, 'not', 226, 21, 226, 24),
woosh.Token(woosh.NAME, 'None', 226, 25, 226, 29),
woosh.Token(woosh.OP, ':', 226, 29, 226, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 226, 30, 227, 0),
woosh.Token(woosh.INDENT, ' ', 227, 0, 227, 12),
woosh.Token(woosh.NAME, 'message', 227, 12, 227, 19),
woosh.Token(woosh.OP, '=', 227, 20, 227, 21),
woosh.Token(woosh.OP, '[', 227, 22, 227, 23),
woosh.Token(woosh.STRING, '"While reading from "', 227, 23, 227, 44),
woosh.Token(woosh.OP, ',', 227, 44, 227, 45),
woosh.Token(woosh.NAME, 'repr', 227, 46, 227, 50),
woosh.Token(woosh.OP, '(', 227, 50, 227, 51),
woosh.Token(woosh.NAME, 'source', 227, 51, 227, 57),
woosh.Token(woosh.OP, ')', 227, 57, 227, 58),
woosh.Token(woosh.OP, ']', 227, 58, 227, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 227, 59, 228, 0),
woosh.Token(woosh.NAME, 'if', 228, 12, 228, 14),
woosh.Token(woosh.NAME, 'lineno', 228, 15, 228, 21),
woosh.Token(woosh.NAME, 'is', 228, 22, 228, 24),
woosh.Token(woosh.NAME, 'not', 228, 25, 228, 28),
woosh.Token(woosh.NAME, 'None', 228, 29, 228, 33),
woosh.Token(woosh.OP, ':', 228, 33, 228, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 228, 34, 229, 0),
woosh.Token(woosh.INDENT, ' ', 229, 0, 229, 16),
woosh.Token(woosh.NAME, 'message', 229, 16, 229, 23),
woosh.Token(woosh.OP, '.', 229, 23, 229, 24),
woosh.Token(woosh.NAME, 'append', 229, 24, 229, 30),
woosh.Token(woosh.OP, '(', 229, 30, 229, 31),
woosh.Token(woosh.STRING, '" [line {0:2d}]"', 229, 31, 229, 47),
woosh.Token(woosh.OP, '.', 229, 47, 229, 48),
woosh.Token(woosh.NAME, 'format', 229, 48, 229, 54),
woosh.Token(woosh.OP, '(', 229, 54, 229, 55),
woosh.Token(woosh.NAME, 'lineno', 229, 55, 229, 61),
woosh.Token(woosh.OP, ')', 229, 61, 229, 62),
woosh.Token(woosh.OP, ')', 229, 62, 229, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 229, 63, 230, 0),
woosh.Token(woosh.DEDENT, ' ', 230, 0, 230, 12),
woosh.Token(woosh.NAME, 'message', 230, 12, 230, 19),
woosh.Token(woosh.OP, '.', 230, 19, 230, 20),
woosh.Token(woosh.NAME, 'append', 230, 20, 230, 26),
woosh.Token(woosh.OP, '(', 230, 26, 230, 27),
woosh.Token(woosh.STRING, '": option "', 230, 27, 230, 38),
woosh.Token(woosh.OP, ')', 230, 38, 230, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 230, 39, 231, 0),
woosh.Token(woosh.NAME, 'message', 231, 12, 231, 19),
woosh.Token(woosh.OP, '.', 231, 19, 231, 20),
woosh.Token(woosh.NAME, 'extend', 231, 20, 231, 26),
woosh.Token(woosh.OP, '(', 231, 26, 231, 27),
woosh.Token(woosh.NAME, 'msg', 231, 27, 231, 30),
woosh.Token(woosh.OP, ')', 231, 30, 231, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 231, 31, 232, 0),
woosh.Token(woosh.NAME, 'msg', 232, 12, 232, 15),
woosh.Token(woosh.OP, '=', 232, 16, 232, 17),
woosh.Token(woosh.NAME, 'message', 232, 18, 232, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 232, 25, 233, 0),
woosh.Token(woosh.DEDENT, ' ', 233, 0, 233, 8),
woosh.Token(woosh.NAME, 'else', 233, 8, 233, 12),
woosh.Token(woosh.OP, ':', 233, 12, 233, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 233, 13, 234, 0),
woosh.Token(woosh.INDENT, ' ', 234, 0, 234, 12),
woosh.Token(woosh.NAME, 'msg', 234, 12, 234, 15),
woosh.Token(woosh.OP, '.', 234, 15, 234, 16),
woosh.Token(woosh.NAME, 'insert', 234, 16, 234, 22),
woosh.Token(woosh.OP, '(', 234, 22, 234, 23),
woosh.Token(woosh.NUMBER, '0', 234, 23, 234, 24),
woosh.Token(woosh.OP, ',', 234, 24, 234, 25),
woosh.Token(woosh.STRING, '"Option "', 234, 26, 234, 35),
woosh.Token(woosh.OP, ')', 234, 35, 234, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 234, 36, 235, 0),
woosh.Token(woosh.DEDENT, ' ', 235, 0, 235, 8),
woosh.Token(woosh.NAME, 'Error', 235, 8, 235, 13),
woosh.Token(woosh.OP, '.', 235, 13, 235, 14),
woosh.Token(woosh.NAME, '__init__', 235, 14, 235, 22),
woosh.Token(woosh.OP, '(', 235, 22, 235, 23),
woosh.Token(woosh.NAME, 'self', 235, 23, 235, 27),
woosh.Token(woosh.OP, ',', 235, 27, 235, 28),
woosh.Token(woosh.STRING, '""', 235, 29, 235, 31),
woosh.Token(woosh.OP, '.', 235, 31, 235, 32),
woosh.Token(woosh.NAME, 'join', 235, 32, 235, 36),
woosh.Token(woosh.OP, '(', 235, 36, 235, 37),
woosh.Token(woosh.NAME, 'msg', 235, 37, 235, 40),
woosh.Token(woosh.OP, ')', 235, 40, 235, 41),
woosh.Token(woosh.OP, ')', 235, 41, 235, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 235, 42, 236, 0),
woosh.Token(woosh.NAME, 'self', 236, 8, 236, 12),
woosh.Token(woosh.OP, '.', 236, 12, 236, 13),
woosh.Token(woosh.NAME, 'section', 236, 13, 236, 20),
woosh.Token(woosh.OP, '=', 236, 21, 236, 22),
woosh.Token(woosh.NAME, 'section', 236, 23, 236, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 236, 30, 237, 0),
woosh.Token(woosh.NAME, 'self', 237, 8, 237, 12),
woosh.Token(woosh.OP, '.', 237, 12, 237, 13),
woosh.Token(woosh.NAME, 'option', 237, 13, 237, 19),
woosh.Token(woosh.OP, '=', 237, 20, 237, 21),
woosh.Token(woosh.NAME, 'option', 237, 22, 237, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 237, 28, 238, 0),
woosh.Token(woosh.NAME, 'self', 238, 8, 238, 12),
woosh.Token(woosh.OP, '.', 238, 12, 238, 13),
woosh.Token(woosh.NAME, 'source', 238, 13, 238, 19),
woosh.Token(woosh.OP, '=', 238, 20, 238, 21),
woosh.Token(woosh.NAME, 'source', 238, 22, 238, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 238, 28, 239, 0),
woosh.Token(woosh.NAME, 'self', 239, 8, 239, 12),
woosh.Token(woosh.OP, '.', 239, 12, 239, 13),
woosh.Token(woosh.NAME, 'lineno', 239, 13, 239, 19),
woosh.Token(woosh.OP, '=', 239, 20, 239, 21),
woosh.Token(woosh.NAME, 'lineno', 239, 22, 239, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 239, 28, 240, 0),
woosh.Token(woosh.NAME, 'self', 240, 8, 240, 12),
woosh.Token(woosh.OP, '.', 240, 12, 240, 13),
woosh.Token(woosh.NAME, 'args', 240, 13, 240, 17),
woosh.Token(woosh.OP, '=', 240, 18, 240, 19),
woosh.Token(woosh.OP, '(', 240, 20, 240, 21),
woosh.Token(woosh.NAME, 'section', 240, 21, 240, 28),
woosh.Token(woosh.OP, ',', 240, 28, 240, 29),
woosh.Token(woosh.NAME, 'option', 240, 30, 240, 36),
woosh.Token(woosh.OP, ',', 240, 36, 240, 37),
woosh.Token(woosh.NAME, 'source', 240, 38, 240, 44),
woosh.Token(woosh.OP, ',', 240, 44, 240, 45),
woosh.Token(woosh.NAME, 'lineno', 240, 46, 240, 52),
woosh.Token(woosh.OP, ')', 240, 52, 240, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 240, 53, 241, 0),
woosh.Token(woosh.DEDENT, '', 243, 0, 243, 0),
woosh.Token(woosh.DEDENT, '', 243, 0, 243, 0),
woosh.Token(woosh.NAME, 'class', 243, 0, 243, 5),
woosh.Token(woosh.NAME, 'NoOptionError', 243, 6, 243, 19),
woosh.Token(woosh.OP, '(', 243, 19, 243, 20),
woosh.Token(woosh.NAME, 'Error', 243, 20, 243, 25),
woosh.Token(woosh.OP, ')', 243, 25, 243, 26),
woosh.Token(woosh.OP, ':', 243, 26, 243, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 243, 27, 244, 0),
woosh.Token(woosh.INDENT, ' ', 244, 0, 244, 4),
woosh.Token(woosh.STRING, '"""A requested option was not found."""', 244, 4, 244, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 244, 43, 245, 0),
woosh.Token(woosh.NAME, 'def', 246, 4, 246, 7),
woosh.Token(woosh.NAME, '__init__', 246, 8, 246, 16),
woosh.Token(woosh.OP, '(', 246, 16, 246, 17),
woosh.Token(woosh.NAME, 'self', 246, 17, 246, 21),
woosh.Token(woosh.OP, ',', 246, 21, 246, 22),
woosh.Token(woosh.NAME, 'option', 246, 23, 246, 29),
woosh.Token(woosh.OP, ',', 246, 29, 246, 30),
woosh.Token(woosh.NAME, 'section', 246, 31, 246, 38),
woosh.Token(woosh.OP, ')', 246, 38, 246, 39),
woosh.Token(woosh.OP, ':', 246, 39, 246, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 246, 40, 247, 0),
woosh.Token(woosh.INDENT, ' ', 247, 0, 247, 8),
woosh.Token(woosh.NAME, 'Error', 247, 8, 247, 13),
woosh.Token(woosh.OP, '.', 247, 13, 247, 14),
woosh.Token(woosh.NAME, '__init__', 247, 14, 247, 22),
woosh.Token(woosh.OP, '(', 247, 22, 247, 23),
woosh.Token(woosh.NAME, 'self', 247, 23, 247, 27),
woosh.Token(woosh.OP, ',', 247, 27, 247, 28),
woosh.Token(woosh.STRING, '"No option %r in section: %r"', 247, 29, 247, 58),
woosh.Token(woosh.OP, '%', 247, 59, 247, 60),
woosh.Token(woosh.OP, '(', 248, 23, 248, 24),
woosh.Token(woosh.NAME, 'option', 248, 24, 248, 30),
woosh.Token(woosh.OP, ',', 248, 30, 248, 31),
woosh.Token(woosh.NAME, 'section', 248, 32, 248, 39),
woosh.Token(woosh.OP, ')', 248, 39, 248, 40),
woosh.Token(woosh.OP, ')', 248, 40, 248, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 248, 41, 249, 0),
woosh.Token(woosh.NAME, 'self', 249, 8, 249, 12),
woosh.Token(woosh.OP, '.', 249, 12, 249, 13),
woosh.Token(woosh.NAME, 'option', 249, 13, 249, 19),
woosh.Token(woosh.OP, '=', 249, 20, 249, 21),
woosh.Token(woosh.NAME, 'option', 249, 22, 249, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 249, 28, 250, 0),
woosh.Token(woosh.NAME, 'self', 250, 8, 250, 12),
woosh.Token(woosh.OP, '.', 250, 12, 250, 13),
woosh.Token(woosh.NAME, 'section', 250, 13, 250, 20),
woosh.Token(woosh.OP, '=', 250, 21, 250, 22),
woosh.Token(woosh.NAME, 'section', 250, 23, 250, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 250, 30, 251, 0),
woosh.Token(woosh.NAME, 'self', 251, 8, 251, 12),
woosh.Token(woosh.OP, '.', 251, 12, 251, 13),
woosh.Token(woosh.NAME, 'args', 251, 13, 251, 17),
woosh.Token(woosh.OP, '=', 251, 18, 251, 19),
woosh.Token(woosh.OP, '(', 251, 20, 251, 21),
woosh.Token(woosh.NAME, 'option', 251, 21, 251, 27),
woosh.Token(woosh.OP, ',', 251, 27, 251, 28),
woosh.Token(woosh.NAME, 'section', 251, 29, 251, 36),
woosh.Token(woosh.OP, ')', 251, 36, 251, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 251, 37, 252, 0),
woosh.Token(woosh.DEDENT, '', 254, 0, 254, 0),
woosh.Token(woosh.DEDENT, '', 254, 0, 254, 0),
woosh.Token(woosh.NAME, 'class', 254, 0, 254, 5),
woosh.Token(woosh.NAME, 'InterpolationError', 254, 6, 254, 24),
woosh.Token(woosh.OP, '(', 254, 24, 254, 25),
woosh.Token(woosh.NAME, 'Error', 254, 25, 254, 30),
woosh.Token(woosh.OP, ')', 254, 30, 254, 31),
woosh.Token(woosh.OP, ':', 254, 31, 254, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 254, 32, 255, 0),
woosh.Token(woosh.INDENT, ' ', 255, 0, 255, 4),
woosh.Token(woosh.STRING, '"""Base class for interpolation-related exceptions."""', 255, 4, 255, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 255, 58, 256, 0),
woosh.Token(woosh.NAME, 'def', 257, 4, 257, 7),
woosh.Token(woosh.NAME, '__init__', 257, 8, 257, 16),
woosh.Token(woosh.OP, '(', 257, 16, 257, 17),
woosh.Token(woosh.NAME, 'self', 257, 17, 257, 21),
woosh.Token(woosh.OP, ',', 257, 21, 257, 22),
woosh.Token(woosh.NAME, 'option', 257, 23, 257, 29),
woosh.Token(woosh.OP, ',', 257, 29, 257, 30),
woosh.Token(woosh.NAME, 'section', 257, 31, 257, 38),
woosh.Token(woosh.OP, ',', 257, 38, 257, 39),
woosh.Token(woosh.NAME, 'msg', 257, 40, 257, 43),
woosh.Token(woosh.OP, ')', 257, 43, 257, 44),
woosh.Token(woosh.OP, ':', 257, 44, 257, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 257, 45, 258, 0),
woosh.Token(woosh.INDENT, ' ', 258, 0, 258, 8),
woosh.Token(woosh.NAME, 'Error', 258, 8, 258, 13),
woosh.Token(woosh.OP, '.', 258, 13, 258, 14),
woosh.Token(woosh.NAME, '__init__', 258, 14, 258, 22),
woosh.Token(woosh.OP, '(', 258, 22, 258, 23),
woosh.Token(woosh.NAME, 'self', 258, 23, 258, 27),
woosh.Token(woosh.OP, ',', 258, 27, 258, 28),
woosh.Token(woosh.NAME, 'msg', 258, 29, 258, 32),
woosh.Token(woosh.OP, ')', 258, 32, 258, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 258, 33, 259, 0),
woosh.Token(woosh.NAME, 'self', 259, 8, 259, 12),
woosh.Token(woosh.OP, '.', 259, 12, 259, 13),
woosh.Token(woosh.NAME, 'option', 259, 13, 259, 19),
woosh.Token(woosh.OP, '=', 259, 20, 259, 21),
woosh.Token(woosh.NAME, 'option', 259, 22, 259, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 259, 28, 260, 0),
woosh.Token(woosh.NAME, 'self', 260, 8, 260, 12),
woosh.Token(woosh.OP, '.', 260, 12, 260, 13),
woosh.Token(woosh.NAME, 'section', 260, 13, 260, 20),
woosh.Token(woosh.OP, '=', 260, 21, 260, 22),
woosh.Token(woosh.NAME, 'section', 260, 23, 260, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 260, 30, 261, 0),
woosh.Token(woosh.NAME, 'self', 261, 8, 261, 12),
woosh.Token(woosh.OP, '.', 261, 12, 261, 13),
woosh.Token(woosh.NAME, 'args', 261, 13, 261, 17),
woosh.Token(woosh.OP, '=', 261, 18, 261, 19),
woosh.Token(woosh.OP, '(', 261, 20, 261, 21),
woosh.Token(woosh.NAME, 'option', 261, 21, 261, 27),
woosh.Token(woosh.OP, ',', 261, 27, 261, 28),
woosh.Token(woosh.NAME, 'section', 261, 29, 261, 36),
woosh.Token(woosh.OP, ',', 261, 36, 261, 37),
woosh.Token(woosh.NAME, 'msg', 261, 38, 261, 41),
woosh.Token(woosh.OP, ')', 261, 41, 261, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 261, 42, 262, 0),
woosh.Token(woosh.DEDENT, '', 264, 0, 264, 0),
woosh.Token(woosh.DEDENT, '', 264, 0, 264, 0),
woosh.Token(woosh.NAME, 'class', 264, 0, 264, 5),
woosh.Token(woosh.NAME, 'InterpolationMissingOptionError', 264, 6, 264, 37),
woosh.Token(woosh.OP, '(', 264, 37, 264, 38),
woosh.Token(woosh.NAME, 'InterpolationError', 264, 38, 264, 56),
woosh.Token(woosh.OP, ')', 264, 56, 264, 57),
woosh.Token(woosh.OP, ':', 264, 57, 264, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 264, 58, 265, 0),
woosh.Token(woosh.INDENT, ' ', 265, 0, 265, 4),
woosh.Token(woosh.STRING, '"""A string substitution required a setting which was not available."""', 265, 4, 265, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 265, 75, 266, 0),
woosh.Token(woosh.NAME, 'def', 267, 4, 267, 7),
woosh.Token(woosh.NAME, '__init__', 267, 8, 267, 16),
woosh.Token(woosh.OP, '(', 267, 16, 267, 17),
woosh.Token(woosh.NAME, 'self', 267, 17, 267, 21),
woosh.Token(woosh.OP, ',', 267, 21, 267, 22),
woosh.Token(woosh.NAME, 'option', 267, 23, 267, 29),
woosh.Token(woosh.OP, ',', 267, 29, 267, 30),
woosh.Token(woosh.NAME, 'section', 267, 31, 267, 38),
woosh.Token(woosh.OP, ',', 267, 38, 267, 39),
woosh.Token(woosh.NAME, 'rawval', 267, 40, 267, 46),
woosh.Token(woosh.OP, ',', 267, 46, 267, 47),
woosh.Token(woosh.NAME, 'reference', 267, 48, 267, 57),
woosh.Token(woosh.OP, ')', 267, 57, 267, 58),
woosh.Token(woosh.OP, ':', 267, 58, 267, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 267, 59, 268, 0),
woosh.Token(woosh.INDENT, ' ', 268, 0, 268, 8),
woosh.Token(woosh.NAME, 'msg', 268, 8, 268, 11),
woosh.Token(woosh.OP, '=', 268, 12, 268, 13),
woosh.Token(woosh.OP, '(', 268, 14, 268, 15),
woosh.Token(woosh.STRING, '"Bad value substitution: option {!r} in section {!r} contains "', 268, 15, 268, 78),
woosh.Token(woosh.STRING, '"an interpolation key {!r} which is not a valid option name. "', 269, 15, 269, 77),
woosh.Token(woosh.STRING, '"Raw value: {!r}"', 270, 15, 270, 32),
woosh.Token(woosh.OP, '.', 270, 32, 270, 33),
woosh.Token(woosh.NAME, 'format', 270, 33, 270, 39),
woosh.Token(woosh.OP, '(', 270, 39, 270, 40),
woosh.Token(woosh.NAME, 'option', 270, 40, 270, 46),
woosh.Token(woosh.OP, ',', 270, 46, 270, 47),
woosh.Token(woosh.NAME, 'section', 270, 48, 270, 55),
woosh.Token(woosh.OP, ',', 270, 55, 270, 56),
woosh.Token(woosh.NAME, 'reference', 270, 57, 270, 66),
woosh.Token(woosh.OP, ',', 270, 66, 270, 67),
woosh.Token(woosh.NAME, 'rawval', 270, 68, 270, 74),
woosh.Token(woosh.OP, ')', 270, 74, 270, 75),
woosh.Token(woosh.OP, ')', 270, 75, 270, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 270, 76, 271, 0),
woosh.Token(woosh.NAME, 'InterpolationError', 271, 8, 271, 26),
woosh.Token(woosh.OP, '.', 271, 26, 271, 27),
woosh.Token(woosh.NAME, '__init__', 271, 27, 271, 35),
woosh.Token(woosh.OP, '(', 271, 35, 271, 36),
woosh.Token(woosh.NAME, 'self', 271, 36, 271, 40),
woosh.Token(woosh.OP, ',', 271, 40, 271, 41),
woosh.Token(woosh.NAME, 'option', 271, 42, 271, 48),
woosh.Token(woosh.OP, ',', 271, 48, 271, 49),
woosh.Token(woosh.NAME, 'section', 271, 50, 271, 57),
woosh.Token(woosh.OP, ',', 271, 57, 271, 58),
woosh.Token(woosh.NAME, 'msg', 271, 59, 271, 62),
woosh.Token(woosh.OP, ')', 271, 62, 271, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 271, 63, 272, 0),
woosh.Token(woosh.NAME, 'self', 272, 8, 272, 12),
woosh.Token(woosh.OP, '.', 272, 12, 272, 13),
woosh.Token(woosh.NAME, 'reference', 272, 13, 272, 22),
woosh.Token(woosh.OP, '=', 272, 23, 272, 24),
woosh.Token(woosh.NAME, 'reference', 272, 25, 272, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 272, 34, 273, 0),
woosh.Token(woosh.NAME, 'self', 273, 8, 273, 12),
woosh.Token(woosh.OP, '.', 273, 12, 273, 13),
woosh.Token(woosh.NAME, 'args', 273, 13, 273, 17),
woosh.Token(woosh.OP, '=', 273, 18, 273, 19),
woosh.Token(woosh.OP, '(', 273, 20, 273, 21),
woosh.Token(woosh.NAME, 'option', 273, 21, 273, 27),
woosh.Token(woosh.OP, ',', 273, 27, 273, 28),
woosh.Token(woosh.NAME, 'section', 273, 29, 273, 36),
woosh.Token(woosh.OP, ',', 273, 36, 273, 37),
woosh.Token(woosh.NAME, 'rawval', 273, 38, 273, 44),
woosh.Token(woosh.OP, ',', 273, 44, 273, 45),
woosh.Token(woosh.NAME, 'reference', 273, 46, 273, 55),
woosh.Token(woosh.OP, ')', 273, 55, 273, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 273, 56, 274, 0),
woosh.Token(woosh.DEDENT, '', 276, 0, 276, 0),
woosh.Token(woosh.DEDENT, '', 276, 0, 276, 0),
woosh.Token(woosh.NAME, 'class', 276, 0, 276, 5),
woosh.Token(woosh.NAME, 'InterpolationSyntaxError', 276, 6, 276, 30),
woosh.Token(woosh.OP, '(', 276, 30, 276, 31),
woosh.Token(woosh.NAME, 'InterpolationError', 276, 31, 276, 49),
woosh.Token(woosh.OP, ')', 276, 49, 276, 50),
woosh.Token(woosh.OP, ':', 276, 50, 276, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 276, 51, 277, 0),
woosh.Token(woosh.INDENT, ' ', 277, 0, 277, 4),
woosh.Token(woosh.STRING, '"""Raised when the source text contains invalid syntax.\r\n\r\n Current implementation raises this exception when the source text into\r\n which substitutions are made does not conform to the required syntax.\r\n """', 277, 4, 281, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 281, 7, 282, 0),
woosh.Token(woosh.DEDENT, '', 284, 0, 284, 0),
woosh.Token(woosh.NAME, 'class', 284, 0, 284, 5),
woosh.Token(woosh.NAME, 'InterpolationDepthError', 284, 6, 284, 29),
woosh.Token(woosh.OP, '(', 284, 29, 284, 30),
woosh.Token(woosh.NAME, 'InterpolationError', 284, 30, 284, 48),
woosh.Token(woosh.OP, ')', 284, 48, 284, 49),
woosh.Token(woosh.OP, ':', 284, 49, 284, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 284, 50, 285, 0),
woosh.Token(woosh.INDENT, ' ', 285, 0, 285, 4),
woosh.Token(woosh.STRING, '"""Raised when substitutions are nested too deeply."""', 285, 4, 285, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 285, 58, 286, 0),
woosh.Token(woosh.NAME, 'def', 287, 4, 287, 7),
woosh.Token(woosh.NAME, '__init__', 287, 8, 287, 16),
woosh.Token(woosh.OP, '(', 287, 16, 287, 17),
woosh.Token(woosh.NAME, 'self', 287, 17, 287, 21),
woosh.Token(woosh.OP, ',', 287, 21, 287, 22),
woosh.Token(woosh.NAME, 'option', 287, 23, 287, 29),
woosh.Token(woosh.OP, ',', 287, 29, 287, 30),
woosh.Token(woosh.NAME, 'section', 287, 31, 287, 38),
woosh.Token(woosh.OP, ',', 287, 38, 287, 39),
woosh.Token(woosh.NAME, 'rawval', 287, 40, 287, 46),
woosh.Token(woosh.OP, ')', 287, 46, 287, 47),
woosh.Token(woosh.OP, ':', 287, 47, 287, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 287, 48, 288, 0),
woosh.Token(woosh.INDENT, ' ', 288, 0, 288, 8),
woosh.Token(woosh.NAME, 'msg', 288, 8, 288, 11),
woosh.Token(woosh.OP, '=', 288, 12, 288, 13),
woosh.Token(woosh.OP, '(', 288, 14, 288, 15),
woosh.Token(woosh.STRING, '"Recursion limit exceeded in value substitution: option {!r} "', 288, 15, 288, 77),
woosh.Token(woosh.STRING, '"in section {!r} contains an interpolation key which "', 289, 15, 289, 69),
woosh.Token(woosh.STRING, '"cannot be substituted in {} steps. Raw value: {!r}"', 290, 15, 290, 67),
woosh.Token(woosh.STRING, '""', 291, 15, 291, 17),
woosh.Token(woosh.OP, '.', 291, 17, 291, 18),
woosh.Token(woosh.NAME, 'format', 291, 18, 291, 24),
woosh.Token(woosh.OP, '(', 291, 24, 291, 25),
woosh.Token(woosh.NAME, 'option', 291, 25, 291, 31),
woosh.Token(woosh.OP, ',', 291, 31, 291, 32),
woosh.Token(woosh.NAME, 'section', 291, 33, 291, 40),
woosh.Token(woosh.OP, ',', 291, 40, 291, 41),
woosh.Token(woosh.NAME, 'MAX_INTERPOLATION_DEPTH', 291, 42, 291, 65),
woosh.Token(woosh.OP, ',', 291, 65, 291, 66),
woosh.Token(woosh.NAME, 'rawval', 292, 25, 292, 31),
woosh.Token(woosh.OP, ')', 292, 31, 292, 32),
woosh.Token(woosh.OP, ')', 292, 32, 292, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 292, 33, 293, 0),
woosh.Token(woosh.NAME, 'InterpolationError', 293, 8, 293, 26),
woosh.Token(woosh.OP, '.', 293, 26, 293, 27),
woosh.Token(woosh.NAME, '__init__', 293, 27, 293, 35),
woosh.Token(woosh.OP, '(', 293, 35, 293, 36),
woosh.Token(woosh.NAME, 'self', 293, 36, 293, 40),
woosh.Token(woosh.OP, ',', 293, 40, 293, 41),
woosh.Token(woosh.NAME, 'option', 293, 42, 293, 48),
woosh.Token(woosh.OP, ',', 293, 48, 293, 49),
woosh.Token(woosh.NAME, 'section', 293, 50, 293, 57),
woosh.Token(woosh.OP, ',', 293, 57, 293, 58),
woosh.Token(woosh.NAME, 'msg', 293, 59, 293, 62),
woosh.Token(woosh.OP, ')', 293, 62, 293, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 293, 63, 294, 0),
woosh.Token(woosh.NAME, 'self', 294, 8, 294, 12),
woosh.Token(woosh.OP, '.', 294, 12, 294, 13),
woosh.Token(woosh.NAME, 'args', 294, 13, 294, 17),
woosh.Token(woosh.OP, '=', 294, 18, 294, 19),
woosh.Token(woosh.OP, '(', 294, 20, 294, 21),
woosh.Token(woosh.NAME, 'option', 294, 21, 294, 27),
woosh.Token(woosh.OP, ',', 294, 27, 294, 28),
woosh.Token(woosh.NAME, 'section', 294, 29, 294, 36),
woosh.Token(woosh.OP, ',', 294, 36, 294, 37),
woosh.Token(woosh.NAME, 'rawval', 294, 38, 294, 44),
woosh.Token(woosh.OP, ')', 294, 44, 294, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 294, 45, 295, 0),
woosh.Token(woosh.DEDENT, '', 297, 0, 297, 0),
woosh.Token(woosh.DEDENT, '', 297, 0, 297, 0),
woosh.Token(woosh.NAME, 'class', 297, 0, 297, 5),
woosh.Token(woosh.NAME, 'ParsingError', 297, 6, 297, 18),
woosh.Token(woosh.OP, '(', 297, 18, 297, 19),
woosh.Token(woosh.NAME, 'Error', 297, 19, 297, 24),
woosh.Token(woosh.OP, ')', 297, 24, 297, 25),
woosh.Token(woosh.OP, ':', 297, 25, 297, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 297, 26, 298, 0),
woosh.Token(woosh.INDENT, ' ', 298, 0, 298, 4),
woosh.Token(woosh.STRING, '"""Raised when a configuration file does not follow legal syntax."""', 298, 4, 298, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 298, 72, 299, 0),
woosh.Token(woosh.NAME, 'def', 300, 4, 300, 7),
woosh.Token(woosh.NAME, '__init__', 300, 8, 300, 16),
woosh.Token(woosh.OP, '(', 300, 16, 300, 17),
woosh.Token(woosh.NAME, 'self', 300, 17, 300, 21),
woosh.Token(woosh.OP, ',', 300, 21, 300, 22),
woosh.Token(woosh.NAME, 'source', 300, 23, 300, 29),
woosh.Token(woosh.OP, '=', 300, 29, 300, 30),
woosh.Token(woosh.NAME, 'None', 300, 30, 300, 34),
woosh.Token(woosh.OP, ',', 300, 34, 300, 35),
woosh.Token(woosh.NAME, 'filename', 300, 36, 300, 44),
woosh.Token(woosh.OP, '=', 300, 44, 300, 45),
woosh.Token(woosh.NAME, 'None', 300, 45, 300, 49),
woosh.Token(woosh.OP, ')', 300, 49, 300, 50),
woosh.Token(woosh.OP, ':', 300, 50, 300, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 300, 51, 301, 0),
woosh.Token(woosh.COMMENT, "# Exactly one of `source'/`filename' arguments has to be given.", 301, 8, 301, 71),
woosh.Token(woosh.COMMENT, "# `filename' kept for compatibility.", 302, 8, 302, 44),
woosh.Token(woosh.INDENT, ' ', 303, 0, 303, 8),
woosh.Token(woosh.NAME, 'if', 303, 8, 303, 10),
woosh.Token(woosh.NAME, 'filename', 303, 11, 303, 19),
woosh.Token(woosh.NAME, 'and', 303, 20, 303, 23),
woosh.Token(woosh.NAME, 'source', 303, 24, 303, 30),
woosh.Token(woosh.OP, ':', 303, 30, 303, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 303, 31, 304, 0),
woosh.Token(woosh.INDENT, ' ', 304, 0, 304, 12),
woosh.Token(woosh.NAME, 'raise', 304, 12, 304, 17),
woosh.Token(woosh.NAME, 'ValueError', 304, 18, 304, 28),
woosh.Token(woosh.OP, '(', 304, 28, 304, 29),
woosh.Token(woosh.STRING, '"Cannot specify both `filename\' and `source\'. "', 304, 29, 304, 76),
woosh.Token(woosh.STRING, '"Use `source\'."', 305, 29, 305, 44),
woosh.Token(woosh.OP, ')', 305, 44, 305, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 305, 45, 306, 0),
woosh.Token(woosh.DEDENT, ' ', 306, 0, 306, 8),
woosh.Token(woosh.NAME, 'elif', 306, 8, 306, 12),
woosh.Token(woosh.NAME, 'not', 306, 13, 306, 16),
woosh.Token(woosh.NAME, 'filename', 306, 17, 306, 25),
woosh.Token(woosh.NAME, 'and', 306, 26, 306, 29),
woosh.Token(woosh.NAME, 'not', 306, 30, 306, 33),
woosh.Token(woosh.NAME, 'source', 306, 34, 306, 40),
woosh.Token(woosh.OP, ':', 306, 40, 306, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 306, 41, 307, 0),
woosh.Token(woosh.INDENT, ' ', 307, 0, 307, 12),
woosh.Token(woosh.NAME, 'raise', 307, 12, 307, 17),
woosh.Token(woosh.NAME, 'ValueError', 307, 18, 307, 28),
woosh.Token(woosh.OP, '(', 307, 28, 307, 29),
woosh.Token(woosh.STRING, '"Required argument `source\' not given."', 307, 29, 307, 68),
woosh.Token(woosh.OP, ')', 307, 68, 307, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 307, 69, 308, 0),
woosh.Token(woosh.DEDENT, ' ', 308, 0, 308, 8),
woosh.Token(woosh.NAME, 'elif', 308, 8, 308, 12),
woosh.Token(woosh.NAME, 'filename', 308, 13, 308, 21),
woosh.Token(woosh.OP, ':', 308, 21, 308, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 308, 22, 309, 0),
woosh.Token(woosh.INDENT, ' ', 309, 0, 309, 12),
woosh.Token(woosh.NAME, 'source', 309, 12, 309, 18),
woosh.Token(woosh.OP, '=', 309, 19, 309, 20),
woosh.Token(woosh.NAME, 'filename', 309, 21, 309, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 309, 29, 310, 0),
woosh.Token(woosh.DEDENT, ' ', 310, 0, 310, 8),
woosh.Token(woosh.NAME, 'Error', 310, 8, 310, 13),
woosh.Token(woosh.OP, '.', 310, 13, 310, 14),
woosh.Token(woosh.NAME, '__init__', 310, 14, 310, 22),
woosh.Token(woosh.OP, '(', 310, 22, 310, 23),
woosh.Token(woosh.NAME, 'self', 310, 23, 310, 27),
woosh.Token(woosh.OP, ',', 310, 27, 310, 28),
woosh.Token(woosh.STRING, "'Source contains parsing errors: %r'", 310, 29, 310, 65),
woosh.Token(woosh.OP, '%', 310, 66, 310, 67),
woosh.Token(woosh.NAME, 'source', 310, 68, 310, 74),
woosh.Token(woosh.OP, ')', 310, 74, 310, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 310, 75, 311, 0),
woosh.Token(woosh.NAME, 'self', 311, 8, 311, 12),
woosh.Token(woosh.OP, '.', 311, 12, 311, 13),
woosh.Token(woosh.NAME, 'source', 311, 13, 311, 19),
woosh.Token(woosh.OP, '=', 311, 20, 311, 21),
woosh.Token(woosh.NAME, 'source', 311, 22, 311, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 311, 28, 312, 0),
woosh.Token(woosh.NAME, 'self', 312, 8, 312, 12),
woosh.Token(woosh.OP, '.', 312, 12, 312, 13),
woosh.Token(woosh.NAME, 'errors', 312, 13, 312, 19),
woosh.Token(woosh.OP, '=', 312, 20, 312, 21),
woosh.Token(woosh.OP, '[', 312, 22, 312, 23),
woosh.Token(woosh.OP, ']', 312, 23, 312, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 312, 24, 313, 0),
woosh.Token(woosh.NAME, 'self', 313, 8, 313, 12),
woosh.Token(woosh.OP, '.', 313, 12, 313, 13),
woosh.Token(woosh.NAME, 'args', 313, 13, 313, 17),
woosh.Token(woosh.OP, '=', 313, 18, 313, 19),
woosh.Token(woosh.OP, '(', 313, 20, 313, 21),
woosh.Token(woosh.NAME, 'source', 313, 21, 313, 27),
woosh.Token(woosh.OP, ',', 313, 27, 313, 28),
woosh.Token(woosh.OP, ')', 313, 29, 313, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 313, 30, 314, 0),
woosh.Token(woosh.DEDENT, ' ', 315, 0, 315, 4),
woosh.Token(woosh.OP, '@', 315, 4, 315, 5),
woosh.Token(woosh.NAME, 'property', 315, 5, 315, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 315, 13, 316, 0),
woosh.Token(woosh.NAME, 'def', 316, 4, 316, 7),
woosh.Token(woosh.NAME, 'filename', 316, 8, 316, 16),
woosh.Token(woosh.OP, '(', 316, 16, 316, 17),
woosh.Token(woosh.NAME, 'self', 316, 17, 316, 21),
woosh.Token(woosh.OP, ')', 316, 21, 316, 22),
woosh.Token(woosh.OP, ':', 316, 22, 316, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 316, 23, 317, 0),
woosh.Token(woosh.INDENT, ' ', 317, 0, 317, 8),
woosh.Token(woosh.STRING, '"""Deprecated, use `source\'."""', 317, 8, 317, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 317, 39, 318, 0),
woosh.Token(woosh.NAME, 'warnings', 318, 8, 318, 16),
woosh.Token(woosh.OP, '.', 318, 16, 318, 17),
woosh.Token(woosh.NAME, 'warn', 318, 17, 318, 21),
woosh.Token(woosh.OP, '(', 318, 21, 318, 22),
woosh.Token(woosh.STRING, '"The \'filename\' attribute will be removed in future versions. "', 319, 12, 319, 76),
woosh.Token(woosh.STRING, '"Use \'source\' instead."', 320, 12, 320, 35),
woosh.Token(woosh.OP, ',', 320, 35, 320, 36),
woosh.Token(woosh.NAME, 'DeprecationWarning', 321, 12, 321, 30),
woosh.Token(woosh.OP, ',', 321, 30, 321, 31),
woosh.Token(woosh.NAME, 'stacklevel', 321, 32, 321, 42),
woosh.Token(woosh.OP, '=', 321, 42, 321, 43),
woosh.Token(woosh.NUMBER, '2', 321, 43, 321, 44),
woosh.Token(woosh.OP, ')', 322, 8, 322, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 322, 9, 323, 0),
woosh.Token(woosh.NAME, 'return', 323, 8, 323, 14),
woosh.Token(woosh.NAME, 'self', 323, 15, 323, 19),
woosh.Token(woosh.OP, '.', 323, 19, 323, 20),
woosh.Token(woosh.NAME, 'source', 323, 20, 323, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 323, 26, 324, 0),
woosh.Token(woosh.DEDENT, ' ', 325, 0, 325, 4),
woosh.Token(woosh.OP, '@', 325, 4, 325, 5),
woosh.Token(woosh.NAME, 'filename', 325, 5, 325, 13),
woosh.Token(woosh.OP, '.', 325, 13, 325, 14),
woosh.Token(woosh.NAME, 'setter', 325, 14, 325, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 325, 20, 326, 0),
woosh.Token(woosh.NAME, 'def', 326, 4, 326, 7),
woosh.Token(woosh.NAME, 'filename', 326, 8, 326, 16),
woosh.Token(woosh.OP, '(', 326, 16, 326, 17),
woosh.Token(woosh.NAME, 'self', 326, 17, 326, 21),
woosh.Token(woosh.OP, ',', 326, 21, 326, 22),
woosh.Token(woosh.NAME, 'value', 326, 23, 326, 28),
woosh.Token(woosh.OP, ')', 326, 28, 326, 29),
woosh.Token(woosh.OP, ':', 326, 29, 326, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 326, 30, 327, 0),
woosh.Token(woosh.INDENT, ' ', 327, 0, 327, 8),
woosh.Token(woosh.STRING, '"""Deprecated, user `source\'."""', 327, 8, 327, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 327, 40, 328, 0),
woosh.Token(woosh.NAME, 'warnings', 328, 8, 328, 16),
woosh.Token(woosh.OP, '.', 328, 16, 328, 17),
woosh.Token(woosh.NAME, 'warn', 328, 17, 328, 21),
woosh.Token(woosh.OP, '(', 328, 21, 328, 22),
woosh.Token(woosh.STRING, '"The \'filename\' attribute will be removed in future versions. "', 329, 12, 329, 76),
woosh.Token(woosh.STRING, '"Use \'source\' instead."', 330, 12, 330, 35),
woosh.Token(woosh.OP, ',', 330, 35, 330, 36),
woosh.Token(woosh.NAME, 'DeprecationWarning', 331, 12, 331, 30),
woosh.Token(woosh.OP, ',', 331, 30, 331, 31),
woosh.Token(woosh.NAME, 'stacklevel', 331, 32, 331, 42),
woosh.Token(woosh.OP, '=', 331, 42, 331, 43),
woosh.Token(woosh.NUMBER, '2', 331, 43, 331, 44),
woosh.Token(woosh.OP, ')', 332, 8, 332, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 332, 9, 333, 0),
woosh.Token(woosh.NAME, 'self', 333, 8, 333, 12),
woosh.Token(woosh.OP, '.', 333, 12, 333, 13),
woosh.Token(woosh.NAME, 'source', 333, 13, 333, 19),
woosh.Token(woosh.OP, '=', 333, 20, 333, 21),
woosh.Token(woosh.NAME, 'value', 333, 22, 333, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 333, 27, 334, 0),
woosh.Token(woosh.DEDENT, ' ', 335, 0, 335, 4),
woosh.Token(woosh.NAME, 'def', 335, 4, 335, 7),
woosh.Token(woosh.NAME, 'append', 335, 8, 335, 14),
woosh.Token(woosh.OP, '(', 335, 14, 335, 15),
woosh.Token(woosh.NAME, 'self', 335, 15, 335, 19),
woosh.Token(woosh.OP, ',', 335, 19, 335, 20),
woosh.Token(woosh.NAME, 'lineno', 335, 21, 335, 27),
woosh.Token(woosh.OP, ',', 335, 27, 335, 28),
woosh.Token(woosh.NAME, 'line', 335, 29, 335, 33),
woosh.Token(woosh.OP, ')', 335, 33, 335, 34),
woosh.Token(woosh.OP, ':', 335, 34, 335, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 335, 35, 336, 0),
woosh.Token(woosh.INDENT, ' ', 336, 0, 336, 8),
woosh.Token(woosh.NAME, 'self', 336, 8, 336, 12),
woosh.Token(woosh.OP, '.', 336, 12, 336, 13),
woosh.Token(woosh.NAME, 'errors', 336, 13, 336, 19),
woosh.Token(woosh.OP, '.', 336, 19, 336, 20),
woosh.Token(woosh.NAME, 'append', 336, 20, 336, 26),
woosh.Token(woosh.OP, '(', 336, 26, 336, 27),
woosh.Token(woosh.OP, '(', 336, 27, 336, 28),
woosh.Token(woosh.NAME, 'lineno', 336, 28, 336, 34),
woosh.Token(woosh.OP, ',', 336, 34, 336, 35),
woosh.Token(woosh.NAME, 'line', 336, 36, 336, 40),
woosh.Token(woosh.OP, ')', 336, 40, 336, 41),
woosh.Token(woosh.OP, ')', 336, 41, 336, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 336, 42, 337, 0),
woosh.Token(woosh.NAME, 'self', 337, 8, 337, 12),
woosh.Token(woosh.OP, '.', 337, 12, 337, 13),
woosh.Token(woosh.NAME, 'message', 337, 13, 337, 20),
woosh.Token(woosh.OP, '+=', 337, 21, 337, 23),
woosh.Token(woosh.STRING, "'\\n\\t[line %2d]: %s'", 337, 24, 337, 44),
woosh.Token(woosh.OP, '%', 337, 45, 337, 46),
woosh.Token(woosh.OP, '(', 337, 47, 337, 48),
woosh.Token(woosh.NAME, 'lineno', 337, 48, 337, 54),
woosh.Token(woosh.OP, ',', 337, 54, 337, 55),
woosh.Token(woosh.NAME, 'line', 337, 56, 337, 60),
woosh.Token(woosh.OP, ')', 337, 60, 337, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 337, 61, 338, 0),
woosh.Token(woosh.DEDENT, '', 340, 0, 340, 0),
woosh.Token(woosh.DEDENT, '', 340, 0, 340, 0),
woosh.Token(woosh.NAME, 'class', 340, 0, 340, 5),
woosh.Token(woosh.NAME, 'MissingSectionHeaderError', 340, 6, 340, 31),
woosh.Token(woosh.OP, '(', 340, 31, 340, 32),
woosh.Token(woosh.NAME, 'ParsingError', 340, 32, 340, 44),
woosh.Token(woosh.OP, ')', 340, 44, 340, 45),
woosh.Token(woosh.OP, ':', 340, 45, 340, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 340, 46, 341, 0),
woosh.Token(woosh.INDENT, ' ', 341, 0, 341, 4),
woosh.Token(woosh.STRING, '"""Raised when a key-value pair is found before any section header."""', 341, 4, 341, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 341, 74, 342, 0),
woosh.Token(woosh.NAME, 'def', 343, 4, 343, 7),
woosh.Token(woosh.NAME, '__init__', 343, 8, 343, 16),
woosh.Token(woosh.OP, '(', 343, 16, 343, 17),
woosh.Token(woosh.NAME, 'self', 343, 17, 343, 21),
woosh.Token(woosh.OP, ',', 343, 21, 343, 22),
woosh.Token(woosh.NAME, 'filename', 343, 23, 343, 31),
woosh.Token(woosh.OP, ',', 343, 31, 343, 32),
woosh.Token(woosh.NAME, 'lineno', 343, 33, 343, 39),
woosh.Token(woosh.OP, ',', 343, 39, 343, 40),
woosh.Token(woosh.NAME, 'line', 343, 41, 343, 45),
woosh.Token(woosh.OP, ')', 343, 45, 343, 46),
woosh.Token(woosh.OP, ':', 343, 46, 343, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 343, 47, 344, 0),
woosh.Token(woosh.INDENT, ' ', 344, 0, 344, 8),
woosh.Token(woosh.NAME, 'Error', 344, 8, 344, 13),
woosh.Token(woosh.OP, '.', 344, 13, 344, 14),
woosh.Token(woosh.NAME, '__init__', 344, 14, 344, 22),
woosh.Token(woosh.OP, '(', 344, 22, 344, 23),
woosh.Token(woosh.NAME, 'self', 345, 12, 345, 16),
woosh.Token(woosh.OP, ',', 345, 16, 345, 17),
woosh.Token(woosh.STRING, "'File contains no section headers.\\nfile: %r, line: %d\\n%r'", 346, 12, 346, 71),
woosh.Token(woosh.OP, '%', 346, 72, 346, 73),
woosh.Token(woosh.OP, '(', 347, 12, 347, 13),
woosh.Token(woosh.NAME, 'filename', 347, 13, 347, 21),
woosh.Token(woosh.OP, ',', 347, 21, 347, 22),
woosh.Token(woosh.NAME, 'lineno', 347, 23, 347, 29),
woosh.Token(woosh.OP, ',', 347, 29, 347, 30),
woosh.Token(woosh.NAME, 'line', 347, 31, 347, 35),
woosh.Token(woosh.OP, ')', 347, 35, 347, 36),
woosh.Token(woosh.OP, ')', 347, 36, 347, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 347, 37, 348, 0),
woosh.Token(woosh.NAME, 'self', 348, 8, 348, 12),
woosh.Token(woosh.OP, '.', 348, 12, 348, 13),
woosh.Token(woosh.NAME, 'source', 348, 13, 348, 19),
woosh.Token(woosh.OP, '=', 348, 20, 348, 21),
woosh.Token(woosh.NAME, 'filename', 348, 22, 348, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 348, 30, 349, 0),
woosh.Token(woosh.NAME, 'self', 349, 8, 349, 12),
woosh.Token(woosh.OP, '.', 349, 12, 349, 13),
woosh.Token(woosh.NAME, 'lineno', 349, 13, 349, 19),
woosh.Token(woosh.OP, '=', 349, 20, 349, 21),
woosh.Token(woosh.NAME, 'lineno', 349, 22, 349, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 349, 28, 350, 0),
woosh.Token(woosh.NAME, 'self', 350, 8, 350, 12),
woosh.Token(woosh.OP, '.', 350, 12, 350, 13),
woosh.Token(woosh.NAME, 'line', 350, 13, 350, 17),
woosh.Token(woosh.OP, '=', 350, 18, 350, 19),
woosh.Token(woosh.NAME, 'line', 350, 20, 350, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 350, 24, 351, 0),
woosh.Token(woosh.NAME, 'self', 351, 8, 351, 12),
woosh.Token(woosh.OP, '.', 351, 12, 351, 13),
woosh.Token(woosh.NAME, 'args', 351, 13, 351, 17),
woosh.Token(woosh.OP, '=', 351, 18, 351, 19),
woosh.Token(woosh.OP, '(', 351, 20, 351, 21),
woosh.Token(woosh.NAME, 'filename', 351, 21, 351, 29),
woosh.Token(woosh.OP, ',', 351, 29, 351, 30),
woosh.Token(woosh.NAME, 'lineno', 351, 31, 351, 37),
woosh.Token(woosh.OP, ',', 351, 37, 351, 38),
woosh.Token(woosh.NAME, 'line', 351, 39, 351, 43),
woosh.Token(woosh.OP, ')', 351, 43, 351, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 351, 44, 352, 0),
woosh.Token(woosh.COMMENT, '# Used in parser getters to indicate the default behaviour when a specific', 354, 0, 354, 74),
woosh.Token(woosh.COMMENT, "# option is not found it to raise an exception. Created to enable `None' as", 355, 0, 355, 75),
woosh.Token(woosh.COMMENT, '# a valid fallback value.', 356, 0, 356, 25),
woosh.Token(woosh.DEDENT, '', 357, 0, 357, 0),
woosh.Token(woosh.DEDENT, '', 357, 0, 357, 0),
woosh.Token(woosh.NAME, '_UNSET', 357, 0, 357, 6),
woosh.Token(woosh.OP, '=', 357, 7, 357, 8),
woosh.Token(woosh.NAME, 'object', 357, 9, 357, 15),
woosh.Token(woosh.OP, '(', 357, 15, 357, 16),
woosh.Token(woosh.OP, ')', 357, 16, 357, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 357, 17, 358, 0),
woosh.Token(woosh.NAME, 'class', 360, 0, 360, 5),
woosh.Token(woosh.NAME, 'Interpolation', 360, 6, 360, 19),
woosh.Token(woosh.OP, ':', 360, 19, 360, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 360, 20, 361, 0),
woosh.Token(woosh.INDENT, ' ', 361, 0, 361, 4),
woosh.Token(woosh.STRING, '"""Dummy interpolation that passes the value through with no changes."""', 361, 4, 361, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 361, 76, 362, 0),
woosh.Token(woosh.NAME, 'def', 363, 4, 363, 7),
woosh.Token(woosh.NAME, 'before_get', 363, 8, 363, 18),
woosh.Token(woosh.OP, '(', 363, 18, 363, 19),
woosh.Token(woosh.NAME, 'self', 363, 19, 363, 23),
woosh.Token(woosh.OP, ',', 363, 23, 363, 24),
woosh.Token(woosh.NAME, 'parser', 363, 25, 363, 31),
woosh.Token(woosh.OP, ',', 363, 31, 363, 32),
woosh.Token(woosh.NAME, 'section', 363, 33, 363, 40),
woosh.Token(woosh.OP, ',', 363, 40, 363, 41),
woosh.Token(woosh.NAME, 'option', 363, 42, 363, 48),
woosh.Token(woosh.OP, ',', 363, 48, 363, 49),
woosh.Token(woosh.NAME, 'value', 363, 50, 363, 55),
woosh.Token(woosh.OP, ',', 363, 55, 363, 56),
woosh.Token(woosh.NAME, 'defaults', 363, 57, 363, 65),
woosh.Token(woosh.OP, ')', 363, 65, 363, 66),
woosh.Token(woosh.OP, ':', 363, 66, 363, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 363, 67, 364, 0),
woosh.Token(woosh.INDENT, ' ', 364, 0, 364, 8),
woosh.Token(woosh.NAME, 'return', 364, 8, 364, 14),
woosh.Token(woosh.NAME, 'value', 364, 15, 364, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 364, 20, 365, 0),
woosh.Token(woosh.DEDENT, ' ', 366, 0, 366, 4),
woosh.Token(woosh.NAME, 'def', 366, 4, 366, 7),
woosh.Token(woosh.NAME, 'before_set', 366, 8, 366, 18),
woosh.Token(woosh.OP, '(', 366, 18, 366, 19),
woosh.Token(woosh.NAME, 'self', 366, 19, 366, 23),
woosh.Token(woosh.OP, ',', 366, 23, 366, 24),
woosh.Token(woosh.NAME, 'parser', 366, 25, 366, 31),
woosh.Token(woosh.OP, ',', 366, 31, 366, 32),
woosh.Token(woosh.NAME, 'section', 366, 33, 366, 40),
woosh.Token(woosh.OP, ',', 366, 40, 366, 41),
woosh.Token(woosh.NAME, 'option', 366, 42, 366, 48),
woosh.Token(woosh.OP, ',', 366, 48, 366, 49),
woosh.Token(woosh.NAME, 'value', 366, 50, 366, 55),
woosh.Token(woosh.OP, ')', 366, 55, 366, 56),
woosh.Token(woosh.OP, ':', 366, 56, 366, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 366, 57, 367, 0),
woosh.Token(woosh.INDENT, ' ', 367, 0, 367, 8),
woosh.Token(woosh.NAME, 'return', 367, 8, 367, 14),
woosh.Token(woosh.NAME, 'value', 367, 15, 367, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 367, 20, 368, 0),
woosh.Token(woosh.DEDENT, ' ', 369, 0, 369, 4),
woosh.Token(woosh.NAME, 'def', 369, 4, 369, 7),
woosh.Token(woosh.NAME, 'before_read', 369, 8, 369, 19),
woosh.Token(woosh.OP, '(', 369, 19, 369, 20),
woosh.Token(woosh.NAME, 'self', 369, 20, 369, 24),
woosh.Token(woosh.OP, ',', 369, 24, 369, 25),
woosh.Token(woosh.NAME, 'parser', 369, 26, 369, 32),
woosh.Token(woosh.OP, ',', 369, 32, 369, 33),
woosh.Token(woosh.NAME, 'section', 369, 34, 369, 41),
woosh.Token(woosh.OP, ',', 369, 41, 369, 42),
woosh.Token(woosh.NAME, 'option', 369, 43, 369, 49),
woosh.Token(woosh.OP, ',', 369, 49, 369, 50),
woosh.Token(woosh.NAME, 'value', 369, 51, 369, 56),
woosh.Token(woosh.OP, ')', 369, 56, 369, 57),
woosh.Token(woosh.OP, ':', 369, 57, 369, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 369, 58, 370, 0),
woosh.Token(woosh.INDENT, ' ', 370, 0, 370, 8),
woosh.Token(woosh.NAME, 'return', 370, 8, 370, 14),
woosh.Token(woosh.NAME, 'value', 370, 15, 370, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 370, 20, 371, 0),
woosh.Token(woosh.DEDENT, ' ', 372, 0, 372, 4),
woosh.Token(woosh.NAME, 'def', 372, 4, 372, 7),
woosh.Token(woosh.NAME, 'before_write', 372, 8, 372, 20),
woosh.Token(woosh.OP, '(', 372, 20, 372, 21),
woosh.Token(woosh.NAME, 'self', 372, 21, 372, 25),
woosh.Token(woosh.OP, ',', 372, 25, 372, 26),
woosh.Token(woosh.NAME, 'parser', 372, 27, 372, 33),
woosh.Token(woosh.OP, ',', 372, 33, 372, 34),
woosh.Token(woosh.NAME, 'section', 372, 35, 372, 42),
woosh.Token(woosh.OP, ',', 372, 42, 372, 43),
woosh.Token(woosh.NAME, 'option', 372, 44, 372, 50),
woosh.Token(woosh.OP, ',', 372, 50, 372, 51),
woosh.Token(woosh.NAME, 'value', 372, 52, 372, 57),
woosh.Token(woosh.OP, ')', 372, 57, 372, 58),
woosh.Token(woosh.OP, ':', 372, 58, 372, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 372, 59, 373, 0),
woosh.Token(woosh.INDENT, ' ', 373, 0, 373, 8),
woosh.Token(woosh.NAME, 'return', 373, 8, 373, 14),
woosh.Token(woosh.NAME, 'value', 373, 15, 373, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 373, 20, 374, 0),
woosh.Token(woosh.DEDENT, '', 376, 0, 376, 0),
woosh.Token(woosh.DEDENT, '', 376, 0, 376, 0),
woosh.Token(woosh.NAME, 'class', 376, 0, 376, 5),
woosh.Token(woosh.NAME, 'BasicInterpolation', 376, 6, 376, 24),
woosh.Token(woosh.OP, '(', 376, 24, 376, 25),
woosh.Token(woosh.NAME, 'Interpolation', 376, 25, 376, 38),
woosh.Token(woosh.OP, ')', 376, 38, 376, 39),
woosh.Token(woosh.OP, ':', 376, 39, 376, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 376, 40, 377, 0),
woosh.Token(woosh.INDENT, ' ', 377, 0, 377, 4),
woosh.Token(woosh.STRING, '"""Interpolation as implemented in the classic ConfigParser.\r\n\r\n The option values can contain format strings which refer to other values in\r\n the same section, or values in the special default section.\r\n\r\n For example:\r\n\r\n something: %(dir)s/whatever\r\n\r\n would resolve the "%(dir)s" to the value of dir. All reference\r\n expansions are done late, on demand. If a user needs to use a bare % in\r\n a configuration file, she can escape it by writing %%. Other % usage\r\n is considered a user error and raises `InterpolationSyntaxError\'."""', 377, 4, 389, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 389, 72, 390, 0),
woosh.Token(woosh.NAME, '_KEYCRE', 391, 4, 391, 11),
woosh.Token(woosh.OP, '=', 391, 12, 391, 13),
woosh.Token(woosh.NAME, 're', 391, 14, 391, 16),
woosh.Token(woosh.OP, '.', 391, 16, 391, 17),
woosh.Token(woosh.NAME, 'compile', 391, 17, 391, 24),
woosh.Token(woosh.OP, '(', 391, 24, 391, 25),
woosh.Token(woosh.STRING, 'r"%\\(([^)]+)\\)s"', 391, 25, 391, 41),
woosh.Token(woosh.OP, ')', 391, 41, 391, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 391, 42, 392, 0),
woosh.Token(woosh.NAME, 'def', 393, 4, 393, 7),
woosh.Token(woosh.NAME, 'before_get', 393, 8, 393, 18),
woosh.Token(woosh.OP, '(', 393, 18, 393, 19),
woosh.Token(woosh.NAME, 'self', 393, 19, 393, 23),
woosh.Token(woosh.OP, ',', 393, 23, 393, 24),
woosh.Token(woosh.NAME, 'parser', 393, 25, 393, 31),
woosh.Token(woosh.OP, ',', 393, 31, 393, 32),
woosh.Token(woosh.NAME, 'section', 393, 33, 393, 40),
woosh.Token(woosh.OP, ',', 393, 40, 393, 41),
woosh.Token(woosh.NAME, 'option', 393, 42, 393, 48),
woosh.Token(woosh.OP, ',', 393, 48, 393, 49),
woosh.Token(woosh.NAME, 'value', 393, 50, 393, 55),
woosh.Token(woosh.OP, ',', 393, 55, 393, 56),
woosh.Token(woosh.NAME, 'defaults', 393, 57, 393, 65),
woosh.Token(woosh.OP, ')', 393, 65, 393, 66),
woosh.Token(woosh.OP, ':', 393, 66, 393, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 393, 67, 394, 0),
woosh.Token(woosh.INDENT, ' ', 394, 0, 394, 8),
woosh.Token(woosh.NAME, 'L', 394, 8, 394, 9),
woosh.Token(woosh.OP, '=', 394, 10, 394, 11),
woosh.Token(woosh.OP, '[', 394, 12, 394, 13),
woosh.Token(woosh.OP, ']', 394, 13, 394, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 394, 14, 395, 0),
woosh.Token(woosh.NAME, 'self', 395, 8, 395, 12),
woosh.Token(woosh.OP, '.', 395, 12, 395, 13),
woosh.Token(woosh.NAME, '_interpolate_some', 395, 13, 395, 30),
woosh.Token(woosh.OP, '(', 395, 30, 395, 31),
woosh.Token(woosh.NAME, 'parser', 395, 31, 395, 37),
woosh.Token(woosh.OP, ',', 395, 37, 395, 38),
woosh.Token(woosh.NAME, 'option', 395, 39, 395, 45),
woosh.Token(woosh.OP, ',', 395, 45, 395, 46),
woosh.Token(woosh.NAME, 'L', 395, 47, 395, 48),
woosh.Token(woosh.OP, ',', 395, 48, 395, 49),
woosh.Token(woosh.NAME, 'value', 395, 50, 395, 55),
woosh.Token(woosh.OP, ',', 395, 55, 395, 56),
woosh.Token(woosh.NAME, 'section', 395, 57, 395, 64),
woosh.Token(woosh.OP, ',', 395, 64, 395, 65),
woosh.Token(woosh.NAME, 'defaults', 395, 66, 395, 74),
woosh.Token(woosh.OP, ',', 395, 74, 395, 75),
woosh.Token(woosh.NUMBER, '1', 395, 76, 395, 77),
woosh.Token(woosh.OP, ')', 395, 77, 395, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 395, 78, 396, 0),
woosh.Token(woosh.NAME, 'return', 396, 8, 396, 14),
woosh.Token(woosh.STRING, "''", 396, 15, 396, 17),
woosh.Token(woosh.OP, '.', 396, 17, 396, 18),
woosh.Token(woosh.NAME, 'join', 396, 18, 396, 22),
woosh.Token(woosh.OP, '(', 396, 22, 396, 23),
woosh.Token(woosh.NAME, 'L', 396, 23, 396, 24),
woosh.Token(woosh.OP, ')', 396, 24, 396, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 396, 25, 397, 0),
woosh.Token(woosh.DEDENT, ' ', 398, 0, 398, 4),
woosh.Token(woosh.NAME, 'def', 398, 4, 398, 7),
woosh.Token(woosh.NAME, 'before_set', 398, 8, 398, 18),
woosh.Token(woosh.OP, '(', 398, 18, 398, 19),
woosh.Token(woosh.NAME, 'self', 398, 19, 398, 23),
woosh.Token(woosh.OP, ',', 398, 23, 398, 24),
woosh.Token(woosh.NAME, 'parser', 398, 25, 398, 31),
woosh.Token(woosh.OP, ',', 398, 31, 398, 32),
woosh.Token(woosh.NAME, 'section', 398, 33, 398, 40),
woosh.Token(woosh.OP, ',', 398, 40, 398, 41),
woosh.Token(woosh.NAME, 'option', 398, 42, 398, 48),
woosh.Token(woosh.OP, ',', 398, 48, 398, 49),
woosh.Token(woosh.NAME, 'value', 398, 50, 398, 55),
woosh.Token(woosh.OP, ')', 398, 55, 398, 56),
woosh.Token(woosh.OP, ':', 398, 56, 398, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 398, 57, 399, 0),
woosh.Token(woosh.INDENT, ' ', 399, 0, 399, 8),
woosh.Token(woosh.NAME, 'tmp_value', 399, 8, 399, 17),
woosh.Token(woosh.OP, '=', 399, 18, 399, 19),
woosh.Token(woosh.NAME, 'value', 399, 20, 399, 25),
woosh.Token(woosh.OP, '.', 399, 25, 399, 26),
woosh.Token(woosh.NAME, 'replace', 399, 26, 399, 33),
woosh.Token(woosh.OP, '(', 399, 33, 399, 34),
woosh.Token(woosh.STRING, "'%%'", 399, 34, 399, 38),
woosh.Token(woosh.OP, ',', 399, 38, 399, 39),
woosh.Token(woosh.STRING, "''", 399, 40, 399, 42),
woosh.Token(woosh.OP, ')', 399, 42, 399, 43),
woosh.Token(woosh.COMMENT, '# escaped percent signs', 399, 44, 399, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 399, 67, 400, 0),
woosh.Token(woosh.NAME, 'tmp_value', 400, 8, 400, 17),
woosh.Token(woosh.OP, '=', 400, 18, 400, 19),
woosh.Token(woosh.NAME, 'self', 400, 20, 400, 24),
woosh.Token(woosh.OP, '.', 400, 24, 400, 25),
woosh.Token(woosh.NAME, '_KEYCRE', 400, 25, 400, 32),
woosh.Token(woosh.OP, '.', 400, 32, 400, 33),
woosh.Token(woosh.NAME, 'sub', 400, 33, 400, 36),
woosh.Token(woosh.OP, '(', 400, 36, 400, 37),
woosh.Token(woosh.STRING, "''", 400, 37, 400, 39),
woosh.Token(woosh.OP, ',', 400, 39, 400, 40),
woosh.Token(woosh.NAME, 'tmp_value', 400, 41, 400, 50),
woosh.Token(woosh.OP, ')', 400, 50, 400, 51),
woosh.Token(woosh.COMMENT, '# valid syntax', 400, 52, 400, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 400, 66, 401, 0),
woosh.Token(woosh.NAME, 'if', 401, 8, 401, 10),
woosh.Token(woosh.STRING, "'%'", 401, 11, 401, 14),
woosh.Token(woosh.NAME, 'in', 401, 15, 401, 17),
woosh.Token(woosh.NAME, 'tmp_value', 401, 18, 401, 27),
woosh.Token(woosh.OP, ':', 401, 27, 401, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 401, 28, 402, 0),
woosh.Token(woosh.INDENT, ' ', 402, 0, 402, 12),
woosh.Token(woosh.NAME, 'raise', 402, 12, 402, 17),
woosh.Token(woosh.NAME, 'ValueError', 402, 18, 402, 28),
woosh.Token(woosh.OP, '(', 402, 28, 402, 29),
woosh.Token(woosh.STRING, '"invalid interpolation syntax in %r at "', 402, 29, 402, 69),
woosh.Token(woosh.STRING, '"position %d"', 403, 29, 403, 42),
woosh.Token(woosh.OP, '%', 403, 43, 403, 44),
woosh.Token(woosh.OP, '(', 403, 45, 403, 46),
woosh.Token(woosh.NAME, 'value', 403, 46, 403, 51),
woosh.Token(woosh.OP, ',', 403, 51, 403, 52),
woosh.Token(woosh.NAME, 'tmp_value', 403, 53, 403, 62),
woosh.Token(woosh.OP, '.', 403, 62, 403, 63),
woosh.Token(woosh.NAME, 'find', 403, 63, 403, 67),
woosh.Token(woosh.OP, '(', 403, 67, 403, 68),
woosh.Token(woosh.STRING, "'%'", 403, 68, 403, 71),
woosh.Token(woosh.OP, ')', 403, 71, 403, 72),
woosh.Token(woosh.OP, ')', 403, 72, 403, 73),
woosh.Token(woosh.OP, ')', 403, 73, 403, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 403, 74, 404, 0),
woosh.Token(woosh.DEDENT, ' ', 404, 0, 404, 8),
woosh.Token(woosh.NAME, 'return', 404, 8, 404, 14),
woosh.Token(woosh.NAME, 'value', 404, 15, 404, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 404, 20, 405, 0),
woosh.Token(woosh.DEDENT, ' ', 406, 0, 406, 4),
woosh.Token(woosh.NAME, 'def', 406, 4, 406, 7),
woosh.Token(woosh.NAME, '_interpolate_some', 406, 8, 406, 25),
woosh.Token(woosh.OP, '(', 406, 25, 406, 26),
woosh.Token(woosh.NAME, 'self', 406, 26, 406, 30),
woosh.Token(woosh.OP, ',', 406, 30, 406, 31),
woosh.Token(woosh.NAME, 'parser', 406, 32, 406, 38),
woosh.Token(woosh.OP, ',', 406, 38, 406, 39),
woosh.Token(woosh.NAME, 'option', 406, 40, 406, 46),
woosh.Token(woosh.OP, ',', 406, 46, 406, 47),
woosh.Token(woosh.NAME, 'accum', 406, 48, 406, 53),
woosh.Token(woosh.OP, ',', 406, 53, 406, 54),
woosh.Token(woosh.NAME, 'rest', 406, 55, 406, 59),
woosh.Token(woosh.OP, ',', 406, 59, 406, 60),
woosh.Token(woosh.NAME, 'section', 406, 61, 406, 68),
woosh.Token(woosh.OP, ',', 406, 68, 406, 69),
woosh.Token(woosh.NAME, 'map', 406, 70, 406, 73),
woosh.Token(woosh.OP, ',', 406, 73, 406, 74),
woosh.Token(woosh.NAME, 'depth', 407, 26, 407, 31),
woosh.Token(woosh.OP, ')', 407, 31, 407, 32),
woosh.Token(woosh.OP, ':', 407, 32, 407, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 407, 33, 408, 0),
woosh.Token(woosh.INDENT, ' ', 408, 0, 408, 8),
woosh.Token(woosh.NAME, 'rawval', 408, 8, 408, 14),
woosh.Token(woosh.OP, '=', 408, 15, 408, 16),
woosh.Token(woosh.NAME, 'parser', 408, 17, 408, 23),
woosh.Token(woosh.OP, '.', 408, 23, 408, 24),
woosh.Token(woosh.NAME, 'get', 408, 24, 408, 27),
woosh.Token(woosh.OP, '(', 408, 27, 408, 28),
woosh.Token(woosh.NAME, 'section', 408, 28, 408, 35),
woosh.Token(woosh.OP, ',', 408, 35, 408, 36),
woosh.Token(woosh.NAME, 'option', 408, 37, 408, 43),
woosh.Token(woosh.OP, ',', 408, 43, 408, 44),
woosh.Token(woosh.NAME, 'raw', 408, 45, 408, 48),
woosh.Token(woosh.OP, '=', 408, 48, 408, 49),
woosh.Token(woosh.NAME, 'True', 408, 49, 408, 53),
woosh.Token(woosh.OP, ',', 408, 53, 408, 54),
woosh.Token(woosh.NAME, 'fallback', 408, 55, 408, 63),
woosh.Token(woosh.OP, '=', 408, 63, 408, 64),
woosh.Token(woosh.NAME, 'rest', 408, 64, 408, 68),
woosh.Token(woosh.OP, ')', 408, 68, 408, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 408, 69, 409, 0),
woosh.Token(woosh.NAME, 'if', 409, 8, 409, 10),
woosh.Token(woosh.NAME, 'depth', 409, 11, 409, 16),
woosh.Token(woosh.OP, '>', 409, 17, 409, 18),
woosh.Token(woosh.NAME, 'MAX_INTERPOLATION_DEPTH', 409, 19, 409, 42),
woosh.Token(woosh.OP, ':', 409, 42, 409, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 409, 43, 410, 0),
woosh.Token(woosh.INDENT, ' ', 410, 0, 410, 12),
woosh.Token(woosh.NAME, 'raise', 410, 12, 410, 17),
woosh.Token(woosh.NAME, 'InterpolationDepthError', 410, 18, 410, 41),
woosh.Token(woosh.OP, '(', 410, 41, 410, 42),
woosh.Token(woosh.NAME, 'option', 410, 42, 410, 48),
woosh.Token(woosh.OP, ',', 410, 48, 410, 49),
woosh.Token(woosh.NAME, 'section', 410, 50, 410, 57),
woosh.Token(woosh.OP, ',', 410, 57, 410, 58),
woosh.Token(woosh.NAME, 'rawval', 410, 59, 410, 65),
woosh.Token(woosh.OP, ')', 410, 65, 410, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 410, 66, 411, 0),
woosh.Token(woosh.DEDENT, ' ', 411, 0, 411, 8),
woosh.Token(woosh.NAME, 'while', 411, 8, 411, 13),
woosh.Token(woosh.NAME, 'rest', 411, 14, 411, 18),
woosh.Token(woosh.OP, ':', 411, 18, 411, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 411, 19, 412, 0),
woosh.Token(woosh.INDENT, ' ', 412, 0, 412, 12),
woosh.Token(woosh.NAME, 'p', 412, 12, 412, 13),
woosh.Token(woosh.OP, '=', 412, 14, 412, 15),
woosh.Token(woosh.NAME, 'rest', 412, 16, 412, 20),
woosh.Token(woosh.OP, '.', 412, 20, 412, 21),
woosh.Token(woosh.NAME, 'find', 412, 21, 412, 25),
woosh.Token(woosh.OP, '(', 412, 25, 412, 26),
woosh.Token(woosh.STRING, '"%"', 412, 26, 412, 29),
woosh.Token(woosh.OP, ')', 412, 29, 412, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 412, 30, 413, 0),
woosh.Token(woosh.NAME, 'if', 413, 12, 413, 14),
woosh.Token(woosh.NAME, 'p', 413, 15, 413, 16),
woosh.Token(woosh.OP, '<', 413, 17, 413, 18),
woosh.Token(woosh.NUMBER, '0', 413, 19, 413, 20),
woosh.Token(woosh.OP, ':', 413, 20, 413, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 413, 21, 414, 0),
woosh.Token(woosh.INDENT, ' ', 414, 0, 414, 16),
woosh.Token(woosh.NAME, 'accum', 414, 16, 414, 21),
woosh.Token(woosh.OP, '.', 414, 21, 414, 22),
woosh.Token(woosh.NAME, 'append', 414, 22, 414, 28),
woosh.Token(woosh.OP, '(', 414, 28, 414, 29),
woosh.Token(woosh.NAME, 'rest', 414, 29, 414, 33),
woosh.Token(woosh.OP, ')', 414, 33, 414, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 414, 34, 415, 0),
woosh.Token(woosh.NAME, 'return', 415, 16, 415, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 415, 22, 416, 0),
woosh.Token(woosh.DEDENT, ' ', 416, 0, 416, 12),
woosh.Token(woosh.NAME, 'if', 416, 12, 416, 14),
woosh.Token(woosh.NAME, 'p', 416, 15, 416, 16),
woosh.Token(woosh.OP, '>', 416, 17, 416, 18),
woosh.Token(woosh.NUMBER, '0', 416, 19, 416, 20),
woosh.Token(woosh.OP, ':', 416, 20, 416, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 416, 21, 417, 0),
woosh.Token(woosh.INDENT, ' ', 417, 0, 417, 16),
woosh.Token(woosh.NAME, 'accum', 417, 16, 417, 21),
woosh.Token(woosh.OP, '.', 417, 21, 417, 22),
woosh.Token(woosh.NAME, 'append', 417, 22, 417, 28),
woosh.Token(woosh.OP, '(', 417, 28, 417, 29),
woosh.Token(woosh.NAME, 'rest', 417, 29, 417, 33),
woosh.Token(woosh.OP, '[', 417, 33, 417, 34),
woosh.Token(woosh.OP, ':', 417, 34, 417, 35),
woosh.Token(woosh.NAME, 'p', 417, 35, 417, 36),
woosh.Token(woosh.OP, ']', 417, 36, 417, 37),
woosh.Token(woosh.OP, ')', 417, 37, 417, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 417, 38, 418, 0),
woosh.Token(woosh.NAME, 'rest', 418, 16, 418, 20),
woosh.Token(woosh.OP, '=', 418, 21, 418, 22),
woosh.Token(woosh.NAME, 'rest', 418, 23, 418, 27),
woosh.Token(woosh.OP, '[', 418, 27, 418, 28),
woosh.Token(woosh.NAME, 'p', 418, 28, 418, 29),
woosh.Token(woosh.OP, ':', 418, 29, 418, 30),
woosh.Token(woosh.OP, ']', 418, 30, 418, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 418, 31, 419, 0),
woosh.Token(woosh.COMMENT, '# p is no longer used', 419, 12, 419, 33),
woosh.Token(woosh.DEDENT, ' ', 420, 0, 420, 12),
woosh.Token(woosh.NAME, 'c', 420, 12, 420, 13),
woosh.Token(woosh.OP, '=', 420, 14, 420, 15),
woosh.Token(woosh.NAME, 'rest', 420, 16, 420, 20),
woosh.Token(woosh.OP, '[', 420, 20, 420, 21),
woosh.Token(woosh.NUMBER, '1', 420, 21, 420, 22),
woosh.Token(woosh.OP, ':', 420, 22, 420, 23),
woosh.Token(woosh.NUMBER, '2', 420, 23, 420, 24),
woosh.Token(woosh.OP, ']', 420, 24, 420, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 420, 25, 421, 0),
woosh.Token(woosh.NAME, 'if', 421, 12, 421, 14),
woosh.Token(woosh.NAME, 'c', 421, 15, 421, 16),
woosh.Token(woosh.OP, '==', 421, 17, 421, 19),
woosh.Token(woosh.STRING, '"%"', 421, 20, 421, 23),
woosh.Token(woosh.OP, ':', 421, 23, 421, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 421, 24, 422, 0),
woosh.Token(woosh.INDENT, ' ', 422, 0, 422, 16),
woosh.Token(woosh.NAME, 'accum', 422, 16, 422, 21),
woosh.Token(woosh.OP, '.', 422, 21, 422, 22),
woosh.Token(woosh.NAME, 'append', 422, 22, 422, 28),
woosh.Token(woosh.OP, '(', 422, 28, 422, 29),
woosh.Token(woosh.STRING, '"%"', 422, 29, 422, 32),
woosh.Token(woosh.OP, ')', 422, 32, 422, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 422, 33, 423, 0),
woosh.Token(woosh.NAME, 'rest', 423, 16, 423, 20),
woosh.Token(woosh.OP, '=', 423, 21, 423, 22),
woosh.Token(woosh.NAME, 'rest', 423, 23, 423, 27),
woosh.Token(woosh.OP, '[', 423, 27, 423, 28),
woosh.Token(woosh.NUMBER, '2', 423, 28, 423, 29),
woosh.Token(woosh.OP, ':', 423, 29, 423, 30),
woosh.Token(woosh.OP, ']', 423, 30, 423, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 423, 31, 424, 0),
woosh.Token(woosh.DEDENT, ' ', 424, 0, 424, 12),
woosh.Token(woosh.NAME, 'elif', 424, 12, 424, 16),
woosh.Token(woosh.NAME, 'c', 424, 17, 424, 18),
woosh.Token(woosh.OP, '==', 424, 19, 424, 21),
woosh.Token(woosh.STRING, '"("', 424, 22, 424, 25),
woosh.Token(woosh.OP, ':', 424, 25, 424, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 424, 26, 425, 0),
woosh.Token(woosh.INDENT, ' ', 425, 0, 425, 16),
woosh.Token(woosh.NAME, 'm', 425, 16, 425, 17),
woosh.Token(woosh.OP, '=', 425, 18, 425, 19),
woosh.Token(woosh.NAME, 'self', 425, 20, 425, 24),
woosh.Token(woosh.OP, '.', 425, 24, 425, 25),
woosh.Token(woosh.NAME, '_KEYCRE', 425, 25, 425, 32),
woosh.Token(woosh.OP, '.', 425, 32, 425, 33),
woosh.Token(woosh.NAME, 'match', 425, 33, 425, 38),
woosh.Token(woosh.OP, '(', 425, 38, 425, 39),
woosh.Token(woosh.NAME, 'rest', 425, 39, 425, 43),
woosh.Token(woosh.OP, ')', 425, 43, 425, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 425, 44, 426, 0),
woosh.Token(woosh.NAME, 'if', 426, 16, 426, 18),
woosh.Token(woosh.NAME, 'm', 426, 19, 426, 20),
woosh.Token(woosh.NAME, 'is', 426, 21, 426, 23),
woosh.Token(woosh.NAME, 'None', 426, 24, 426, 28),
woosh.Token(woosh.OP, ':', 426, 28, 426, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 426, 29, 427, 0),
woosh.Token(woosh.INDENT, ' ', 427, 0, 427, 20),
woosh.Token(woosh.NAME, 'raise', 427, 20, 427, 25),
woosh.Token(woosh.NAME, 'InterpolationSyntaxError', 427, 26, 427, 50),
woosh.Token(woosh.OP, '(', 427, 50, 427, 51),
woosh.Token(woosh.NAME, 'option', 427, 51, 427, 57),
woosh.Token(woosh.OP, ',', 427, 57, 427, 58),
woosh.Token(woosh.NAME, 'section', 427, 59, 427, 66),
woosh.Token(woosh.OP, ',', 427, 66, 427, 67),
woosh.Token(woosh.STRING, '"bad interpolation variable reference %r"', 428, 24, 428, 65),
woosh.Token(woosh.OP, '%', 428, 66, 428, 67),
woosh.Token(woosh.NAME, 'rest', 428, 68, 428, 72),
woosh.Token(woosh.OP, ')', 428, 72, 428, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 428, 73, 429, 0),
woosh.Token(woosh.DEDENT, ' ', 429, 0, 429, 16),
woosh.Token(woosh.NAME, 'var', 429, 16, 429, 19),
woosh.Token(woosh.OP, '=', 429, 20, 429, 21),
woosh.Token(woosh.NAME, 'parser', 429, 22, 429, 28),
woosh.Token(woosh.OP, '.', 429, 28, 429, 29),
woosh.Token(woosh.NAME, 'optionxform', 429, 29, 429, 40),
woosh.Token(woosh.OP, '(', 429, 40, 429, 41),
woosh.Token(woosh.NAME, 'm', 429, 41, 429, 42),
woosh.Token(woosh.OP, '.', 429, 42, 429, 43),
woosh.Token(woosh.NAME, 'group', 429, 43, 429, 48),
woosh.Token(woosh.OP, '(', 429, 48, 429, 49),
woosh.Token(woosh.NUMBER, '1', 429, 49, 429, 50),
woosh.Token(woosh.OP, ')', 429, 50, 429, 51),
woosh.Token(woosh.OP, ')', 429, 51, 429, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 429, 52, 430, 0),
woosh.Token(woosh.NAME, 'rest', 430, 16, 430, 20),
woosh.Token(woosh.OP, '=', 430, 21, 430, 22),
woosh.Token(woosh.NAME, 'rest', 430, 23, 430, 27),
woosh.Token(woosh.OP, '[', 430, 27, 430, 28),
woosh.Token(woosh.NAME, 'm', 430, 28, 430, 29),
woosh.Token(woosh.OP, '.', 430, 29, 430, 30),
woosh.Token(woosh.NAME, 'end', 430, 30, 430, 33),
woosh.Token(woosh.OP, '(', 430, 33, 430, 34),
woosh.Token(woosh.OP, ')', 430, 34, 430, 35),
woosh.Token(woosh.OP, ':', 430, 35, 430, 36),
woosh.Token(woosh.OP, ']', 430, 36, 430, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 430, 37, 431, 0),
woosh.Token(woosh.NAME, 'try', 431, 16, 431, 19),
woosh.Token(woosh.OP, ':', 431, 19, 431, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 431, 20, 432, 0),
woosh.Token(woosh.INDENT, ' ', 432, 0, 432, 20),
woosh.Token(woosh.NAME, 'v', 432, 20, 432, 21),
woosh.Token(woosh.OP, '=', 432, 22, 432, 23),
woosh.Token(woosh.NAME, 'map', 432, 24, 432, 27),
woosh.Token(woosh.OP, '[', 432, 27, 432, 28),
woosh.Token(woosh.NAME, 'var', 432, 28, 432, 31),
woosh.Token(woosh.OP, ']', 432, 31, 432, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 432, 32, 433, 0),
woosh.Token(woosh.DEDENT, ' ', 433, 0, 433, 16),
woosh.Token(woosh.NAME, 'except', 433, 16, 433, 22),
woosh.Token(woosh.NAME, 'KeyError', 433, 23, 433, 31),
woosh.Token(woosh.OP, ':', 433, 31, 433, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 433, 32, 434, 0),
woosh.Token(woosh.INDENT, ' ', 434, 0, 434, 20),
woosh.Token(woosh.NAME, 'raise', 434, 20, 434, 25),
woosh.Token(woosh.NAME, 'InterpolationMissingOptionError', 434, 26, 434, 57),
woosh.Token(woosh.OP, '(', 434, 57, 434, 58),
woosh.Token(woosh.NAME, 'option', 435, 24, 435, 30),
woosh.Token(woosh.OP, ',', 435, 30, 435, 31),
woosh.Token(woosh.NAME, 'section', 435, 32, 435, 39),
woosh.Token(woosh.OP, ',', 435, 39, 435, 40),
woosh.Token(woosh.NAME, 'rawval', 435, 41, 435, 47),
woosh.Token(woosh.OP, ',', 435, 47, 435, 48),
woosh.Token(woosh.NAME, 'var', 435, 49, 435, 52),
woosh.Token(woosh.OP, ')', 435, 52, 435, 53),
woosh.Token(woosh.NAME, 'from', 435, 54, 435, 58),
woosh.Token(woosh.NAME, 'None', 435, 59, 435, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 435, 63, 436, 0),
woosh.Token(woosh.DEDENT, ' ', 436, 0, 436, 16),
woosh.Token(woosh.NAME, 'if', 436, 16, 436, 18),
woosh.Token(woosh.STRING, '"%"', 436, 19, 436, 22),
woosh.Token(woosh.NAME, 'in', 436, 23, 436, 25),
woosh.Token(woosh.NAME, 'v', 436, 26, 436, 27),
woosh.Token(woosh.OP, ':', 436, 27, 436, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 436, 28, 437, 0),
woosh.Token(woosh.INDENT, ' ', 437, 0, 437, 20),
woosh.Token(woosh.NAME, 'self', 437, 20, 437, 24),
woosh.Token(woosh.OP, '.', 437, 24, 437, 25),
woosh.Token(woosh.NAME, '_interpolate_some', 437, 25, 437, 42),
woosh.Token(woosh.OP, '(', 437, 42, 437, 43),
woosh.Token(woosh.NAME, 'parser', 437, 43, 437, 49),
woosh.Token(woosh.OP, ',', 437, 49, 437, 50),
woosh.Token(woosh.NAME, 'option', 437, 51, 437, 57),
woosh.Token(woosh.OP, ',', 437, 57, 437, 58),
woosh.Token(woosh.NAME, 'accum', 437, 59, 437, 64),
woosh.Token(woosh.OP, ',', 437, 64, 437, 65),
woosh.Token(woosh.NAME, 'v', 437, 66, 437, 67),
woosh.Token(woosh.OP, ',', 437, 67, 437, 68),
woosh.Token(woosh.NAME, 'section', 438, 43, 438, 50),
woosh.Token(woosh.OP, ',', 438, 50, 438, 51),
woosh.Token(woosh.NAME, 'map', 438, 52, 438, 55),
woosh.Token(woosh.OP, ',', 438, 55, 438, 56),
woosh.Token(woosh.NAME, 'depth', 438, 57, 438, 62),
woosh.Token(woosh.OP, '+', 438, 63, 438, 64),
woosh.Token(woosh.NUMBER, '1', 438, 65, 438, 66),
woosh.Token(woosh.OP, ')', 438, 66, 438, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 438, 67, 439, 0),
woosh.Token(woosh.DEDENT, ' ', 439, 0, 439, 16),
woosh.Token(woosh.NAME, 'else', 439, 16, 439, 20),
woosh.Token(woosh.OP, ':', 439, 20, 439, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 439, 21, 440, 0),
woosh.Token(woosh.INDENT, ' ', 440, 0, 440, 20),
woosh.Token(woosh.NAME, 'accum', 440, 20, 440, 25),
woosh.Token(woosh.OP, '.', 440, 25, 440, 26),
woosh.Token(woosh.NAME, 'append', 440, 26, 440, 32),
woosh.Token(woosh.OP, '(', 440, 32, 440, 33),
woosh.Token(woosh.NAME, 'v', 440, 33, 440, 34),
woosh.Token(woosh.OP, ')', 440, 34, 440, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 440, 35, 441, 0),
woosh.Token(woosh.DEDENT, ' ', 441, 0, 441, 12),
woosh.Token(woosh.DEDENT, '', 441, 12, 441, 12),
woosh.Token(woosh.NAME, 'else', 441, 12, 441, 16),
woosh.Token(woosh.OP, ':', 441, 16, 441, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 441, 17, 442, 0),
woosh.Token(woosh.INDENT, ' ', 442, 0, 442, 16),
woosh.Token(woosh.NAME, 'raise', 442, 16, 442, 21),
woosh.Token(woosh.NAME, 'InterpolationSyntaxError', 442, 22, 442, 46),
woosh.Token(woosh.OP, '(', 442, 46, 442, 47),
woosh.Token(woosh.NAME, 'option', 443, 20, 443, 26),
woosh.Token(woosh.OP, ',', 443, 26, 443, 27),
woosh.Token(woosh.NAME, 'section', 443, 28, 443, 35),
woosh.Token(woosh.OP, ',', 443, 35, 443, 36),
woosh.Token(woosh.STRING, '"\'%%\' must be followed by \'%%\' or \'(\', "', 444, 20, 444, 60),
woosh.Token(woosh.STRING, '"found: %r"', 445, 20, 445, 31),
woosh.Token(woosh.OP, '%', 445, 32, 445, 33),
woosh.Token(woosh.OP, '(', 445, 34, 445, 35),
woosh.Token(woosh.NAME, 'rest', 445, 35, 445, 39),
woosh.Token(woosh.OP, ',', 445, 39, 445, 40),
woosh.Token(woosh.OP, ')', 445, 40, 445, 41),
woosh.Token(woosh.OP, ')', 445, 41, 445, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 445, 42, 446, 0),
woosh.Token(woosh.DEDENT, '', 448, 0, 448, 0),
woosh.Token(woosh.DEDENT, '', 448, 0, 448, 0),
woosh.Token(woosh.DEDENT, '', 448, 0, 448, 0),
woosh.Token(woosh.DEDENT, '', 448, 0, 448, 0),
woosh.Token(woosh.NAME, 'class', 448, 0, 448, 5),
woosh.Token(woosh.NAME, 'ExtendedInterpolation', 448, 6, 448, 27),
woosh.Token(woosh.OP, '(', 448, 27, 448, 28),
woosh.Token(woosh.NAME, 'Interpolation', 448, 28, 448, 41),
woosh.Token(woosh.OP, ')', 448, 41, 448, 42),
woosh.Token(woosh.OP, ':', 448, 42, 448, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 448, 43, 449, 0),
woosh.Token(woosh.INDENT, ' ', 449, 0, 449, 4),
woosh.Token(woosh.STRING, '"""Advanced variant of interpolation, supports the syntax used by\r\n `zc.buildout\'. Enables interpolation between sections."""', 449, 4, 450, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 450, 61, 451, 0),
woosh.Token(woosh.NAME, '_KEYCRE', 452, 4, 452, 11),
woosh.Token(woosh.OP, '=', 452, 12, 452, 13),
woosh.Token(woosh.NAME, 're', 452, 14, 452, 16),
woosh.Token(woosh.OP, '.', 452, 16, 452, 17),
woosh.Token(woosh.NAME, 'compile', 452, 17, 452, 24),
woosh.Token(woosh.OP, '(', 452, 24, 452, 25),
woosh.Token(woosh.STRING, 'r"\\$\\{([^}]+)\\}"', 452, 25, 452, 41),
woosh.Token(woosh.OP, ')', 452, 41, 452, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 452, 42, 453, 0),
woosh.Token(woosh.NAME, 'def', 454, 4, 454, 7),
woosh.Token(woosh.NAME, 'before_get', 454, 8, 454, 18),
woosh.Token(woosh.OP, '(', 454, 18, 454, 19),
woosh.Token(woosh.NAME, 'self', 454, 19, 454, 23),
woosh.Token(woosh.OP, ',', 454, 23, 454, 24),
woosh.Token(woosh.NAME, 'parser', 454, 25, 454, 31),
woosh.Token(woosh.OP, ',', 454, 31, 454, 32),
woosh.Token(woosh.NAME, 'section', 454, 33, 454, 40),
woosh.Token(woosh.OP, ',', 454, 40, 454, 41),
woosh.Token(woosh.NAME, 'option', 454, 42, 454, 48),
woosh.Token(woosh.OP, ',', 454, 48, 454, 49),
woosh.Token(woosh.NAME, 'value', 454, 50, 454, 55),
woosh.Token(woosh.OP, ',', 454, 55, 454, 56),
woosh.Token(woosh.NAME, 'defaults', 454, 57, 454, 65),
woosh.Token(woosh.OP, ')', 454, 65, 454, 66),
woosh.Token(woosh.OP, ':', 454, 66, 454, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 454, 67, 455, 0),
woosh.Token(woosh.INDENT, ' ', 455, 0, 455, 8),
woosh.Token(woosh.NAME, 'L', 455, 8, 455, 9),
woosh.Token(woosh.OP, '=', 455, 10, 455, 11),
woosh.Token(woosh.OP, '[', 455, 12, 455, 13),
woosh.Token(woosh.OP, ']', 455, 13, 455, 14),
woosh.Token(woosh.NEWLINE, '\r\n', 455, 14, 456, 0),
woosh.Token(woosh.NAME, 'self', 456, 8, 456, 12),
woosh.Token(woosh.OP, '.', 456, 12, 456, 13),
woosh.Token(woosh.NAME, '_interpolate_some', 456, 13, 456, 30),
woosh.Token(woosh.OP, '(', 456, 30, 456, 31),
woosh.Token(woosh.NAME, 'parser', 456, 31, 456, 37),
woosh.Token(woosh.OP, ',', 456, 37, 456, 38),
woosh.Token(woosh.NAME, 'option', 456, 39, 456, 45),
woosh.Token(woosh.OP, ',', 456, 45, 456, 46),
woosh.Token(woosh.NAME, 'L', 456, 47, 456, 48),
woosh.Token(woosh.OP, ',', 456, 48, 456, 49),
woosh.Token(woosh.NAME, 'value', 456, 50, 456, 55),
woosh.Token(woosh.OP, ',', 456, 55, 456, 56),
woosh.Token(woosh.NAME, 'section', 456, 57, 456, 64),
woosh.Token(woosh.OP, ',', 456, 64, 456, 65),
woosh.Token(woosh.NAME, 'defaults', 456, 66, 456, 74),
woosh.Token(woosh.OP, ',', 456, 74, 456, 75),
woosh.Token(woosh.NUMBER, '1', 456, 76, 456, 77),
woosh.Token(woosh.OP, ')', 456, 77, 456, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 456, 78, 457, 0),
woosh.Token(woosh.NAME, 'return', 457, 8, 457, 14),
woosh.Token(woosh.STRING, "''", 457, 15, 457, 17),
woosh.Token(woosh.OP, '.', 457, 17, 457, 18),
woosh.Token(woosh.NAME, 'join', 457, 18, 457, 22),
woosh.Token(woosh.OP, '(', 457, 22, 457, 23),
woosh.Token(woosh.NAME, 'L', 457, 23, 457, 24),
woosh.Token(woosh.OP, ')', 457, 24, 457, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 457, 25, 458, 0),
woosh.Token(woosh.DEDENT, ' ', 459, 0, 459, 4),
woosh.Token(woosh.NAME, 'def', 459, 4, 459, 7),
woosh.Token(woosh.NAME, 'before_set', 459, 8, 459, 18),
woosh.Token(woosh.OP, '(', 459, 18, 459, 19),
woosh.Token(woosh.NAME, 'self', 459, 19, 459, 23),
woosh.Token(woosh.OP, ',', 459, 23, 459, 24),
woosh.Token(woosh.NAME, 'parser', 459, 25, 459, 31),
woosh.Token(woosh.OP, ',', 459, 31, 459, 32),
woosh.Token(woosh.NAME, 'section', 459, 33, 459, 40),
woosh.Token(woosh.OP, ',', 459, 40, 459, 41),
woosh.Token(woosh.NAME, 'option', 459, 42, 459, 48),
woosh.Token(woosh.OP, ',', 459, 48, 459, 49),
woosh.Token(woosh.NAME, 'value', 459, 50, 459, 55),
woosh.Token(woosh.OP, ')', 459, 55, 459, 56),
woosh.Token(woosh.OP, ':', 459, 56, 459, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 459, 57, 460, 0),
woosh.Token(woosh.INDENT, ' ', 460, 0, 460, 8),
woosh.Token(woosh.NAME, 'tmp_value', 460, 8, 460, 17),
woosh.Token(woosh.OP, '=', 460, 18, 460, 19),
woosh.Token(woosh.NAME, 'value', 460, 20, 460, 25),
woosh.Token(woosh.OP, '.', 460, 25, 460, 26),
woosh.Token(woosh.NAME, 'replace', 460, 26, 460, 33),
woosh.Token(woosh.OP, '(', 460, 33, 460, 34),
woosh.Token(woosh.STRING, "'$$'", 460, 34, 460, 38),
woosh.Token(woosh.OP, ',', 460, 38, 460, 39),
woosh.Token(woosh.STRING, "''", 460, 40, 460, 42),
woosh.Token(woosh.OP, ')', 460, 42, 460, 43),
woosh.Token(woosh.COMMENT, '# escaped dollar signs', 460, 44, 460, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 460, 66, 461, 0),
woosh.Token(woosh.NAME, 'tmp_value', 461, 8, 461, 17),
woosh.Token(woosh.OP, '=', 461, 18, 461, 19),
woosh.Token(woosh.NAME, 'self', 461, 20, 461, 24),
woosh.Token(woosh.OP, '.', 461, 24, 461, 25),
woosh.Token(woosh.NAME, '_KEYCRE', 461, 25, 461, 32),
woosh.Token(woosh.OP, '.', 461, 32, 461, 33),
woosh.Token(woosh.NAME, 'sub', 461, 33, 461, 36),
woosh.Token(woosh.OP, '(', 461, 36, 461, 37),
woosh.Token(woosh.STRING, "''", 461, 37, 461, 39),
woosh.Token(woosh.OP, ',', 461, 39, 461, 40),
woosh.Token(woosh.NAME, 'tmp_value', 461, 41, 461, 50),
woosh.Token(woosh.OP, ')', 461, 50, 461, 51),
woosh.Token(woosh.COMMENT, '# valid syntax', 461, 52, 461, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 461, 66, 462, 0),
woosh.Token(woosh.NAME, 'if', 462, 8, 462, 10),
woosh.Token(woosh.STRING, "'$'", 462, 11, 462, 14),
woosh.Token(woosh.NAME, 'in', 462, 15, 462, 17),
woosh.Token(woosh.NAME, 'tmp_value', 462, 18, 462, 27),
woosh.Token(woosh.OP, ':', 462, 27, 462, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 462, 28, 463, 0),
woosh.Token(woosh.INDENT, ' ', 463, 0, 463, 12),
woosh.Token(woosh.NAME, 'raise', 463, 12, 463, 17),
woosh.Token(woosh.NAME, 'ValueError', 463, 18, 463, 28),
woosh.Token(woosh.OP, '(', 463, 28, 463, 29),
woosh.Token(woosh.STRING, '"invalid interpolation syntax in %r at "', 463, 29, 463, 69),
woosh.Token(woosh.STRING, '"position %d"', 464, 29, 464, 42),
woosh.Token(woosh.OP, '%', 464, 43, 464, 44),
woosh.Token(woosh.OP, '(', 464, 45, 464, 46),
woosh.Token(woosh.NAME, 'value', 464, 46, 464, 51),
woosh.Token(woosh.OP, ',', 464, 51, 464, 52),
woosh.Token(woosh.NAME, 'tmp_value', 464, 53, 464, 62),
woosh.Token(woosh.OP, '.', 464, 62, 464, 63),
woosh.Token(woosh.NAME, 'find', 464, 63, 464, 67),
woosh.Token(woosh.OP, '(', 464, 67, 464, 68),
woosh.Token(woosh.STRING, "'$'", 464, 68, 464, 71),
woosh.Token(woosh.OP, ')', 464, 71, 464, 72),
woosh.Token(woosh.OP, ')', 464, 72, 464, 73),
woosh.Token(woosh.OP, ')', 464, 73, 464, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 464, 74, 465, 0),
woosh.Token(woosh.DEDENT, ' ', 465, 0, 465, 8),
woosh.Token(woosh.NAME, 'return', 465, 8, 465, 14),
woosh.Token(woosh.NAME, 'value', 465, 15, 465, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 465, 20, 466, 0),
woosh.Token(woosh.DEDENT, ' ', 467, 0, 467, 4),
woosh.Token(woosh.NAME, 'def', 467, 4, 467, 7),
woosh.Token(woosh.NAME, '_interpolate_some', 467, 8, 467, 25),
woosh.Token(woosh.OP, '(', 467, 25, 467, 26),
woosh.Token(woosh.NAME, 'self', 467, 26, 467, 30),
woosh.Token(woosh.OP, ',', 467, 30, 467, 31),
woosh.Token(woosh.NAME, 'parser', 467, 32, 467, 38),
woosh.Token(woosh.OP, ',', 467, 38, 467, 39),
woosh.Token(woosh.NAME, 'option', 467, 40, 467, 46),
woosh.Token(woosh.OP, ',', 467, 46, 467, 47),
woosh.Token(woosh.NAME, 'accum', 467, 48, 467, 53),
woosh.Token(woosh.OP, ',', 467, 53, 467, 54),
woosh.Token(woosh.NAME, 'rest', 467, 55, 467, 59),
woosh.Token(woosh.OP, ',', 467, 59, 467, 60),
woosh.Token(woosh.NAME, 'section', 467, 61, 467, 68),
woosh.Token(woosh.OP, ',', 467, 68, 467, 69),
woosh.Token(woosh.NAME, 'map', 467, 70, 467, 73),
woosh.Token(woosh.OP, ',', 467, 73, 467, 74),
woosh.Token(woosh.NAME, 'depth', 468, 26, 468, 31),
woosh.Token(woosh.OP, ')', 468, 31, 468, 32),
woosh.Token(woosh.OP, ':', 468, 32, 468, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 468, 33, 469, 0),
woosh.Token(woosh.INDENT, ' ', 469, 0, 469, 8),
woosh.Token(woosh.NAME, 'rawval', 469, 8, 469, 14),
woosh.Token(woosh.OP, '=', 469, 15, 469, 16),
woosh.Token(woosh.NAME, 'parser', 469, 17, 469, 23),
woosh.Token(woosh.OP, '.', 469, 23, 469, 24),
woosh.Token(woosh.NAME, 'get', 469, 24, 469, 27),
woosh.Token(woosh.OP, '(', 469, 27, 469, 28),
woosh.Token(woosh.NAME, 'section', 469, 28, 469, 35),
woosh.Token(woosh.OP, ',', 469, 35, 469, 36),
woosh.Token(woosh.NAME, 'option', 469, 37, 469, 43),
woosh.Token(woosh.OP, ',', 469, 43, 469, 44),
woosh.Token(woosh.NAME, 'raw', 469, 45, 469, 48),
woosh.Token(woosh.OP, '=', 469, 48, 469, 49),
woosh.Token(woosh.NAME, 'True', 469, 49, 469, 53),
woosh.Token(woosh.OP, ',', 469, 53, 469, 54),
woosh.Token(woosh.NAME, 'fallback', 469, 55, 469, 63),
woosh.Token(woosh.OP, '=', 469, 63, 469, 64),
woosh.Token(woosh.NAME, 'rest', 469, 64, 469, 68),
woosh.Token(woosh.OP, ')', 469, 68, 469, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 469, 69, 470, 0),
woosh.Token(woosh.NAME, 'if', 470, 8, 470, 10),
woosh.Token(woosh.NAME, 'depth', 470, 11, 470, 16),
woosh.Token(woosh.OP, '>', 470, 17, 470, 18),
woosh.Token(woosh.NAME, 'MAX_INTERPOLATION_DEPTH', 470, 19, 470, 42),
woosh.Token(woosh.OP, ':', 470, 42, 470, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 470, 43, 471, 0),
woosh.Token(woosh.INDENT, ' ', 471, 0, 471, 12),
woosh.Token(woosh.NAME, 'raise', 471, 12, 471, 17),
woosh.Token(woosh.NAME, 'InterpolationDepthError', 471, 18, 471, 41),
woosh.Token(woosh.OP, '(', 471, 41, 471, 42),
woosh.Token(woosh.NAME, 'option', 471, 42, 471, 48),
woosh.Token(woosh.OP, ',', 471, 48, 471, 49),
woosh.Token(woosh.NAME, 'section', 471, 50, 471, 57),
woosh.Token(woosh.OP, ',', 471, 57, 471, 58),
woosh.Token(woosh.NAME, 'rawval', 471, 59, 471, 65),
woosh.Token(woosh.OP, ')', 471, 65, 471, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 471, 66, 472, 0),
woosh.Token(woosh.DEDENT, ' ', 472, 0, 472, 8),
woosh.Token(woosh.NAME, 'while', 472, 8, 472, 13),
woosh.Token(woosh.NAME, 'rest', 472, 14, 472, 18),
woosh.Token(woosh.OP, ':', 472, 18, 472, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 472, 19, 473, 0),
woosh.Token(woosh.INDENT, ' ', 473, 0, 473, 12),
woosh.Token(woosh.NAME, 'p', 473, 12, 473, 13),
woosh.Token(woosh.OP, '=', 473, 14, 473, 15),
woosh.Token(woosh.NAME, 'rest', 473, 16, 473, 20),
woosh.Token(woosh.OP, '.', 473, 20, 473, 21),
woosh.Token(woosh.NAME, 'find', 473, 21, 473, 25),
woosh.Token(woosh.OP, '(', 473, 25, 473, 26),
woosh.Token(woosh.STRING, '"$"', 473, 26, 473, 29),
woosh.Token(woosh.OP, ')', 473, 29, 473, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 473, 30, 474, 0),
woosh.Token(woosh.NAME, 'if', 474, 12, 474, 14),
woosh.Token(woosh.NAME, 'p', 474, 15, 474, 16),
woosh.Token(woosh.OP, '<', 474, 17, 474, 18),
woosh.Token(woosh.NUMBER, '0', 474, 19, 474, 20),
woosh.Token(woosh.OP, ':', 474, 20, 474, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 474, 21, 475, 0),
woosh.Token(woosh.INDENT, ' ', 475, 0, 475, 16),
woosh.Token(woosh.NAME, 'accum', 475, 16, 475, 21),
woosh.Token(woosh.OP, '.', 475, 21, 475, 22),
woosh.Token(woosh.NAME, 'append', 475, 22, 475, 28),
woosh.Token(woosh.OP, '(', 475, 28, 475, 29),
woosh.Token(woosh.NAME, 'rest', 475, 29, 475, 33),
woosh.Token(woosh.OP, ')', 475, 33, 475, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 475, 34, 476, 0),
woosh.Token(woosh.NAME, 'return', 476, 16, 476, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 476, 22, 477, 0),
woosh.Token(woosh.DEDENT, ' ', 477, 0, 477, 12),
woosh.Token(woosh.NAME, 'if', 477, 12, 477, 14),
woosh.Token(woosh.NAME, 'p', 477, 15, 477, 16),
woosh.Token(woosh.OP, '>', 477, 17, 477, 18),
woosh.Token(woosh.NUMBER, '0', 477, 19, 477, 20),
woosh.Token(woosh.OP, ':', 477, 20, 477, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 477, 21, 478, 0),
woosh.Token(woosh.INDENT, ' ', 478, 0, 478, 16),
woosh.Token(woosh.NAME, 'accum', 478, 16, 478, 21),
woosh.Token(woosh.OP, '.', 478, 21, 478, 22),
woosh.Token(woosh.NAME, 'append', 478, 22, 478, 28),
woosh.Token(woosh.OP, '(', 478, 28, 478, 29),
woosh.Token(woosh.NAME, 'rest', 478, 29, 478, 33),
woosh.Token(woosh.OP, '[', 478, 33, 478, 34),
woosh.Token(woosh.OP, ':', 478, 34, 478, 35),
woosh.Token(woosh.NAME, 'p', 478, 35, 478, 36),
woosh.Token(woosh.OP, ']', 478, 36, 478, 37),
woosh.Token(woosh.OP, ')', 478, 37, 478, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 478, 38, 479, 0),
woosh.Token(woosh.NAME, 'rest', 479, 16, 479, 20),
woosh.Token(woosh.OP, '=', 479, 21, 479, 22),
woosh.Token(woosh.NAME, 'rest', 479, 23, 479, 27),
woosh.Token(woosh.OP, '[', 479, 27, 479, 28),
woosh.Token(woosh.NAME, 'p', 479, 28, 479, 29),
woosh.Token(woosh.OP, ':', 479, 29, 479, 30),
woosh.Token(woosh.OP, ']', 479, 30, 479, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 479, 31, 480, 0),
woosh.Token(woosh.COMMENT, '# p is no longer used', 480, 12, 480, 33),
woosh.Token(woosh.DEDENT, ' ', 481, 0, 481, 12),
woosh.Token(woosh.NAME, 'c', 481, 12, 481, 13),
woosh.Token(woosh.OP, '=', 481, 14, 481, 15),
woosh.Token(woosh.NAME, 'rest', 481, 16, 481, 20),
woosh.Token(woosh.OP, '[', 481, 20, 481, 21),
woosh.Token(woosh.NUMBER, '1', 481, 21, 481, 22),
woosh.Token(woosh.OP, ':', 481, 22, 481, 23),
woosh.Token(woosh.NUMBER, '2', 481, 23, 481, 24),
woosh.Token(woosh.OP, ']', 481, 24, 481, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 481, 25, 482, 0),
woosh.Token(woosh.NAME, 'if', 482, 12, 482, 14),
woosh.Token(woosh.NAME, 'c', 482, 15, 482, 16),
woosh.Token(woosh.OP, '==', 482, 17, 482, 19),
woosh.Token(woosh.STRING, '"$"', 482, 20, 482, 23),
woosh.Token(woosh.OP, ':', 482, 23, 482, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 482, 24, 483, 0),
woosh.Token(woosh.INDENT, ' ', 483, 0, 483, 16),
woosh.Token(woosh.NAME, 'accum', 483, 16, 483, 21),
woosh.Token(woosh.OP, '.', 483, 21, 483, 22),
woosh.Token(woosh.NAME, 'append', 483, 22, 483, 28),
woosh.Token(woosh.OP, '(', 483, 28, 483, 29),
woosh.Token(woosh.STRING, '"$"', 483, 29, 483, 32),
woosh.Token(woosh.OP, ')', 483, 32, 483, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 483, 33, 484, 0),
woosh.Token(woosh.NAME, 'rest', 484, 16, 484, 20),
woosh.Token(woosh.OP, '=', 484, 21, 484, 22),
woosh.Token(woosh.NAME, 'rest', 484, 23, 484, 27),
woosh.Token(woosh.OP, '[', 484, 27, 484, 28),
woosh.Token(woosh.NUMBER, '2', 484, 28, 484, 29),
woosh.Token(woosh.OP, ':', 484, 29, 484, 30),
woosh.Token(woosh.OP, ']', 484, 30, 484, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 484, 31, 485, 0),
woosh.Token(woosh.DEDENT, ' ', 485, 0, 485, 12),
woosh.Token(woosh.NAME, 'elif', 485, 12, 485, 16),
woosh.Token(woosh.NAME, 'c', 485, 17, 485, 18),
woosh.Token(woosh.OP, '==', 485, 19, 485, 21),
woosh.Token(woosh.STRING, '"{"', 485, 22, 485, 25),
woosh.Token(woosh.OP, ':', 485, 25, 485, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 485, 26, 486, 0),
woosh.Token(woosh.INDENT, ' ', 486, 0, 486, 16),
woosh.Token(woosh.NAME, 'm', 486, 16, 486, 17),
woosh.Token(woosh.OP, '=', 486, 18, 486, 19),
woosh.Token(woosh.NAME, 'self', 486, 20, 486, 24),
woosh.Token(woosh.OP, '.', 486, 24, 486, 25),
woosh.Token(woosh.NAME, '_KEYCRE', 486, 25, 486, 32),
woosh.Token(woosh.OP, '.', 486, 32, 486, 33),
woosh.Token(woosh.NAME, 'match', 486, 33, 486, 38),
woosh.Token(woosh.OP, '(', 486, 38, 486, 39),
woosh.Token(woosh.NAME, 'rest', 486, 39, 486, 43),
woosh.Token(woosh.OP, ')', 486, 43, 486, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 486, 44, 487, 0),
woosh.Token(woosh.NAME, 'if', 487, 16, 487, 18),
woosh.Token(woosh.NAME, 'm', 487, 19, 487, 20),
woosh.Token(woosh.NAME, 'is', 487, 21, 487, 23),
woosh.Token(woosh.NAME, 'None', 487, 24, 487, 28),
woosh.Token(woosh.OP, ':', 487, 28, 487, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 487, 29, 488, 0),
woosh.Token(woosh.INDENT, ' ', 488, 0, 488, 20),
woosh.Token(woosh.NAME, 'raise', 488, 20, 488, 25),
woosh.Token(woosh.NAME, 'InterpolationSyntaxError', 488, 26, 488, 50),
woosh.Token(woosh.OP, '(', 488, 50, 488, 51),
woosh.Token(woosh.NAME, 'option', 488, 51, 488, 57),
woosh.Token(woosh.OP, ',', 488, 57, 488, 58),
woosh.Token(woosh.NAME, 'section', 488, 59, 488, 66),
woosh.Token(woosh.OP, ',', 488, 66, 488, 67),
woosh.Token(woosh.STRING, '"bad interpolation variable reference %r"', 489, 24, 489, 65),
woosh.Token(woosh.OP, '%', 489, 66, 489, 67),
woosh.Token(woosh.NAME, 'rest', 489, 68, 489, 72),
woosh.Token(woosh.OP, ')', 489, 72, 489, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 489, 73, 490, 0),
woosh.Token(woosh.DEDENT, ' ', 490, 0, 490, 16),
woosh.Token(woosh.NAME, 'path', 490, 16, 490, 20),
woosh.Token(woosh.OP, '=', 490, 21, 490, 22),
woosh.Token(woosh.NAME, 'm', 490, 23, 490, 24),
woosh.Token(woosh.OP, '.', 490, 24, 490, 25),
woosh.Token(woosh.NAME, 'group', 490, 25, 490, 30),
woosh.Token(woosh.OP, '(', 490, 30, 490, 31),
woosh.Token(woosh.NUMBER, '1', 490, 31, 490, 32),
woosh.Token(woosh.OP, ')', 490, 32, 490, 33),
woosh.Token(woosh.OP, '.', 490, 33, 490, 34),
woosh.Token(woosh.NAME, 'split', 490, 34, 490, 39),
woosh.Token(woosh.OP, '(', 490, 39, 490, 40),
woosh.Token(woosh.STRING, "':'", 490, 40, 490, 43),
woosh.Token(woosh.OP, ')', 490, 43, 490, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 490, 44, 491, 0),
woosh.Token(woosh.NAME, 'rest', 491, 16, 491, 20),
woosh.Token(woosh.OP, '=', 491, 21, 491, 22),
woosh.Token(woosh.NAME, 'rest', 491, 23, 491, 27),
woosh.Token(woosh.OP, '[', 491, 27, 491, 28),
woosh.Token(woosh.NAME, 'm', 491, 28, 491, 29),
woosh.Token(woosh.OP, '.', 491, 29, 491, 30),
woosh.Token(woosh.NAME, 'end', 491, 30, 491, 33),
woosh.Token(woosh.OP, '(', 491, 33, 491, 34),
woosh.Token(woosh.OP, ')', 491, 34, 491, 35),
woosh.Token(woosh.OP, ':', 491, 35, 491, 36),
woosh.Token(woosh.OP, ']', 491, 36, 491, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 491, 37, 492, 0),
woosh.Token(woosh.NAME, 'sect', 492, 16, 492, 20),
woosh.Token(woosh.OP, '=', 492, 21, 492, 22),
woosh.Token(woosh.NAME, 'section', 492, 23, 492, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 492, 30, 493, 0),
woosh.Token(woosh.NAME, 'opt', 493, 16, 493, 19),
woosh.Token(woosh.OP, '=', 493, 20, 493, 21),
woosh.Token(woosh.NAME, 'option', 493, 22, 493, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 493, 28, 494, 0),
woosh.Token(woosh.NAME, 'try', 494, 16, 494, 19),
woosh.Token(woosh.OP, ':', 494, 19, 494, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 494, 20, 495, 0),
woosh.Token(woosh.INDENT, ' ', 495, 0, 495, 20),
woosh.Token(woosh.NAME, 'if', 495, 20, 495, 22),
woosh.Token(woosh.NAME, 'len', 495, 23, 495, 26),
woosh.Token(woosh.OP, '(', 495, 26, 495, 27),
woosh.Token(woosh.NAME, 'path', 495, 27, 495, 31),
woosh.Token(woosh.OP, ')', 495, 31, 495, 32),
woosh.Token(woosh.OP, '==', 495, 33, 495, 35),
woosh.Token(woosh.NUMBER, '1', 495, 36, 495, 37),
woosh.Token(woosh.OP, ':', 495, 37, 495, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 495, 38, 496, 0),
woosh.Token(woosh.INDENT, ' ', 496, 0, 496, 24),
woosh.Token(woosh.NAME, 'opt', 496, 24, 496, 27),
woosh.Token(woosh.OP, '=', 496, 28, 496, 29),
woosh.Token(woosh.NAME, 'parser', 496, 30, 496, 36),
woosh.Token(woosh.OP, '.', 496, 36, 496, 37),
woosh.Token(woosh.NAME, 'optionxform', 496, 37, 496, 48),
woosh.Token(woosh.OP, '(', 496, 48, 496, 49),
woosh.Token(woosh.NAME, 'path', 496, 49, 496, 53),
woosh.Token(woosh.OP, '[', 496, 53, 496, 54),
woosh.Token(woosh.NUMBER, '0', 496, 54, 496, 55),
woosh.Token(woosh.OP, ']', 496, 55, 496, 56),
woosh.Token(woosh.OP, ')', 496, 56, 496, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 496, 57, 497, 0),
woosh.Token(woosh.NAME, 'v', 497, 24, 497, 25),
woosh.Token(woosh.OP, '=', 497, 26, 497, 27),
woosh.Token(woosh.NAME, 'map', 497, 28, 497, 31),
woosh.Token(woosh.OP, '[', 497, 31, 497, 32),
woosh.Token(woosh.NAME, 'opt', 497, 32, 497, 35),
woosh.Token(woosh.OP, ']', 497, 35, 497, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 497, 36, 498, 0),
woosh.Token(woosh.DEDENT, ' ', 498, 0, 498, 20),
woosh.Token(woosh.NAME, 'elif', 498, 20, 498, 24),
woosh.Token(woosh.NAME, 'len', 498, 25, 498, 28),
woosh.Token(woosh.OP, '(', 498, 28, 498, 29),
woosh.Token(woosh.NAME, 'path', 498, 29, 498, 33),
woosh.Token(woosh.OP, ')', 498, 33, 498, 34),
woosh.Token(woosh.OP, '==', 498, 35, 498, 37),
woosh.Token(woosh.NUMBER, '2', 498, 38, 498, 39),
woosh.Token(woosh.OP, ':', 498, 39, 498, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 498, 40, 499, 0),
woosh.Token(woosh.INDENT, ' ', 499, 0, 499, 24),
woosh.Token(woosh.NAME, 'sect', 499, 24, 499, 28),
woosh.Token(woosh.OP, '=', 499, 29, 499, 30),
woosh.Token(woosh.NAME, 'path', 499, 31, 499, 35),
woosh.Token(woosh.OP, '[', 499, 35, 499, 36),
woosh.Token(woosh.NUMBER, '0', 499, 36, 499, 37),
woosh.Token(woosh.OP, ']', 499, 37, 499, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 499, 38, 500, 0),
woosh.Token(woosh.NAME, 'opt', 500, 24, 500, 27),
woosh.Token(woosh.OP, '=', 500, 28, 500, 29),
woosh.Token(woosh.NAME, 'parser', 500, 30, 500, 36),
woosh.Token(woosh.OP, '.', 500, 36, 500, 37),
woosh.Token(woosh.NAME, 'optionxform', 500, 37, 500, 48),
woosh.Token(woosh.OP, '(', 500, 48, 500, 49),
woosh.Token(woosh.NAME, 'path', 500, 49, 500, 53),
woosh.Token(woosh.OP, '[', 500, 53, 500, 54),
woosh.Token(woosh.NUMBER, '1', 500, 54, 500, 55),
woosh.Token(woosh.OP, ']', 500, 55, 500, 56),
woosh.Token(woosh.OP, ')', 500, 56, 500, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 500, 57, 501, 0),
woosh.Token(woosh.NAME, 'v', 501, 24, 501, 25),
woosh.Token(woosh.OP, '=', 501, 26, 501, 27),
woosh.Token(woosh.NAME, 'parser', 501, 28, 501, 34),
woosh.Token(woosh.OP, '.', 501, 34, 501, 35),
woosh.Token(woosh.NAME, 'get', 501, 35, 501, 38),
woosh.Token(woosh.OP, '(', 501, 38, 501, 39),
woosh.Token(woosh.NAME, 'sect', 501, 39, 501, 43),
woosh.Token(woosh.OP, ',', 501, 43, 501, 44),
woosh.Token(woosh.NAME, 'opt', 501, 45, 501, 48),
woosh.Token(woosh.OP, ',', 501, 48, 501, 49),
woosh.Token(woosh.NAME, 'raw', 501, 50, 501, 53),
woosh.Token(woosh.OP, '=', 501, 53, 501, 54),
woosh.Token(woosh.NAME, 'True', 501, 54, 501, 58),
woosh.Token(woosh.OP, ')', 501, 58, 501, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 501, 59, 502, 0),
woosh.Token(woosh.DEDENT, ' ', 502, 0, 502, 20),
woosh.Token(woosh.NAME, 'else', 502, 20, 502, 24),
woosh.Token(woosh.OP, ':', 502, 24, 502, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 502, 25, 503, 0),
woosh.Token(woosh.INDENT, ' ', 503, 0, 503, 24),
woosh.Token(woosh.NAME, 'raise', 503, 24, 503, 29),
woosh.Token(woosh.NAME, 'InterpolationSyntaxError', 503, 30, 503, 54),
woosh.Token(woosh.OP, '(', 503, 54, 503, 55),
woosh.Token(woosh.NAME, 'option', 504, 28, 504, 34),
woosh.Token(woosh.OP, ',', 504, 34, 504, 35),
woosh.Token(woosh.NAME, 'section', 504, 36, 504, 43),
woosh.Token(woosh.OP, ',', 504, 43, 504, 44),
woosh.Token(woosh.STRING, '"More than one \':\' found: %r"', 505, 28, 505, 57),
woosh.Token(woosh.OP, '%', 505, 58, 505, 59),
woosh.Token(woosh.OP, '(', 505, 60, 505, 61),
woosh.Token(woosh.NAME, 'rest', 505, 61, 505, 65),
woosh.Token(woosh.OP, ',', 505, 65, 505, 66),
woosh.Token(woosh.OP, ')', 505, 66, 505, 67),
woosh.Token(woosh.OP, ')', 505, 67, 505, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 505, 68, 506, 0),
woosh.Token(woosh.DEDENT, ' ', 506, 0, 506, 16),
woosh.Token(woosh.DEDENT, '', 506, 16, 506, 16),
woosh.Token(woosh.NAME, 'except', 506, 16, 506, 22),
woosh.Token(woosh.OP, '(', 506, 23, 506, 24),
woosh.Token(woosh.NAME, 'KeyError', 506, 24, 506, 32),
woosh.Token(woosh.OP, ',', 506, 32, 506, 33),
woosh.Token(woosh.NAME, 'NoSectionError', 506, 34, 506, 48),
woosh.Token(woosh.OP, ',', 506, 48, 506, 49),
woosh.Token(woosh.NAME, 'NoOptionError', 506, 50, 506, 63),
woosh.Token(woosh.OP, ')', 506, 63, 506, 64),
woosh.Token(woosh.OP, ':', 506, 64, 506, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 506, 65, 507, 0),
woosh.Token(woosh.INDENT, ' ', 507, 0, 507, 20),
woosh.Token(woosh.NAME, 'raise', 507, 20, 507, 25),
woosh.Token(woosh.NAME, 'InterpolationMissingOptionError', 507, 26, 507, 57),
woosh.Token(woosh.OP, '(', 507, 57, 507, 58),
woosh.Token(woosh.NAME, 'option', 508, 24, 508, 30),
woosh.Token(woosh.OP, ',', 508, 30, 508, 31),
woosh.Token(woosh.NAME, 'section', 508, 32, 508, 39),
woosh.Token(woosh.OP, ',', 508, 39, 508, 40),
woosh.Token(woosh.NAME, 'rawval', 508, 41, 508, 47),
woosh.Token(woosh.OP, ',', 508, 47, 508, 48),
woosh.Token(woosh.STRING, '":"', 508, 49, 508, 52),
woosh.Token(woosh.OP, '.', 508, 52, 508, 53),
woosh.Token(woosh.NAME, 'join', 508, 53, 508, 57),
woosh.Token(woosh.OP, '(', 508, 57, 508, 58),
woosh.Token(woosh.NAME, 'path', 508, 58, 508, 62),
woosh.Token(woosh.OP, ')', 508, 62, 508, 63),
woosh.Token(woosh.OP, ')', 508, 63, 508, 64),
woosh.Token(woosh.NAME, 'from', 508, 65, 508, 69),
woosh.Token(woosh.NAME, 'None', 508, 70, 508, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 508, 74, 509, 0),
woosh.Token(woosh.DEDENT, ' ', 509, 0, 509, 16),
woosh.Token(woosh.NAME, 'if', 509, 16, 509, 18),
woosh.Token(woosh.STRING, '"$"', 509, 19, 509, 22),
woosh.Token(woosh.NAME, 'in', 509, 23, 509, 25),
woosh.Token(woosh.NAME, 'v', 509, 26, 509, 27),
woosh.Token(woosh.OP, ':', 509, 27, 509, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 509, 28, 510, 0),
woosh.Token(woosh.INDENT, ' ', 510, 0, 510, 20),
woosh.Token(woosh.NAME, 'self', 510, 20, 510, 24),
woosh.Token(woosh.OP, '.', 510, 24, 510, 25),
woosh.Token(woosh.NAME, '_interpolate_some', 510, 25, 510, 42),
woosh.Token(woosh.OP, '(', 510, 42, 510, 43),
woosh.Token(woosh.NAME, 'parser', 510, 43, 510, 49),
woosh.Token(woosh.OP, ',', 510, 49, 510, 50),
woosh.Token(woosh.NAME, 'opt', 510, 51, 510, 54),
woosh.Token(woosh.OP, ',', 510, 54, 510, 55),
woosh.Token(woosh.NAME, 'accum', 510, 56, 510, 61),
woosh.Token(woosh.OP, ',', 510, 61, 510, 62),
woosh.Token(woosh.NAME, 'v', 510, 63, 510, 64),
woosh.Token(woosh.OP, ',', 510, 64, 510, 65),
woosh.Token(woosh.NAME, 'sect', 510, 66, 510, 70),
woosh.Token(woosh.OP, ',', 510, 70, 510, 71),
woosh.Token(woosh.NAME, 'dict', 511, 43, 511, 47),
woosh.Token(woosh.OP, '(', 511, 47, 511, 48),
woosh.Token(woosh.NAME, 'parser', 511, 48, 511, 54),
woosh.Token(woosh.OP, '.', 511, 54, 511, 55),
woosh.Token(woosh.NAME, 'items', 511, 55, 511, 60),
woosh.Token(woosh.OP, '(', 511, 60, 511, 61),
woosh.Token(woosh.NAME, 'sect', 511, 61, 511, 65),
woosh.Token(woosh.OP, ',', 511, 65, 511, 66),
woosh.Token(woosh.NAME, 'raw', 511, 67, 511, 70),
woosh.Token(woosh.OP, '=', 511, 70, 511, 71),
woosh.Token(woosh.NAME, 'True', 511, 71, 511, 75),
woosh.Token(woosh.OP, ')', 511, 75, 511, 76),
woosh.Token(woosh.OP, ')', 511, 76, 511, 77),
woosh.Token(woosh.OP, ',', 511, 77, 511, 78),
woosh.Token(woosh.NAME, 'depth', 512, 43, 512, 48),
woosh.Token(woosh.OP, '+', 512, 49, 512, 50),
woosh.Token(woosh.NUMBER, '1', 512, 51, 512, 52),
woosh.Token(woosh.OP, ')', 512, 52, 512, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 512, 53, 513, 0),
woosh.Token(woosh.DEDENT, ' ', 513, 0, 513, 16),
woosh.Token(woosh.NAME, 'else', 513, 16, 513, 20),
woosh.Token(woosh.OP, ':', 513, 20, 513, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 513, 21, 514, 0),
woosh.Token(woosh.INDENT, ' ', 514, 0, 514, 20),
woosh.Token(woosh.NAME, 'accum', 514, 20, 514, 25),
woosh.Token(woosh.OP, '.', 514, 25, 514, 26),
woosh.Token(woosh.NAME, 'append', 514, 26, 514, 32),
woosh.Token(woosh.OP, '(', 514, 32, 514, 33),
woosh.Token(woosh.NAME, 'v', 514, 33, 514, 34),
woosh.Token(woosh.OP, ')', 514, 34, 514, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 514, 35, 515, 0),
woosh.Token(woosh.DEDENT, ' ', 515, 0, 515, 12),
woosh.Token(woosh.DEDENT, '', 515, 12, 515, 12),
woosh.Token(woosh.NAME, 'else', 515, 12, 515, 16),
woosh.Token(woosh.OP, ':', 515, 16, 515, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 515, 17, 516, 0),
woosh.Token(woosh.INDENT, ' ', 516, 0, 516, 16),
woosh.Token(woosh.NAME, 'raise', 516, 16, 516, 21),
woosh.Token(woosh.NAME, 'InterpolationSyntaxError', 516, 22, 516, 46),
woosh.Token(woosh.OP, '(', 516, 46, 516, 47),
woosh.Token(woosh.NAME, 'option', 517, 20, 517, 26),
woosh.Token(woosh.OP, ',', 517, 26, 517, 27),
woosh.Token(woosh.NAME, 'section', 517, 28, 517, 35),
woosh.Token(woosh.OP, ',', 517, 35, 517, 36),
woosh.Token(woosh.STRING, '"\'$\' must be followed by \'$\' or \'{\', "', 518, 20, 518, 58),
woosh.Token(woosh.STRING, '"found: %r"', 519, 20, 519, 31),
woosh.Token(woosh.OP, '%', 519, 32, 519, 33),
woosh.Token(woosh.OP, '(', 519, 34, 519, 35),
woosh.Token(woosh.NAME, 'rest', 519, 35, 519, 39),
woosh.Token(woosh.OP, ',', 519, 39, 519, 40),
woosh.Token(woosh.OP, ')', 519, 40, 519, 41),
woosh.Token(woosh.OP, ')', 519, 41, 519, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 519, 42, 520, 0),
woosh.Token(woosh.DEDENT, '', 522, 0, 522, 0),
woosh.Token(woosh.DEDENT, '', 522, 0, 522, 0),
woosh.Token(woosh.DEDENT, '', 522, 0, 522, 0),
woosh.Token(woosh.DEDENT, '', 522, 0, 522, 0),
woosh.Token(woosh.NAME, 'class', 522, 0, 522, 5),
woosh.Token(woosh.NAME, 'LegacyInterpolation', 522, 6, 522, 25),
woosh.Token(woosh.OP, '(', 522, 25, 522, 26),
woosh.Token(woosh.NAME, 'Interpolation', 522, 26, 522, 39),
woosh.Token(woosh.OP, ')', 522, 39, 522, 40),
woosh.Token(woosh.OP, ':', 522, 40, 522, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 522, 41, 523, 0),
woosh.Token(woosh.INDENT, ' ', 523, 0, 523, 4),
woosh.Token(woosh.STRING, '"""Deprecated interpolation used in old versions of ConfigParser.\r\n Use BasicInterpolation or ExtendedInterpolation instead."""', 523, 4, 524, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 524, 63, 525, 0),
woosh.Token(woosh.NAME, '_KEYCRE', 526, 4, 526, 11),
woosh.Token(woosh.OP, '=', 526, 12, 526, 13),
woosh.Token(woosh.NAME, 're', 526, 14, 526, 16),
woosh.Token(woosh.OP, '.', 526, 16, 526, 17),
woosh.Token(woosh.NAME, 'compile', 526, 17, 526, 24),
woosh.Token(woosh.OP, '(', 526, 24, 526, 25),
woosh.Token(woosh.STRING, 'r"%\\(([^)]*)\\)s|."', 526, 25, 526, 43),
woosh.Token(woosh.OP, ')', 526, 43, 526, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 526, 44, 527, 0),
woosh.Token(woosh.NAME, 'def', 528, 4, 528, 7),
woosh.Token(woosh.NAME, 'before_get', 528, 8, 528, 18),
woosh.Token(woosh.OP, '(', 528, 18, 528, 19),
woosh.Token(woosh.NAME, 'self', 528, 19, 528, 23),
woosh.Token(woosh.OP, ',', 528, 23, 528, 24),
woosh.Token(woosh.NAME, 'parser', 528, 25, 528, 31),
woosh.Token(woosh.OP, ',', 528, 31, 528, 32),
woosh.Token(woosh.NAME, 'section', 528, 33, 528, 40),
woosh.Token(woosh.OP, ',', 528, 40, 528, 41),
woosh.Token(woosh.NAME, 'option', 528, 42, 528, 48),
woosh.Token(woosh.OP, ',', 528, 48, 528, 49),
woosh.Token(woosh.NAME, 'value', 528, 50, 528, 55),
woosh.Token(woosh.OP, ',', 528, 55, 528, 56),
woosh.Token(woosh.NAME, 'vars', 528, 57, 528, 61),
woosh.Token(woosh.OP, ')', 528, 61, 528, 62),
woosh.Token(woosh.OP, ':', 528, 62, 528, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 528, 63, 529, 0),
woosh.Token(woosh.INDENT, ' ', 529, 0, 529, 8),
woosh.Token(woosh.NAME, 'rawval', 529, 8, 529, 14),
woosh.Token(woosh.OP, '=', 529, 15, 529, 16),
woosh.Token(woosh.NAME, 'value', 529, 17, 529, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 529, 22, 530, 0),
woosh.Token(woosh.NAME, 'depth', 530, 8, 530, 13),
woosh.Token(woosh.OP, '=', 530, 14, 530, 15),
woosh.Token(woosh.NAME, 'MAX_INTERPOLATION_DEPTH', 530, 16, 530, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 530, 39, 531, 0),
woosh.Token(woosh.NAME, 'while', 531, 8, 531, 13),
woosh.Token(woosh.NAME, 'depth', 531, 14, 531, 19),
woosh.Token(woosh.OP, ':', 531, 19, 531, 20),
woosh.Token(woosh.COMMENT, "# Loop through this until it's done", 531, 40, 531, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 531, 75, 532, 0),
woosh.Token(woosh.INDENT, ' ', 532, 0, 532, 12),
woosh.Token(woosh.NAME, 'depth', 532, 12, 532, 17),
woosh.Token(woosh.OP, '-=', 532, 18, 532, 20),
woosh.Token(woosh.NUMBER, '1', 532, 21, 532, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 532, 22, 533, 0),
woosh.Token(woosh.NAME, 'if', 533, 12, 533, 14),
woosh.Token(woosh.NAME, 'value', 533, 15, 533, 20),
woosh.Token(woosh.NAME, 'and', 533, 21, 533, 24),
woosh.Token(woosh.STRING, '"%("', 533, 25, 533, 29),
woosh.Token(woosh.NAME, 'in', 533, 30, 533, 32),
woosh.Token(woosh.NAME, 'value', 533, 33, 533, 38),
woosh.Token(woosh.OP, ':', 533, 38, 533, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 533, 39, 534, 0),
woosh.Token(woosh.INDENT, ' ', 534, 0, 534, 16),
woosh.Token(woosh.NAME, 'replace', 534, 16, 534, 23),
woosh.Token(woosh.OP, '=', 534, 24, 534, 25),
woosh.Token(woosh.NAME, 'functools', 534, 26, 534, 35),
woosh.Token(woosh.OP, '.', 534, 35, 534, 36),
woosh.Token(woosh.NAME, 'partial', 534, 36, 534, 43),
woosh.Token(woosh.OP, '(', 534, 43, 534, 44),
woosh.Token(woosh.NAME, 'self', 534, 44, 534, 48),
woosh.Token(woosh.OP, '.', 534, 48, 534, 49),
woosh.Token(woosh.NAME, '_interpolation_replace', 534, 49, 534, 71),
woosh.Token(woosh.OP, ',', 534, 71, 534, 72),
woosh.Token(woosh.NAME, 'parser', 535, 44, 535, 50),
woosh.Token(woosh.OP, '=', 535, 50, 535, 51),
woosh.Token(woosh.NAME, 'parser', 535, 51, 535, 57),
woosh.Token(woosh.OP, ')', 535, 57, 535, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 535, 58, 536, 0),
woosh.Token(woosh.NAME, 'value', 536, 16, 536, 21),
woosh.Token(woosh.OP, '=', 536, 22, 536, 23),
woosh.Token(woosh.NAME, 'self', 536, 24, 536, 28),
woosh.Token(woosh.OP, '.', 536, 28, 536, 29),
woosh.Token(woosh.NAME, '_KEYCRE', 536, 29, 536, 36),
woosh.Token(woosh.OP, '.', 536, 36, 536, 37),
woosh.Token(woosh.NAME, 'sub', 536, 37, 536, 40),
woosh.Token(woosh.OP, '(', 536, 40, 536, 41),
woosh.Token(woosh.NAME, 'replace', 536, 41, 536, 48),
woosh.Token(woosh.OP, ',', 536, 48, 536, 49),
woosh.Token(woosh.NAME, 'value', 536, 50, 536, 55),
woosh.Token(woosh.OP, ')', 536, 55, 536, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 536, 56, 537, 0),
woosh.Token(woosh.NAME, 'try', 537, 16, 537, 19),
woosh.Token(woosh.OP, ':', 537, 19, 537, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 537, 20, 538, 0),
woosh.Token(woosh.INDENT, ' ', 538, 0, 538, 20),
woosh.Token(woosh.NAME, 'value', 538, 20, 538, 25),
woosh.Token(woosh.OP, '=', 538, 26, 538, 27),
woosh.Token(woosh.NAME, 'value', 538, 28, 538, 33),
woosh.Token(woosh.OP, '%', 538, 34, 538, 35),
woosh.Token(woosh.NAME, 'vars', 538, 36, 538, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 538, 40, 539, 0),
woosh.Token(woosh.DEDENT, ' ', 539, 0, 539, 16),
woosh.Token(woosh.NAME, 'except', 539, 16, 539, 22),
woosh.Token(woosh.NAME, 'KeyError', 539, 23, 539, 31),
woosh.Token(woosh.NAME, 'as', 539, 32, 539, 34),
woosh.Token(woosh.NAME, 'e', 539, 35, 539, 36),
woosh.Token(woosh.OP, ':', 539, 36, 539, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 539, 37, 540, 0),
woosh.Token(woosh.INDENT, ' ', 540, 0, 540, 20),
woosh.Token(woosh.NAME, 'raise', 540, 20, 540, 25),
woosh.Token(woosh.NAME, 'InterpolationMissingOptionError', 540, 26, 540, 57),
woosh.Token(woosh.OP, '(', 540, 57, 540, 58),
woosh.Token(woosh.NAME, 'option', 541, 24, 541, 30),
woosh.Token(woosh.OP, ',', 541, 30, 541, 31),
woosh.Token(woosh.NAME, 'section', 541, 32, 541, 39),
woosh.Token(woosh.OP, ',', 541, 39, 541, 40),
woosh.Token(woosh.NAME, 'rawval', 541, 41, 541, 47),
woosh.Token(woosh.OP, ',', 541, 47, 541, 48),
woosh.Token(woosh.NAME, 'e', 541, 49, 541, 50),
woosh.Token(woosh.OP, '.', 541, 50, 541, 51),
woosh.Token(woosh.NAME, 'args', 541, 51, 541, 55),
woosh.Token(woosh.OP, '[', 541, 55, 541, 56),
woosh.Token(woosh.NUMBER, '0', 541, 56, 541, 57),
woosh.Token(woosh.OP, ']', 541, 57, 541, 58),
woosh.Token(woosh.OP, ')', 541, 58, 541, 59),
woosh.Token(woosh.NAME, 'from', 541, 60, 541, 64),
woosh.Token(woosh.NAME, 'None', 541, 65, 541, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 541, 69, 542, 0),
woosh.Token(woosh.DEDENT, ' ', 542, 0, 542, 12),
woosh.Token(woosh.DEDENT, '', 542, 12, 542, 12),
woosh.Token(woosh.NAME, 'else', 542, 12, 542, 16),
woosh.Token(woosh.OP, ':', 542, 16, 542, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 542, 17, 543, 0),
woosh.Token(woosh.INDENT, ' ', 543, 0, 543, 16),
woosh.Token(woosh.NAME, 'break', 543, 16, 543, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 543, 21, 544, 0),
woosh.Token(woosh.DEDENT, ' ', 544, 0, 544, 8),
woosh.Token(woosh.DEDENT, '', 544, 8, 544, 8),
woosh.Token(woosh.NAME, 'if', 544, 8, 544, 10),
woosh.Token(woosh.NAME, 'value', 544, 11, 544, 16),
woosh.Token(woosh.NAME, 'and', 544, 17, 544, 20),
woosh.Token(woosh.STRING, '"%("', 544, 21, 544, 25),
woosh.Token(woosh.NAME, 'in', 544, 26, 544, 28),
woosh.Token(woosh.NAME, 'value', 544, 29, 544, 34),
woosh.Token(woosh.OP, ':', 544, 34, 544, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 544, 35, 545, 0),
woosh.Token(woosh.INDENT, ' ', 545, 0, 545, 12),
woosh.Token(woosh.NAME, 'raise', 545, 12, 545, 17),
woosh.Token(woosh.NAME, 'InterpolationDepthError', 545, 18, 545, 41),
woosh.Token(woosh.OP, '(', 545, 41, 545, 42),
woosh.Token(woosh.NAME, 'option', 545, 42, 545, 48),
woosh.Token(woosh.OP, ',', 545, 48, 545, 49),
woosh.Token(woosh.NAME, 'section', 545, 50, 545, 57),
woosh.Token(woosh.OP, ',', 545, 57, 545, 58),
woosh.Token(woosh.NAME, 'rawval', 545, 59, 545, 65),
woosh.Token(woosh.OP, ')', 545, 65, 545, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 545, 66, 546, 0),
woosh.Token(woosh.DEDENT, ' ', 546, 0, 546, 8),
woosh.Token(woosh.NAME, 'return', 546, 8, 546, 14),
woosh.Token(woosh.NAME, 'value', 546, 15, 546, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 546, 20, 547, 0),
woosh.Token(woosh.DEDENT, ' ', 548, 0, 548, 4),
woosh.Token(woosh.NAME, 'def', 548, 4, 548, 7),
woosh.Token(woosh.NAME, 'before_set', 548, 8, 548, 18),
woosh.Token(woosh.OP, '(', 548, 18, 548, 19),
woosh.Token(woosh.NAME, 'self', 548, 19, 548, 23),
woosh.Token(woosh.OP, ',', 548, 23, 548, 24),
woosh.Token(woosh.NAME, 'parser', 548, 25, 548, 31),
woosh.Token(woosh.OP, ',', 548, 31, 548, 32),
woosh.Token(woosh.NAME, 'section', 548, 33, 548, 40),
woosh.Token(woosh.OP, ',', 548, 40, 548, 41),
woosh.Token(woosh.NAME, 'option', 548, 42, 548, 48),
woosh.Token(woosh.OP, ',', 548, 48, 548, 49),
woosh.Token(woosh.NAME, 'value', 548, 50, 548, 55),
woosh.Token(woosh.OP, ')', 548, 55, 548, 56),
woosh.Token(woosh.OP, ':', 548, 56, 548, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 548, 57, 549, 0),
woosh.Token(woosh.INDENT, ' ', 549, 0, 549, 8),
woosh.Token(woosh.NAME, 'return', 549, 8, 549, 14),
woosh.Token(woosh.NAME, 'value', 549, 15, 549, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 549, 20, 550, 0),
woosh.Token(woosh.DEDENT, ' ', 551, 0, 551, 4),
woosh.Token(woosh.OP, '@', 551, 4, 551, 5),
woosh.Token(woosh.NAME, 'staticmethod', 551, 5, 551, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 551, 17, 552, 0),
woosh.Token(woosh.NAME, 'def', 552, 4, 552, 7),
woosh.Token(woosh.NAME, '_interpolation_replace', 552, 8, 552, 30),
woosh.Token(woosh.OP, '(', 552, 30, 552, 31),
woosh.Token(woosh.NAME, 'match', 552, 31, 552, 36),
woosh.Token(woosh.OP, ',', 552, 36, 552, 37),
woosh.Token(woosh.NAME, 'parser', 552, 38, 552, 44),
woosh.Token(woosh.OP, ')', 552, 44, 552, 45),
woosh.Token(woosh.OP, ':', 552, 45, 552, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 552, 46, 553, 0),
woosh.Token(woosh.INDENT, ' ', 553, 0, 553, 8),
woosh.Token(woosh.NAME, 's', 553, 8, 553, 9),
woosh.Token(woosh.OP, '=', 553, 10, 553, 11),
woosh.Token(woosh.NAME, 'match', 553, 12, 553, 17),
woosh.Token(woosh.OP, '.', 553, 17, 553, 18),
woosh.Token(woosh.NAME, 'group', 553, 18, 553, 23),
woosh.Token(woosh.OP, '(', 553, 23, 553, 24),
woosh.Token(woosh.NUMBER, '1', 553, 24, 553, 25),
woosh.Token(woosh.OP, ')', 553, 25, 553, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 553, 26, 554, 0),
woosh.Token(woosh.NAME, 'if', 554, 8, 554, 10),
woosh.Token(woosh.NAME, 's', 554, 11, 554, 12),
woosh.Token(woosh.NAME, 'is', 554, 13, 554, 15),
woosh.Token(woosh.NAME, 'None', 554, 16, 554, 20),
woosh.Token(woosh.OP, ':', 554, 20, 554, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 554, 21, 555, 0),
woosh.Token(woosh.INDENT, ' ', 555, 0, 555, 12),
woosh.Token(woosh.NAME, 'return', 555, 12, 555, 18),
woosh.Token(woosh.NAME, 'match', 555, 19, 555, 24),
woosh.Token(woosh.OP, '.', 555, 24, 555, 25),
woosh.Token(woosh.NAME, 'group', 555, 25, 555, 30),
woosh.Token(woosh.OP, '(', 555, 30, 555, 31),
woosh.Token(woosh.OP, ')', 555, 31, 555, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 555, 32, 556, 0),
woosh.Token(woosh.DEDENT, ' ', 556, 0, 556, 8),
woosh.Token(woosh.NAME, 'else', 556, 8, 556, 12),
woosh.Token(woosh.OP, ':', 556, 12, 556, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 556, 13, 557, 0),
woosh.Token(woosh.INDENT, ' ', 557, 0, 557, 12),
woosh.Token(woosh.NAME, 'return', 557, 12, 557, 18),
woosh.Token(woosh.STRING, '"%%(%s)s"', 557, 19, 557, 28),
woosh.Token(woosh.OP, '%', 557, 29, 557, 30),
woosh.Token(woosh.NAME, 'parser', 557, 31, 557, 37),
woosh.Token(woosh.OP, '.', 557, 37, 557, 38),
woosh.Token(woosh.NAME, 'optionxform', 557, 38, 557, 49),
woosh.Token(woosh.OP, '(', 557, 49, 557, 50),
woosh.Token(woosh.NAME, 's', 557, 50, 557, 51),
woosh.Token(woosh.OP, ')', 557, 51, 557, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 557, 52, 558, 0),
woosh.Token(woosh.DEDENT, '', 560, 0, 560, 0),
woosh.Token(woosh.DEDENT, '', 560, 0, 560, 0),
woosh.Token(woosh.DEDENT, '', 560, 0, 560, 0),
woosh.Token(woosh.NAME, 'class', 560, 0, 560, 5),
woosh.Token(woosh.NAME, 'RawConfigParser', 560, 6, 560, 21),
woosh.Token(woosh.OP, '(', 560, 21, 560, 22),
woosh.Token(woosh.NAME, 'MutableMapping', 560, 22, 560, 36),
woosh.Token(woosh.OP, ')', 560, 36, 560, 37),
woosh.Token(woosh.OP, ':', 560, 37, 560, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 560, 38, 561, 0),
woosh.Token(woosh.INDENT, ' ', 561, 0, 561, 4),
woosh.Token(woosh.STRING, '"""ConfigParser that does not do interpolation."""', 561, 4, 561, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 561, 54, 562, 0),
woosh.Token(woosh.COMMENT, '# Regular expressions for parsing section headers and options', 563, 4, 563, 65),
woosh.Token(woosh.NAME, '_SECT_TMPL', 564, 4, 564, 14),
woosh.Token(woosh.OP, '=', 564, 15, 564, 16),
woosh.Token(woosh.STRING, 'r"""\r\n \\[ # [\r\n (?P<header>[^]]+) # very permissive!\r\n \\] # ]\r\n """', 564, 17, 568, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 568, 11, 569, 0),
woosh.Token(woosh.NAME, '_OPT_TMPL', 569, 4, 569, 13),
woosh.Token(woosh.OP, '=', 569, 14, 569, 15),
woosh.Token(woosh.STRING, 'r"""\r\n (?P<option>.*?) # very permissive!\r\n \\s*(?P<vi>{delim})\\s* # any number of space/tab,\r\n # followed by any of the\r\n # allowed delimiters,\r\n # followed by any space/tab\r\n (?P<value>.*)$ # everything up to eol\r\n """', 569, 16, 576, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 576, 11, 577, 0),
woosh.Token(woosh.NAME, '_OPT_NV_TMPL', 577, 4, 577, 16),
woosh.Token(woosh.OP, '=', 577, 17, 577, 18),
woosh.Token(woosh.STRING, 'r"""\r\n (?P<option>.*?) # very permissive!\r\n \\s*(?: # any number of space/tab,\r\n (?P<vi>{delim})\\s* # optionally followed by\r\n # any of the allowed\r\n # delimiters, followed by any\r\n # space/tab\r\n (?P<value>.*))?$ # everything up to eol\r\n """', 577, 19, 585, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 585, 11, 586, 0),
woosh.Token(woosh.COMMENT, '# Interpolation algorithm to be used if the user does not specify another', 586, 4, 586, 77),
woosh.Token(woosh.NAME, '_DEFAULT_INTERPOLATION', 587, 4, 587, 26),
woosh.Token(woosh.OP, '=', 587, 27, 587, 28),
woosh.Token(woosh.NAME, 'Interpolation', 587, 29, 587, 42),
woosh.Token(woosh.OP, '(', 587, 42, 587, 43),
woosh.Token(woosh.OP, ')', 587, 43, 587, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 587, 44, 588, 0),
woosh.Token(woosh.COMMENT, '# Compiled regular expression for matching sections', 588, 4, 588, 55),
woosh.Token(woosh.NAME, 'SECTCRE', 589, 4, 589, 11),
woosh.Token(woosh.OP, '=', 589, 12, 589, 13),
woosh.Token(woosh.NAME, 're', 589, 14, 589, 16),
woosh.Token(woosh.OP, '.', 589, 16, 589, 17),
woosh.Token(woosh.NAME, 'compile', 589, 17, 589, 24),
woosh.Token(woosh.OP, '(', 589, 24, 589, 25),
woosh.Token(woosh.NAME, '_SECT_TMPL', 589, 25, 589, 35),
woosh.Token(woosh.OP, ',', 589, 35, 589, 36),
woosh.Token(woosh.NAME, 're', 589, 37, 589, 39),
woosh.Token(woosh.OP, '.', 589, 39, 589, 40),
woosh.Token(woosh.NAME, 'VERBOSE', 589, 40, 589, 47),
woosh.Token(woosh.OP, ')', 589, 47, 589, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 589, 48, 590, 0),
woosh.Token(woosh.COMMENT, '# Compiled regular expression for matching options with typical separators', 590, 4, 590, 78),
woosh.Token(woosh.NAME, 'OPTCRE', 591, 4, 591, 10),
woosh.Token(woosh.OP, '=', 591, 11, 591, 12),
woosh.Token(woosh.NAME, 're', 591, 13, 591, 15),
woosh.Token(woosh.OP, '.', 591, 15, 591, 16),
woosh.Token(woosh.NAME, 'compile', 591, 16, 591, 23),
woosh.Token(woosh.OP, '(', 591, 23, 591, 24),
woosh.Token(woosh.NAME, '_OPT_TMPL', 591, 24, 591, 33),
woosh.Token(woosh.OP, '.', 591, 33, 591, 34),
woosh.Token(woosh.NAME, 'format', 591, 34, 591, 40),
woosh.Token(woosh.OP, '(', 591, 40, 591, 41),
woosh.Token(woosh.NAME, 'delim', 591, 41, 591, 46),
woosh.Token(woosh.OP, '=', 591, 46, 591, 47),
woosh.Token(woosh.STRING, '"=|:"', 591, 47, 591, 52),
woosh.Token(woosh.OP, ')', 591, 52, 591, 53),
woosh.Token(woosh.OP, ',', 591, 53, 591, 54),
woosh.Token(woosh.NAME, 're', 591, 55, 591, 57),
woosh.Token(woosh.OP, '.', 591, 57, 591, 58),
woosh.Token(woosh.NAME, 'VERBOSE', 591, 58, 591, 65),
woosh.Token(woosh.OP, ')', 591, 65, 591, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 591, 66, 592, 0),
woosh.Token(woosh.COMMENT, '# Compiled regular expression for matching options with optional values', 592, 4, 592, 75),
woosh.Token(woosh.COMMENT, '# delimited using typical separators', 593, 4, 593, 40),
woosh.Token(woosh.NAME, 'OPTCRE_NV', 594, 4, 594, 13),
woosh.Token(woosh.OP, '=', 594, 14, 594, 15),
woosh.Token(woosh.NAME, 're', 594, 16, 594, 18),
woosh.Token(woosh.OP, '.', 594, 18, 594, 19),
woosh.Token(woosh.NAME, 'compile', 594, 19, 594, 26),
woosh.Token(woosh.OP, '(', 594, 26, 594, 27),
woosh.Token(woosh.NAME, '_OPT_NV_TMPL', 594, 27, 594, 39),
woosh.Token(woosh.OP, '.', 594, 39, 594, 40),
woosh.Token(woosh.NAME, 'format', 594, 40, 594, 46),
woosh.Token(woosh.OP, '(', 594, 46, 594, 47),
woosh.Token(woosh.NAME, 'delim', 594, 47, 594, 52),
woosh.Token(woosh.OP, '=', 594, 52, 594, 53),
woosh.Token(woosh.STRING, '"=|:"', 594, 53, 594, 58),
woosh.Token(woosh.OP, ')', 594, 58, 594, 59),
woosh.Token(woosh.OP, ',', 594, 59, 594, 60),
woosh.Token(woosh.NAME, 're', 594, 61, 594, 63),
woosh.Token(woosh.OP, '.', 594, 63, 594, 64),
woosh.Token(woosh.NAME, 'VERBOSE', 594, 64, 594, 71),
woosh.Token(woosh.OP, ')', 594, 71, 594, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 594, 72, 595, 0),
woosh.Token(woosh.COMMENT, '# Compiled regular expression for matching leading whitespace in a line', 595, 4, 595, 75),
woosh.Token(woosh.NAME, 'NONSPACECRE', 596, 4, 596, 15),
woosh.Token(woosh.OP, '=', 596, 16, 596, 17),
woosh.Token(woosh.NAME, 're', 596, 18, 596, 20),
woosh.Token(woosh.OP, '.', 596, 20, 596, 21),
woosh.Token(woosh.NAME, 'compile', 596, 21, 596, 28),
woosh.Token(woosh.OP, '(', 596, 28, 596, 29),
woosh.Token(woosh.STRING, 'r"\\S"', 596, 29, 596, 34),
woosh.Token(woosh.OP, ')', 596, 34, 596, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 596, 35, 597, 0),
woosh.Token(woosh.COMMENT, '# Possible boolean values in the configuration.', 597, 4, 597, 51),
woosh.Token(woosh.NAME, 'BOOLEAN_STATES', 598, 4, 598, 18),
woosh.Token(woosh.OP, '=', 598, 19, 598, 20),
woosh.Token(woosh.OP, '{', 598, 21, 598, 22),
woosh.Token(woosh.STRING, "'1'", 598, 22, 598, 25),
woosh.Token(woosh.OP, ':', 598, 25, 598, 26),
woosh.Token(woosh.NAME, 'True', 598, 27, 598, 31),
woosh.Token(woosh.OP, ',', 598, 31, 598, 32),
woosh.Token(woosh.STRING, "'yes'", 598, 33, 598, 38),
woosh.Token(woosh.OP, ':', 598, 38, 598, 39),
woosh.Token(woosh.NAME, 'True', 598, 40, 598, 44),
woosh.Token(woosh.OP, ',', 598, 44, 598, 45),
woosh.Token(woosh.STRING, "'true'", 598, 46, 598, 52),
woosh.Token(woosh.OP, ':', 598, 52, 598, 53),
woosh.Token(woosh.NAME, 'True', 598, 54, 598, 58),
woosh.Token(woosh.OP, ',', 598, 58, 598, 59),
woosh.Token(woosh.STRING, "'on'", 598, 60, 598, 64),
woosh.Token(woosh.OP, ':', 598, 64, 598, 65),
woosh.Token(woosh.NAME, 'True', 598, 66, 598, 70),
woosh.Token(woosh.OP, ',', 598, 70, 598, 71),
woosh.Token(woosh.STRING, "'0'", 599, 22, 599, 25),
woosh.Token(woosh.OP, ':', 599, 25, 599, 26),
woosh.Token(woosh.NAME, 'False', 599, 27, 599, 32),
woosh.Token(woosh.OP, ',', 599, 32, 599, 33),
woosh.Token(woosh.STRING, "'no'", 599, 34, 599, 38),
woosh.Token(woosh.OP, ':', 599, 38, 599, 39),
woosh.Token(woosh.NAME, 'False', 599, 40, 599, 45),
woosh.Token(woosh.OP, ',', 599, 45, 599, 46),
woosh.Token(woosh.STRING, "'false'", 599, 47, 599, 54),
woosh.Token(woosh.OP, ':', 599, 54, 599, 55),
woosh.Token(woosh.NAME, 'False', 599, 56, 599, 61),
woosh.Token(woosh.OP, ',', 599, 61, 599, 62),
woosh.Token(woosh.STRING, "'off'", 599, 63, 599, 68),
woosh.Token(woosh.OP, ':', 599, 68, 599, 69),
woosh.Token(woosh.NAME, 'False', 599, 70, 599, 75),
woosh.Token(woosh.OP, '}', 599, 75, 599, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 599, 76, 600, 0),
woosh.Token(woosh.NAME, 'def', 601, 4, 601, 7),
woosh.Token(woosh.NAME, '__init__', 601, 8, 601, 16),
woosh.Token(woosh.OP, '(', 601, 16, 601, 17),
woosh.Token(woosh.NAME, 'self', 601, 17, 601, 21),
woosh.Token(woosh.OP, ',', 601, 21, 601, 22),
woosh.Token(woosh.NAME, 'defaults', 601, 23, 601, 31),
woosh.Token(woosh.OP, '=', 601, 31, 601, 32),
woosh.Token(woosh.NAME, 'None', 601, 32, 601, 36),
woosh.Token(woosh.OP, ',', 601, 36, 601, 37),
woosh.Token(woosh.NAME, 'dict_type', 601, 38, 601, 47),
woosh.Token(woosh.OP, '=', 601, 47, 601, 48),
woosh.Token(woosh.NAME, '_default_dict', 601, 48, 601, 61),
woosh.Token(woosh.OP, ',', 601, 61, 601, 62),
woosh.Token(woosh.NAME, 'allow_no_value', 602, 17, 602, 31),
woosh.Token(woosh.OP, '=', 602, 31, 602, 32),
woosh.Token(woosh.NAME, 'False', 602, 32, 602, 37),
woosh.Token(woosh.OP, ',', 602, 37, 602, 38),
woosh.Token(woosh.OP, '*', 602, 39, 602, 40),
woosh.Token(woosh.OP, ',', 602, 40, 602, 41),
woosh.Token(woosh.NAME, 'delimiters', 602, 42, 602, 52),
woosh.Token(woosh.OP, '=', 602, 52, 602, 53),
woosh.Token(woosh.OP, '(', 602, 53, 602, 54),
woosh.Token(woosh.STRING, "'='", 602, 54, 602, 57),
woosh.Token(woosh.OP, ',', 602, 57, 602, 58),
woosh.Token(woosh.STRING, "':'", 602, 59, 602, 62),
woosh.Token(woosh.OP, ')', 602, 62, 602, 63),
woosh.Token(woosh.OP, ',', 602, 63, 602, 64),
woosh.Token(woosh.NAME, 'comment_prefixes', 603, 17, 603, 33),
woosh.Token(woosh.OP, '=', 603, 33, 603, 34),
woosh.Token(woosh.OP, '(', 603, 34, 603, 35),
woosh.Token(woosh.STRING, "'#'", 603, 35, 603, 38),
woosh.Token(woosh.OP, ',', 603, 38, 603, 39),
woosh.Token(woosh.STRING, "';'", 603, 40, 603, 43),
woosh.Token(woosh.OP, ')', 603, 43, 603, 44),
woosh.Token(woosh.OP, ',', 603, 44, 603, 45),
woosh.Token(woosh.NAME, 'inline_comment_prefixes', 603, 46, 603, 69),
woosh.Token(woosh.OP, '=', 603, 69, 603, 70),
woosh.Token(woosh.NAME, 'None', 603, 70, 603, 74),
woosh.Token(woosh.OP, ',', 603, 74, 603, 75),
woosh.Token(woosh.NAME, 'strict', 604, 17, 604, 23),
woosh.Token(woosh.OP, '=', 604, 23, 604, 24),
woosh.Token(woosh.NAME, 'True', 604, 24, 604, 28),
woosh.Token(woosh.OP, ',', 604, 28, 604, 29),
woosh.Token(woosh.NAME, 'empty_lines_in_values', 604, 30, 604, 51),
woosh.Token(woosh.OP, '=', 604, 51, 604, 52),
woosh.Token(woosh.NAME, 'True', 604, 52, 604, 56),
woosh.Token(woosh.OP, ',', 604, 56, 604, 57),
woosh.Token(woosh.NAME, 'default_section', 605, 17, 605, 32),
woosh.Token(woosh.OP, '=', 605, 32, 605, 33),
woosh.Token(woosh.NAME, 'DEFAULTSECT', 605, 33, 605, 44),
woosh.Token(woosh.OP, ',', 605, 44, 605, 45),
woosh.Token(woosh.NAME, 'interpolation', 606, 17, 606, 30),
woosh.Token(woosh.OP, '=', 606, 30, 606, 31),
woosh.Token(woosh.NAME, '_UNSET', 606, 31, 606, 37),
woosh.Token(woosh.OP, ',', 606, 37, 606, 38),
woosh.Token(woosh.NAME, 'converters', 606, 39, 606, 49),
woosh.Token(woosh.OP, '=', 606, 49, 606, 50),
woosh.Token(woosh.NAME, '_UNSET', 606, 50, 606, 56),
woosh.Token(woosh.OP, ')', 606, 56, 606, 57),
woosh.Token(woosh.OP, ':', 606, 57, 606, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 606, 58, 607, 0),
woosh.Token(woosh.INDENT, ' ', 608, 0, 608, 8),
woosh.Token(woosh.NAME, 'self', 608, 8, 608, 12),
woosh.Token(woosh.OP, '.', 608, 12, 608, 13),
woosh.Token(woosh.NAME, '_dict', 608, 13, 608, 18),
woosh.Token(woosh.OP, '=', 608, 19, 608, 20),
woosh.Token(woosh.NAME, 'dict_type', 608, 21, 608, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 608, 30, 609, 0),
woosh.Token(woosh.NAME, 'self', 609, 8, 609, 12),
woosh.Token(woosh.OP, '.', 609, 12, 609, 13),
woosh.Token(woosh.NAME, '_sections', 609, 13, 609, 22),
woosh.Token(woosh.OP, '=', 609, 23, 609, 24),
woosh.Token(woosh.NAME, 'self', 609, 25, 609, 29),
woosh.Token(woosh.OP, '.', 609, 29, 609, 30),
woosh.Token(woosh.NAME, '_dict', 609, 30, 609, 35),
woosh.Token(woosh.OP, '(', 609, 35, 609, 36),
woosh.Token(woosh.OP, ')', 609, 36, 609, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 609, 37, 610, 0),
woosh.Token(woosh.NAME, 'self', 610, 8, 610, 12),
woosh.Token(woosh.OP, '.', 610, 12, 610, 13),
woosh.Token(woosh.NAME, '_defaults', 610, 13, 610, 22),
woosh.Token(woosh.OP, '=', 610, 23, 610, 24),
woosh.Token(woosh.NAME, 'self', 610, 25, 610, 29),
woosh.Token(woosh.OP, '.', 610, 29, 610, 30),
woosh.Token(woosh.NAME, '_dict', 610, 30, 610, 35),
woosh.Token(woosh.OP, '(', 610, 35, 610, 36),
woosh.Token(woosh.OP, ')', 610, 36, 610, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 610, 37, 611, 0),
woosh.Token(woosh.NAME, 'self', 611, 8, 611, 12),
woosh.Token(woosh.OP, '.', 611, 12, 611, 13),
woosh.Token(woosh.NAME, '_converters', 611, 13, 611, 24),
woosh.Token(woosh.OP, '=', 611, 25, 611, 26),
woosh.Token(woosh.NAME, 'ConverterMapping', 611, 27, 611, 43),
woosh.Token(woosh.OP, '(', 611, 43, 611, 44),
woosh.Token(woosh.NAME, 'self', 611, 44, 611, 48),
woosh.Token(woosh.OP, ')', 611, 48, 611, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 611, 49, 612, 0),
woosh.Token(woosh.NAME, 'self', 612, 8, 612, 12),
woosh.Token(woosh.OP, '.', 612, 12, 612, 13),
woosh.Token(woosh.NAME, '_proxies', 612, 13, 612, 21),
woosh.Token(woosh.OP, '=', 612, 22, 612, 23),
woosh.Token(woosh.NAME, 'self', 612, 24, 612, 28),
woosh.Token(woosh.OP, '.', 612, 28, 612, 29),
woosh.Token(woosh.NAME, '_dict', 612, 29, 612, 34),
woosh.Token(woosh.OP, '(', 612, 34, 612, 35),
woosh.Token(woosh.OP, ')', 612, 35, 612, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 612, 36, 613, 0),
woosh.Token(woosh.NAME, 'self', 613, 8, 613, 12),
woosh.Token(woosh.OP, '.', 613, 12, 613, 13),
woosh.Token(woosh.NAME, '_proxies', 613, 13, 613, 21),
woosh.Token(woosh.OP, '[', 613, 21, 613, 22),
woosh.Token(woosh.NAME, 'default_section', 613, 22, 613, 37),
woosh.Token(woosh.OP, ']', 613, 37, 613, 38),
woosh.Token(woosh.OP, '=', 613, 39, 613, 40),
woosh.Token(woosh.NAME, 'SectionProxy', 613, 41, 613, 53),
woosh.Token(woosh.OP, '(', 613, 53, 613, 54),
woosh.Token(woosh.NAME, 'self', 613, 54, 613, 58),
woosh.Token(woosh.OP, ',', 613, 58, 613, 59),
woosh.Token(woosh.NAME, 'default_section', 613, 60, 613, 75),
woosh.Token(woosh.OP, ')', 613, 75, 613, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 613, 76, 614, 0),
woosh.Token(woosh.NAME, 'self', 614, 8, 614, 12),
woosh.Token(woosh.OP, '.', 614, 12, 614, 13),
woosh.Token(woosh.NAME, '_delimiters', 614, 13, 614, 24),
woosh.Token(woosh.OP, '=', 614, 25, 614, 26),
woosh.Token(woosh.NAME, 'tuple', 614, 27, 614, 32),
woosh.Token(woosh.OP, '(', 614, 32, 614, 33),
woosh.Token(woosh.NAME, 'delimiters', 614, 33, 614, 43),
woosh.Token(woosh.OP, ')', 614, 43, 614, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 614, 44, 615, 0),
woosh.Token(woosh.NAME, 'if', 615, 8, 615, 10),
woosh.Token(woosh.NAME, 'delimiters', 615, 11, 615, 21),
woosh.Token(woosh.OP, '==', 615, 22, 615, 24),
woosh.Token(woosh.OP, '(', 615, 25, 615, 26),
woosh.Token(woosh.STRING, "'='", 615, 26, 615, 29),
woosh.Token(woosh.OP, ',', 615, 29, 615, 30),
woosh.Token(woosh.STRING, "':'", 615, 31, 615, 34),
woosh.Token(woosh.OP, ')', 615, 34, 615, 35),
woosh.Token(woosh.OP, ':', 615, 35, 615, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 615, 36, 616, 0),
woosh.Token(woosh.INDENT, ' ', 616, 0, 616, 12),
woosh.Token(woosh.NAME, 'self', 616, 12, 616, 16),
woosh.Token(woosh.OP, '.', 616, 16, 616, 17),
woosh.Token(woosh.NAME, '_optcre', 616, 17, 616, 24),
woosh.Token(woosh.OP, '=', 616, 25, 616, 26),
woosh.Token(woosh.NAME, 'self', 616, 27, 616, 31),
woosh.Token(woosh.OP, '.', 616, 31, 616, 32),
woosh.Token(woosh.NAME, 'OPTCRE_NV', 616, 32, 616, 41),
woosh.Token(woosh.NAME, 'if', 616, 42, 616, 44),
woosh.Token(woosh.NAME, 'allow_no_value', 616, 45, 616, 59),
woosh.Token(woosh.NAME, 'else', 616, 60, 616, 64),
woosh.Token(woosh.NAME, 'self', 616, 65, 616, 69),
woosh.Token(woosh.OP, '.', 616, 69, 616, 70),
woosh.Token(woosh.NAME, 'OPTCRE', 616, 70, 616, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 616, 76, 617, 0),
woosh.Token(woosh.DEDENT, ' ', 617, 0, 617, 8),
woosh.Token(woosh.NAME, 'else', 617, 8, 617, 12),
woosh.Token(woosh.OP, ':', 617, 12, 617, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 617, 13, 618, 0),
woosh.Token(woosh.INDENT, ' ', 618, 0, 618, 12),
woosh.Token(woosh.NAME, 'd', 618, 12, 618, 13),
woosh.Token(woosh.OP, '=', 618, 14, 618, 15),
woosh.Token(woosh.STRING, '"|"', 618, 16, 618, 19),
woosh.Token(woosh.OP, '.', 618, 19, 618, 20),
woosh.Token(woosh.NAME, 'join', 618, 20, 618, 24),
woosh.Token(woosh.OP, '(', 618, 24, 618, 25),
woosh.Token(woosh.NAME, 're', 618, 25, 618, 27),
woosh.Token(woosh.OP, '.', 618, 27, 618, 28),
woosh.Token(woosh.NAME, 'escape', 618, 28, 618, 34),
woosh.Token(woosh.OP, '(', 618, 34, 618, 35),
woosh.Token(woosh.NAME, 'd', 618, 35, 618, 36),
woosh.Token(woosh.OP, ')', 618, 36, 618, 37),
woosh.Token(woosh.NAME, 'for', 618, 38, 618, 41),
woosh.Token(woosh.NAME, 'd', 618, 42, 618, 43),
woosh.Token(woosh.NAME, 'in', 618, 44, 618, 46),
woosh.Token(woosh.NAME, 'delimiters', 618, 47, 618, 57),
woosh.Token(woosh.OP, ')', 618, 57, 618, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 618, 58, 619, 0),
woosh.Token(woosh.NAME, 'if', 619, 12, 619, 14),
woosh.Token(woosh.NAME, 'allow_no_value', 619, 15, 619, 29),
woosh.Token(woosh.OP, ':', 619, 29, 619, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 619, 30, 620, 0),
woosh.Token(woosh.INDENT, ' ', 620, 0, 620, 16),
woosh.Token(woosh.NAME, 'self', 620, 16, 620, 20),
woosh.Token(woosh.OP, '.', 620, 20, 620, 21),
woosh.Token(woosh.NAME, '_optcre', 620, 21, 620, 28),
woosh.Token(woosh.OP, '=', 620, 29, 620, 30),
woosh.Token(woosh.NAME, 're', 620, 31, 620, 33),
woosh.Token(woosh.OP, '.', 620, 33, 620, 34),
woosh.Token(woosh.NAME, 'compile', 620, 34, 620, 41),
woosh.Token(woosh.OP, '(', 620, 41, 620, 42),
woosh.Token(woosh.NAME, 'self', 620, 42, 620, 46),
woosh.Token(woosh.OP, '.', 620, 46, 620, 47),
woosh.Token(woosh.NAME, '_OPT_NV_TMPL', 620, 47, 620, 59),
woosh.Token(woosh.OP, '.', 620, 59, 620, 60),
woosh.Token(woosh.NAME, 'format', 620, 60, 620, 66),
woosh.Token(woosh.OP, '(', 620, 66, 620, 67),
woosh.Token(woosh.NAME, 'delim', 620, 67, 620, 72),
woosh.Token(woosh.OP, '=', 620, 72, 620, 73),
woosh.Token(woosh.NAME, 'd', 620, 73, 620, 74),
woosh.Token(woosh.OP, ')', 620, 74, 620, 75),
woosh.Token(woosh.OP, ',', 620, 75, 620, 76),
woosh.Token(woosh.NAME, 're', 621, 42, 621, 44),
woosh.Token(woosh.OP, '.', 621, 44, 621, 45),
woosh.Token(woosh.NAME, 'VERBOSE', 621, 45, 621, 52),
woosh.Token(woosh.OP, ')', 621, 52, 621, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 621, 53, 622, 0),
woosh.Token(woosh.DEDENT, ' ', 622, 0, 622, 12),
woosh.Token(woosh.NAME, 'else', 622, 12, 622, 16),
woosh.Token(woosh.OP, ':', 622, 16, 622, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 622, 17, 623, 0),
woosh.Token(woosh.INDENT, ' ', 623, 0, 623, 16),
woosh.Token(woosh.NAME, 'self', 623, 16, 623, 20),
woosh.Token(woosh.OP, '.', 623, 20, 623, 21),
woosh.Token(woosh.NAME, '_optcre', 623, 21, 623, 28),
woosh.Token(woosh.OP, '=', 623, 29, 623, 30),
woosh.Token(woosh.NAME, 're', 623, 31, 623, 33),
woosh.Token(woosh.OP, '.', 623, 33, 623, 34),
woosh.Token(woosh.NAME, 'compile', 623, 34, 623, 41),
woosh.Token(woosh.OP, '(', 623, 41, 623, 42),
woosh.Token(woosh.NAME, 'self', 623, 42, 623, 46),
woosh.Token(woosh.OP, '.', 623, 46, 623, 47),
woosh.Token(woosh.NAME, '_OPT_TMPL', 623, 47, 623, 56),
woosh.Token(woosh.OP, '.', 623, 56, 623, 57),
woosh.Token(woosh.NAME, 'format', 623, 57, 623, 63),
woosh.Token(woosh.OP, '(', 623, 63, 623, 64),
woosh.Token(woosh.NAME, 'delim', 623, 64, 623, 69),
woosh.Token(woosh.OP, '=', 623, 69, 623, 70),
woosh.Token(woosh.NAME, 'd', 623, 70, 623, 71),
woosh.Token(woosh.OP, ')', 623, 71, 623, 72),
woosh.Token(woosh.OP, ',', 623, 72, 623, 73),
woosh.Token(woosh.NAME, 're', 624, 42, 624, 44),
woosh.Token(woosh.OP, '.', 624, 44, 624, 45),
woosh.Token(woosh.NAME, 'VERBOSE', 624, 45, 624, 52),
woosh.Token(woosh.OP, ')', 624, 52, 624, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 624, 53, 625, 0),
woosh.Token(woosh.DEDENT, ' ', 625, 0, 625, 8),
woosh.Token(woosh.DEDENT, '', 625, 8, 625, 8),
woosh.Token(woosh.NAME, 'self', 625, 8, 625, 12),
woosh.Token(woosh.OP, '.', 625, 12, 625, 13),
woosh.Token(woosh.NAME, '_comment_prefixes', 625, 13, 625, 30),
woosh.Token(woosh.OP, '=', 625, 31, 625, 32),
woosh.Token(woosh.NAME, 'tuple', 625, 33, 625, 38),
woosh.Token(woosh.OP, '(', 625, 38, 625, 39),
woosh.Token(woosh.NAME, 'comment_prefixes', 625, 39, 625, 55),
woosh.Token(woosh.NAME, 'or', 625, 56, 625, 58),
woosh.Token(woosh.OP, '(', 625, 59, 625, 60),
woosh.Token(woosh.OP, ')', 625, 60, 625, 61),
woosh.Token(woosh.OP, ')', 625, 61, 625, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 625, 62, 626, 0),
woosh.Token(woosh.NAME, 'self', 626, 8, 626, 12),
woosh.Token(woosh.OP, '.', 626, 12, 626, 13),
woosh.Token(woosh.NAME, '_inline_comment_prefixes', 626, 13, 626, 37),
woosh.Token(woosh.OP, '=', 626, 38, 626, 39),
woosh.Token(woosh.NAME, 'tuple', 626, 40, 626, 45),
woosh.Token(woosh.OP, '(', 626, 45, 626, 46),
woosh.Token(woosh.NAME, 'inline_comment_prefixes', 626, 46, 626, 69),
woosh.Token(woosh.NAME, 'or', 626, 70, 626, 72),
woosh.Token(woosh.OP, '(', 626, 73, 626, 74),
woosh.Token(woosh.OP, ')', 626, 74, 626, 75),
woosh.Token(woosh.OP, ')', 626, 75, 626, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 626, 76, 627, 0),
woosh.Token(woosh.NAME, 'self', 627, 8, 627, 12),
woosh.Token(woosh.OP, '.', 627, 12, 627, 13),
woosh.Token(woosh.NAME, '_strict', 627, 13, 627, 20),
woosh.Token(woosh.OP, '=', 627, 21, 627, 22),
woosh.Token(woosh.NAME, 'strict', 627, 23, 627, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 627, 29, 628, 0),
woosh.Token(woosh.NAME, 'self', 628, 8, 628, 12),
woosh.Token(woosh.OP, '.', 628, 12, 628, 13),
woosh.Token(woosh.NAME, '_allow_no_value', 628, 13, 628, 28),
woosh.Token(woosh.OP, '=', 628, 29, 628, 30),
woosh.Token(woosh.NAME, 'allow_no_value', 628, 31, 628, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 628, 45, 629, 0),
woosh.Token(woosh.NAME, 'self', 629, 8, 629, 12),
woosh.Token(woosh.OP, '.', 629, 12, 629, 13),
woosh.Token(woosh.NAME, '_empty_lines_in_values', 629, 13, 629, 35),
woosh.Token(woosh.OP, '=', 629, 36, 629, 37),
woosh.Token(woosh.NAME, 'empty_lines_in_values', 629, 38, 629, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 629, 59, 630, 0),
woosh.Token(woosh.NAME, 'self', 630, 8, 630, 12),
woosh.Token(woosh.OP, '.', 630, 12, 630, 13),
woosh.Token(woosh.NAME, 'default_section', 630, 13, 630, 28),
woosh.Token(woosh.OP, '=', 630, 28, 630, 29),
woosh.Token(woosh.NAME, 'default_section', 630, 29, 630, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 630, 44, 631, 0),
woosh.Token(woosh.NAME, 'self', 631, 8, 631, 12),
woosh.Token(woosh.OP, '.', 631, 12, 631, 13),
woosh.Token(woosh.NAME, '_interpolation', 631, 13, 631, 27),
woosh.Token(woosh.OP, '=', 631, 28, 631, 29),
woosh.Token(woosh.NAME, 'interpolation', 631, 30, 631, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 631, 43, 632, 0),
woosh.Token(woosh.NAME, 'if', 632, 8, 632, 10),
woosh.Token(woosh.NAME, 'self', 632, 11, 632, 15),
woosh.Token(woosh.OP, '.', 632, 15, 632, 16),
woosh.Token(woosh.NAME, '_interpolation', 632, 16, 632, 30),
woosh.Token(woosh.NAME, 'is', 632, 31, 632, 33),
woosh.Token(woosh.NAME, '_UNSET', 632, 34, 632, 40),
woosh.Token(woosh.OP, ':', 632, 40, 632, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 632, 41, 633, 0),
woosh.Token(woosh.INDENT, ' ', 633, 0, 633, 12),
woosh.Token(woosh.NAME, 'self', 633, 12, 633, 16),
woosh.Token(woosh.OP, '.', 633, 16, 633, 17),
woosh.Token(woosh.NAME, '_interpolation', 633, 17, 633, 31),
woosh.Token(woosh.OP, '=', 633, 32, 633, 33),
woosh.Token(woosh.NAME, 'self', 633, 34, 633, 38),
woosh.Token(woosh.OP, '.', 633, 38, 633, 39),
woosh.Token(woosh.NAME, '_DEFAULT_INTERPOLATION', 633, 39, 633, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 633, 61, 634, 0),
woosh.Token(woosh.DEDENT, ' ', 634, 0, 634, 8),
woosh.Token(woosh.NAME, 'if', 634, 8, 634, 10),
woosh.Token(woosh.NAME, 'self', 634, 11, 634, 15),
woosh.Token(woosh.OP, '.', 634, 15, 634, 16),
woosh.Token(woosh.NAME, '_interpolation', 634, 16, 634, 30),
woosh.Token(woosh.NAME, 'is', 634, 31, 634, 33),
woosh.Token(woosh.NAME, 'None', 634, 34, 634, 38),
woosh.Token(woosh.OP, ':', 634, 38, 634, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 634, 39, 635, 0),
woosh.Token(woosh.INDENT, ' ', 635, 0, 635, 12),
woosh.Token(woosh.NAME, 'self', 635, 12, 635, 16),
woosh.Token(woosh.OP, '.', 635, 16, 635, 17),
woosh.Token(woosh.NAME, '_interpolation', 635, 17, 635, 31),
woosh.Token(woosh.OP, '=', 635, 32, 635, 33),
woosh.Token(woosh.NAME, 'Interpolation', 635, 34, 635, 47),
woosh.Token(woosh.OP, '(', 635, 47, 635, 48),
woosh.Token(woosh.OP, ')', 635, 48, 635, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 635, 49, 636, 0),
woosh.Token(woosh.DEDENT, ' ', 636, 0, 636, 8),
woosh.Token(woosh.NAME, 'if', 636, 8, 636, 10),
woosh.Token(woosh.NAME, 'converters', 636, 11, 636, 21),
woosh.Token(woosh.NAME, 'is', 636, 22, 636, 24),
woosh.Token(woosh.NAME, 'not', 636, 25, 636, 28),
woosh.Token(woosh.NAME, '_UNSET', 636, 29, 636, 35),
woosh.Token(woosh.OP, ':', 636, 35, 636, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 636, 36, 637, 0),
woosh.Token(woosh.INDENT, ' ', 637, 0, 637, 12),
woosh.Token(woosh.NAME, 'self', 637, 12, 637, 16),
woosh.Token(woosh.OP, '.', 637, 16, 637, 17),
woosh.Token(woosh.NAME, '_converters', 637, 17, 637, 28),
woosh.Token(woosh.OP, '.', 637, 28, 637, 29),
woosh.Token(woosh.NAME, 'update', 637, 29, 637, 35),
woosh.Token(woosh.OP, '(', 637, 35, 637, 36),
woosh.Token(woosh.NAME, 'converters', 637, 36, 637, 46),
woosh.Token(woosh.OP, ')', 637, 46, 637, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 637, 47, 638, 0),
woosh.Token(woosh.DEDENT, ' ', 638, 0, 638, 8),
woosh.Token(woosh.NAME, 'if', 638, 8, 638, 10),
woosh.Token(woosh.NAME, 'defaults', 638, 11, 638, 19),
woosh.Token(woosh.OP, ':', 638, 19, 638, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 638, 20, 639, 0),
woosh.Token(woosh.INDENT, ' ', 639, 0, 639, 12),
woosh.Token(woosh.NAME, 'self', 639, 12, 639, 16),
woosh.Token(woosh.OP, '.', 639, 16, 639, 17),
woosh.Token(woosh.NAME, '_read_defaults', 639, 17, 639, 31),
woosh.Token(woosh.OP, '(', 639, 31, 639, 32),
woosh.Token(woosh.NAME, 'defaults', 639, 32, 639, 40),
woosh.Token(woosh.OP, ')', 639, 40, 639, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 639, 41, 640, 0),
woosh.Token(woosh.DEDENT, ' ', 641, 0, 641, 4),
woosh.Token(woosh.DEDENT, '', 641, 4, 641, 4),
woosh.Token(woosh.NAME, 'def', 641, 4, 641, 7),
woosh.Token(woosh.NAME, 'defaults', 641, 8, 641, 16),
woosh.Token(woosh.OP, '(', 641, 16, 641, 17),
woosh.Token(woosh.NAME, 'self', 641, 17, 641, 21),
woosh.Token(woosh.OP, ')', 641, 21, 641, 22),
woosh.Token(woosh.OP, ':', 641, 22, 641, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 641, 23, 642, 0),
woosh.Token(woosh.INDENT, ' ', 642, 0, 642, 8),
woosh.Token(woosh.NAME, 'return', 642, 8, 642, 14),
woosh.Token(woosh.NAME, 'self', 642, 15, 642, 19),
woosh.Token(woosh.OP, '.', 642, 19, 642, 20),
woosh.Token(woosh.NAME, '_defaults', 642, 20, 642, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 642, 29, 643, 0),
woosh.Token(woosh.DEDENT, ' ', 644, 0, 644, 4),
woosh.Token(woosh.NAME, 'def', 644, 4, 644, 7),
woosh.Token(woosh.NAME, 'sections', 644, 8, 644, 16),
woosh.Token(woosh.OP, '(', 644, 16, 644, 17),
woosh.Token(woosh.NAME, 'self', 644, 17, 644, 21),
woosh.Token(woosh.OP, ')', 644, 21, 644, 22),
woosh.Token(woosh.OP, ':', 644, 22, 644, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 644, 23, 645, 0),
woosh.Token(woosh.INDENT, ' ', 645, 0, 645, 8),
woosh.Token(woosh.STRING, '"""Return a list of section names, excluding [DEFAULT]"""', 645, 8, 645, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 645, 65, 646, 0),
woosh.Token(woosh.COMMENT, '# self._sections will never have [DEFAULT] in it', 646, 8, 646, 56),
woosh.Token(woosh.NAME, 'return', 647, 8, 647, 14),
woosh.Token(woosh.NAME, 'list', 647, 15, 647, 19),
woosh.Token(woosh.OP, '(', 647, 19, 647, 20),
woosh.Token(woosh.NAME, 'self', 647, 20, 647, 24),
woosh.Token(woosh.OP, '.', 647, 24, 647, 25),
woosh.Token(woosh.NAME, '_sections', 647, 25, 647, 34),
woosh.Token(woosh.OP, '.', 647, 34, 647, 35),
woosh.Token(woosh.NAME, 'keys', 647, 35, 647, 39),
woosh.Token(woosh.OP, '(', 647, 39, 647, 40),
woosh.Token(woosh.OP, ')', 647, 40, 647, 41),
woosh.Token(woosh.OP, ')', 647, 41, 647, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 647, 42, 648, 0),
woosh.Token(woosh.DEDENT, ' ', 649, 0, 649, 4),
woosh.Token(woosh.NAME, 'def', 649, 4, 649, 7),
woosh.Token(woosh.NAME, 'add_section', 649, 8, 649, 19),
woosh.Token(woosh.OP, '(', 649, 19, 649, 20),
woosh.Token(woosh.NAME, 'self', 649, 20, 649, 24),
woosh.Token(woosh.OP, ',', 649, 24, 649, 25),
woosh.Token(woosh.NAME, 'section', 649, 26, 649, 33),
woosh.Token(woosh.OP, ')', 649, 33, 649, 34),
woosh.Token(woosh.OP, ':', 649, 34, 649, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 649, 35, 650, 0),
woosh.Token(woosh.INDENT, ' ', 650, 0, 650, 8),
woosh.Token(woosh.STRING, '"""Create a new section in the configuration.\r\n\r\n Raise DuplicateSectionError if a section by the specified name\r\n already exists. Raise ValueError if name is DEFAULT.\r\n """', 650, 8, 654, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 654, 11, 655, 0),
woosh.Token(woosh.NAME, 'if', 655, 8, 655, 10),
woosh.Token(woosh.NAME, 'section', 655, 11, 655, 18),
woosh.Token(woosh.OP, '==', 655, 19, 655, 21),
woosh.Token(woosh.NAME, 'self', 655, 22, 655, 26),
woosh.Token(woosh.OP, '.', 655, 26, 655, 27),
woosh.Token(woosh.NAME, 'default_section', 655, 27, 655, 42),
woosh.Token(woosh.OP, ':', 655, 42, 655, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 655, 43, 656, 0),
woosh.Token(woosh.INDENT, ' ', 656, 0, 656, 12),
woosh.Token(woosh.NAME, 'raise', 656, 12, 656, 17),
woosh.Token(woosh.NAME, 'ValueError', 656, 18, 656, 28),
woosh.Token(woosh.OP, '(', 656, 28, 656, 29),
woosh.Token(woosh.STRING, "'Invalid section name: %r'", 656, 29, 656, 55),
woosh.Token(woosh.OP, '%', 656, 56, 656, 57),
woosh.Token(woosh.NAME, 'section', 656, 58, 656, 65),
woosh.Token(woosh.OP, ')', 656, 65, 656, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 656, 66, 657, 0),
woosh.Token(woosh.DEDENT, ' ', 658, 0, 658, 8),
woosh.Token(woosh.NAME, 'if', 658, 8, 658, 10),
woosh.Token(woosh.NAME, 'section', 658, 11, 658, 18),
woosh.Token(woosh.NAME, 'in', 658, 19, 658, 21),
woosh.Token(woosh.NAME, 'self', 658, 22, 658, 26),
woosh.Token(woosh.OP, '.', 658, 26, 658, 27),
woosh.Token(woosh.NAME, '_sections', 658, 27, 658, 36),
woosh.Token(woosh.OP, ':', 658, 36, 658, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 658, 37, 659, 0),
woosh.Token(woosh.INDENT, ' ', 659, 0, 659, 12),
woosh.Token(woosh.NAME, 'raise', 659, 12, 659, 17),
woosh.Token(woosh.NAME, 'DuplicateSectionError', 659, 18, 659, 39),
woosh.Token(woosh.OP, '(', 659, 39, 659, 40),
woosh.Token(woosh.NAME, 'section', 659, 40, 659, 47),
woosh.Token(woosh.OP, ')', 659, 47, 659, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 659, 48, 660, 0),
woosh.Token(woosh.DEDENT, ' ', 660, 0, 660, 8),
woosh.Token(woosh.NAME, 'self', 660, 8, 660, 12),
woosh.Token(woosh.OP, '.', 660, 12, 660, 13),
woosh.Token(woosh.NAME, '_sections', 660, 13, 660, 22),
woosh.Token(woosh.OP, '[', 660, 22, 660, 23),
woosh.Token(woosh.NAME, 'section', 660, 23, 660, 30),
woosh.Token(woosh.OP, ']', 660, 30, 660, 31),
woosh.Token(woosh.OP, '=', 660, 32, 660, 33),
woosh.Token(woosh.NAME, 'self', 660, 34, 660, 38),
woosh.Token(woosh.OP, '.', 660, 38, 660, 39),
woosh.Token(woosh.NAME, '_dict', 660, 39, 660, 44),
woosh.Token(woosh.OP, '(', 660, 44, 660, 45),
woosh.Token(woosh.OP, ')', 660, 45, 660, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 660, 46, 661, 0),
woosh.Token(woosh.NAME, 'self', 661, 8, 661, 12),
woosh.Token(woosh.OP, '.', 661, 12, 661, 13),
woosh.Token(woosh.NAME, '_proxies', 661, 13, 661, 21),
woosh.Token(woosh.OP, '[', 661, 21, 661, 22),
woosh.Token(woosh.NAME, 'section', 661, 22, 661, 29),
woosh.Token(woosh.OP, ']', 661, 29, 661, 30),
woosh.Token(woosh.OP, '=', 661, 31, 661, 32),
woosh.Token(woosh.NAME, 'SectionProxy', 661, 33, 661, 45),
woosh.Token(woosh.OP, '(', 661, 45, 661, 46),
woosh.Token(woosh.NAME, 'self', 661, 46, 661, 50),
woosh.Token(woosh.OP, ',', 661, 50, 661, 51),
woosh.Token(woosh.NAME, 'section', 661, 52, 661, 59),
woosh.Token(woosh.OP, ')', 661, 59, 661, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 661, 60, 662, 0),
woosh.Token(woosh.DEDENT, ' ', 663, 0, 663, 4),
woosh.Token(woosh.NAME, 'def', 663, 4, 663, 7),
woosh.Token(woosh.NAME, 'has_section', 663, 8, 663, 19),
woosh.Token(woosh.OP, '(', 663, 19, 663, 20),
woosh.Token(woosh.NAME, 'self', 663, 20, 663, 24),
woosh.Token(woosh.OP, ',', 663, 24, 663, 25),
woosh.Token(woosh.NAME, 'section', 663, 26, 663, 33),
woosh.Token(woosh.OP, ')', 663, 33, 663, 34),
woosh.Token(woosh.OP, ':', 663, 34, 663, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 663, 35, 664, 0),
woosh.Token(woosh.INDENT, ' ', 664, 0, 664, 8),
woosh.Token(woosh.STRING, '"""Indicate whether the named section is present in the configuration.\r\n\r\n The DEFAULT section is not acknowledged.\r\n """', 664, 8, 667, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 667, 11, 668, 0),
woosh.Token(woosh.NAME, 'return', 668, 8, 668, 14),
woosh.Token(woosh.NAME, 'section', 668, 15, 668, 22),
woosh.Token(woosh.NAME, 'in', 668, 23, 668, 25),
woosh.Token(woosh.NAME, 'self', 668, 26, 668, 30),
woosh.Token(woosh.OP, '.', 668, 30, 668, 31),
woosh.Token(woosh.NAME, '_sections', 668, 31, 668, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 668, 40, 669, 0),
woosh.Token(woosh.DEDENT, ' ', 670, 0, 670, 4),
woosh.Token(woosh.NAME, 'def', 670, 4, 670, 7),
woosh.Token(woosh.NAME, 'options', 670, 8, 670, 15),
woosh.Token(woosh.OP, '(', 670, 15, 670, 16),
woosh.Token(woosh.NAME, 'self', 670, 16, 670, 20),
woosh.Token(woosh.OP, ',', 670, 20, 670, 21),
woosh.Token(woosh.NAME, 'section', 670, 22, 670, 29),
woosh.Token(woosh.OP, ')', 670, 29, 670, 30),
woosh.Token(woosh.OP, ':', 670, 30, 670, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 670, 31, 671, 0),
woosh.Token(woosh.INDENT, ' ', 671, 0, 671, 8),
woosh.Token(woosh.STRING, '"""Return a list of option names for the given section name."""', 671, 8, 671, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 671, 71, 672, 0),
woosh.Token(woosh.NAME, 'try', 672, 8, 672, 11),
woosh.Token(woosh.OP, ':', 672, 11, 672, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 672, 12, 673, 0),
woosh.Token(woosh.INDENT, ' ', 673, 0, 673, 12),
woosh.Token(woosh.NAME, 'opts', 673, 12, 673, 16),
woosh.Token(woosh.OP, '=', 673, 17, 673, 18),
woosh.Token(woosh.NAME, 'self', 673, 19, 673, 23),
woosh.Token(woosh.OP, '.', 673, 23, 673, 24),
woosh.Token(woosh.NAME, '_sections', 673, 24, 673, 33),
woosh.Token(woosh.OP, '[', 673, 33, 673, 34),
woosh.Token(woosh.NAME, 'section', 673, 34, 673, 41),
woosh.Token(woosh.OP, ']', 673, 41, 673, 42),
woosh.Token(woosh.OP, '.', 673, 42, 673, 43),
woosh.Token(woosh.NAME, 'copy', 673, 43, 673, 47),
woosh.Token(woosh.OP, '(', 673, 47, 673, 48),
woosh.Token(woosh.OP, ')', 673, 48, 673, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 673, 49, 674, 0),
woosh.Token(woosh.DEDENT, ' ', 674, 0, 674, 8),
woosh.Token(woosh.NAME, 'except', 674, 8, 674, 14),
woosh.Token(woosh.NAME, 'KeyError', 674, 15, 674, 23),
woosh.Token(woosh.OP, ':', 674, 23, 674, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 674, 24, 675, 0),
woosh.Token(woosh.INDENT, ' ', 675, 0, 675, 12),
woosh.Token(woosh.NAME, 'raise', 675, 12, 675, 17),
woosh.Token(woosh.NAME, 'NoSectionError', 675, 18, 675, 32),
woosh.Token(woosh.OP, '(', 675, 32, 675, 33),
woosh.Token(woosh.NAME, 'section', 675, 33, 675, 40),
woosh.Token(woosh.OP, ')', 675, 40, 675, 41),
woosh.Token(woosh.NAME, 'from', 675, 42, 675, 46),
woosh.Token(woosh.NAME, 'None', 675, 47, 675, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 675, 51, 676, 0),
woosh.Token(woosh.DEDENT, ' ', 676, 0, 676, 8),
woosh.Token(woosh.NAME, 'opts', 676, 8, 676, 12),
woosh.Token(woosh.OP, '.', 676, 12, 676, 13),
woosh.Token(woosh.NAME, 'update', 676, 13, 676, 19),
woosh.Token(woosh.OP, '(', 676, 19, 676, 20),
woosh.Token(woosh.NAME, 'self', 676, 20, 676, 24),
woosh.Token(woosh.OP, '.', 676, 24, 676, 25),
woosh.Token(woosh.NAME, '_defaults', 676, 25, 676, 34),
woosh.Token(woosh.OP, ')', 676, 34, 676, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 676, 35, 677, 0),
woosh.Token(woosh.NAME, 'return', 677, 8, 677, 14),
woosh.Token(woosh.NAME, 'list', 677, 15, 677, 19),
woosh.Token(woosh.OP, '(', 677, 19, 677, 20),
woosh.Token(woosh.NAME, 'opts', 677, 20, 677, 24),
woosh.Token(woosh.OP, '.', 677, 24, 677, 25),
woosh.Token(woosh.NAME, 'keys', 677, 25, 677, 29),
woosh.Token(woosh.OP, '(', 677, 29, 677, 30),
woosh.Token(woosh.OP, ')', 677, 30, 677, 31),
woosh.Token(woosh.OP, ')', 677, 31, 677, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 677, 32, 678, 0),
woosh.Token(woosh.DEDENT, ' ', 679, 0, 679, 4),
woosh.Token(woosh.NAME, 'def', 679, 4, 679, 7),
woosh.Token(woosh.NAME, 'read', 679, 8, 679, 12),
woosh.Token(woosh.OP, '(', 679, 12, 679, 13),
woosh.Token(woosh.NAME, 'self', 679, 13, 679, 17),
woosh.Token(woosh.OP, ',', 679, 17, 679, 18),
woosh.Token(woosh.NAME, 'filenames', 679, 19, 679, 28),
woosh.Token(woosh.OP, ',', 679, 28, 679, 29),
woosh.Token(woosh.NAME, 'encoding', 679, 30, 679, 38),
woosh.Token(woosh.OP, '=', 679, 38, 679, 39),
woosh.Token(woosh.NAME, 'None', 679, 39, 679, 43),
woosh.Token(woosh.OP, ')', 679, 43, 679, 44),
woosh.Token(woosh.OP, ':', 679, 44, 679, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 679, 45, 680, 0),
woosh.Token(woosh.INDENT, ' ', 680, 0, 680, 8),
woosh.Token(woosh.STRING, '"""Read and parse a filename or an iterable of filenames.\r\n\r\n Files that cannot be opened are silently ignored; this is\r\n designed so that you can specify an iterable of potential\r\n configuration file locations (e.g. current directory, user\'s\r\n home directory, systemwide directory), and all existing\r\n configuration files in the iterable will be read. A single\r\n filename may also be given.\r\n\r\n Return list of successfully read files.\r\n """', 680, 8, 690, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 690, 11, 691, 0),
woosh.Token(woosh.NAME, 'if', 691, 8, 691, 10),
woosh.Token(woosh.NAME, 'isinstance', 691, 11, 691, 21),
woosh.Token(woosh.OP, '(', 691, 21, 691, 22),
woosh.Token(woosh.NAME, 'filenames', 691, 22, 691, 31),
woosh.Token(woosh.OP, ',', 691, 31, 691, 32),
woosh.Token(woosh.OP, '(', 691, 33, 691, 34),
woosh.Token(woosh.NAME, 'str', 691, 34, 691, 37),
woosh.Token(woosh.OP, ',', 691, 37, 691, 38),
woosh.Token(woosh.NAME, 'bytes', 691, 39, 691, 44),
woosh.Token(woosh.OP, ',', 691, 44, 691, 45),
woosh.Token(woosh.NAME, 'os', 691, 46, 691, 48),
woosh.Token(woosh.OP, '.', 691, 48, 691, 49),
woosh.Token(woosh.NAME, 'PathLike', 691, 49, 691, 57),
woosh.Token(woosh.OP, ')', 691, 57, 691, 58),
woosh.Token(woosh.OP, ')', 691, 58, 691, 59),
woosh.Token(woosh.OP, ':', 691, 59, 691, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 691, 60, 692, 0),
woosh.Token(woosh.INDENT, ' ', 692, 0, 692, 12),
woosh.Token(woosh.NAME, 'filenames', 692, 12, 692, 21),
woosh.Token(woosh.OP, '=', 692, 22, 692, 23),
woosh.Token(woosh.OP, '[', 692, 24, 692, 25),
woosh.Token(woosh.NAME, 'filenames', 692, 25, 692, 34),
woosh.Token(woosh.OP, ']', 692, 34, 692, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 692, 35, 693, 0),
woosh.Token(woosh.DEDENT, ' ', 693, 0, 693, 8),
woosh.Token(woosh.NAME, 'read_ok', 693, 8, 693, 15),
woosh.Token(woosh.OP, '=', 693, 16, 693, 17),
woosh.Token(woosh.OP, '[', 693, 18, 693, 19),
woosh.Token(woosh.OP, ']', 693, 19, 693, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 693, 20, 694, 0),
woosh.Token(woosh.NAME, 'for', 694, 8, 694, 11),
woosh.Token(woosh.NAME, 'filename', 694, 12, 694, 20),
woosh.Token(woosh.NAME, 'in', 694, 21, 694, 23),
woosh.Token(woosh.NAME, 'filenames', 694, 24, 694, 33),
woosh.Token(woosh.OP, ':', 694, 33, 694, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 694, 34, 695, 0),
woosh.Token(woosh.INDENT, ' ', 695, 0, 695, 12),
woosh.Token(woosh.NAME, 'try', 695, 12, 695, 15),
woosh.Token(woosh.OP, ':', 695, 15, 695, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 695, 16, 696, 0),
woosh.Token(woosh.INDENT, ' ', 696, 0, 696, 16),
woosh.Token(woosh.NAME, 'with', 696, 16, 696, 20),
woosh.Token(woosh.NAME, 'open', 696, 21, 696, 25),
woosh.Token(woosh.OP, '(', 696, 25, 696, 26),
woosh.Token(woosh.NAME, 'filename', 696, 26, 696, 34),
woosh.Token(woosh.OP, ',', 696, 34, 696, 35),
woosh.Token(woosh.NAME, 'encoding', 696, 36, 696, 44),
woosh.Token(woosh.OP, '=', 696, 44, 696, 45),
woosh.Token(woosh.NAME, 'encoding', 696, 45, 696, 53),
woosh.Token(woosh.OP, ')', 696, 53, 696, 54),
woosh.Token(woosh.NAME, 'as', 696, 55, 696, 57),
woosh.Token(woosh.NAME, 'fp', 696, 58, 696, 60),
woosh.Token(woosh.OP, ':', 696, 60, 696, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 696, 61, 697, 0),
woosh.Token(woosh.INDENT, ' ', 697, 0, 697, 20),
woosh.Token(woosh.NAME, 'self', 697, 20, 697, 24),
woosh.Token(woosh.OP, '.', 697, 24, 697, 25),
woosh.Token(woosh.NAME, '_read', 697, 25, 697, 30),
woosh.Token(woosh.OP, '(', 697, 30, 697, 31),
woosh.Token(woosh.NAME, 'fp', 697, 31, 697, 33),
woosh.Token(woosh.OP, ',', 697, 33, 697, 34),
woosh.Token(woosh.NAME, 'filename', 697, 35, 697, 43),
woosh.Token(woosh.OP, ')', 697, 43, 697, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 697, 44, 698, 0),
woosh.Token(woosh.DEDENT, ' ', 698, 0, 698, 12),
woosh.Token(woosh.DEDENT, '', 698, 12, 698, 12),
woosh.Token(woosh.NAME, 'except', 698, 12, 698, 18),
woosh.Token(woosh.NAME, 'OSError', 698, 19, 698, 26),
woosh.Token(woosh.OP, ':', 698, 26, 698, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 698, 27, 699, 0),
woosh.Token(woosh.INDENT, ' ', 699, 0, 699, 16),
woosh.Token(woosh.NAME, 'continue', 699, 16, 699, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 699, 24, 700, 0),
woosh.Token(woosh.DEDENT, ' ', 700, 0, 700, 12),
woosh.Token(woosh.NAME, 'if', 700, 12, 700, 14),
woosh.Token(woosh.NAME, 'isinstance', 700, 15, 700, 25),
woosh.Token(woosh.OP, '(', 700, 25, 700, 26),
woosh.Token(woosh.NAME, 'filename', 700, 26, 700, 34),
woosh.Token(woosh.OP, ',', 700, 34, 700, 35),
woosh.Token(woosh.NAME, 'os', 700, 36, 700, 38),
woosh.Token(woosh.OP, '.', 700, 38, 700, 39),
woosh.Token(woosh.NAME, 'PathLike', 700, 39, 700, 47),
woosh.Token(woosh.OP, ')', 700, 47, 700, 48),
woosh.Token(woosh.OP, ':', 700, 48, 700, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 700, 49, 701, 0),
woosh.Token(woosh.INDENT, ' ', 701, 0, 701, 16),
woosh.Token(woosh.NAME, 'filename', 701, 16, 701, 24),
woosh.Token(woosh.OP, '=', 701, 25, 701, 26),
woosh.Token(woosh.NAME, 'os', 701, 27, 701, 29),
woosh.Token(woosh.OP, '.', 701, 29, 701, 30),
woosh.Token(woosh.NAME, 'fspath', 701, 30, 701, 36),
woosh.Token(woosh.OP, '(', 701, 36, 701, 37),
woosh.Token(woosh.NAME, 'filename', 701, 37, 701, 45),
woosh.Token(woosh.OP, ')', 701, 45, 701, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 701, 46, 702, 0),
woosh.Token(woosh.DEDENT, ' ', 702, 0, 702, 12),
woosh.Token(woosh.NAME, 'read_ok', 702, 12, 702, 19),
woosh.Token(woosh.OP, '.', 702, 19, 702, 20),
woosh.Token(woosh.NAME, 'append', 702, 20, 702, 26),
woosh.Token(woosh.OP, '(', 702, 26, 702, 27),
woosh.Token(woosh.NAME, 'filename', 702, 27, 702, 35),
woosh.Token(woosh.OP, ')', 702, 35, 702, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 702, 36, 703, 0),
woosh.Token(woosh.DEDENT, ' ', 703, 0, 703, 8),
woosh.Token(woosh.NAME, 'return', 703, 8, 703, 14),
woosh.Token(woosh.NAME, 'read_ok', 703, 15, 703, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 703, 22, 704, 0),
woosh.Token(woosh.DEDENT, ' ', 705, 0, 705, 4),
woosh.Token(woosh.NAME, 'def', 705, 4, 705, 7),
woosh.Token(woosh.NAME, 'read_file', 705, 8, 705, 17),
woosh.Token(woosh.OP, '(', 705, 17, 705, 18),
woosh.Token(woosh.NAME, 'self', 705, 18, 705, 22),
woosh.Token(woosh.OP, ',', 705, 22, 705, 23),
woosh.Token(woosh.NAME, 'f', 705, 24, 705, 25),
woosh.Token(woosh.OP, ',', 705, 25, 705, 26),
woosh.Token(woosh.NAME, 'source', 705, 27, 705, 33),
woosh.Token(woosh.OP, '=', 705, 33, 705, 34),
woosh.Token(woosh.NAME, 'None', 705, 34, 705, 38),
woosh.Token(woosh.OP, ')', 705, 38, 705, 39),
woosh.Token(woosh.OP, ':', 705, 39, 705, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 705, 40, 706, 0),
woosh.Token(woosh.INDENT, ' ', 706, 0, 706, 8),
woosh.Token(woosh.STRING, '"""Like read() but the argument must be a file-like object.\r\n\r\n The `f\' argument must be iterable, returning one line at a time.\r\n Optional second argument is the `source\' specifying the name of the\r\n file being read. If not given, it is taken from f.name. If `f\' has no\r\n `name\' attribute, `<???>\' is used.\r\n """', 706, 8, 712, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 712, 11, 713, 0),
woosh.Token(woosh.NAME, 'if', 713, 8, 713, 10),
woosh.Token(woosh.NAME, 'source', 713, 11, 713, 17),
woosh.Token(woosh.NAME, 'is', 713, 18, 713, 20),
woosh.Token(woosh.NAME, 'None', 713, 21, 713, 25),
woosh.Token(woosh.OP, ':', 713, 25, 713, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 713, 26, 714, 0),
woosh.Token(woosh.INDENT, ' ', 714, 0, 714, 12),
woosh.Token(woosh.NAME, 'try', 714, 12, 714, 15),
woosh.Token(woosh.OP, ':', 714, 15, 714, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 714, 16, 715, 0),
woosh.Token(woosh.INDENT, ' ', 715, 0, 715, 16),
woosh.Token(woosh.NAME, 'source', 715, 16, 715, 22),
woosh.Token(woosh.OP, '=', 715, 23, 715, 24),
woosh.Token(woosh.NAME, 'f', 715, 25, 715, 26),
woosh.Token(woosh.OP, '.', 715, 26, 715, 27),
woosh.Token(woosh.NAME, 'name', 715, 27, 715, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 715, 31, 716, 0),
woosh.Token(woosh.DEDENT, ' ', 716, 0, 716, 12),
woosh.Token(woosh.NAME, 'except', 716, 12, 716, 18),
woosh.Token(woosh.NAME, 'AttributeError', 716, 19, 716, 33),
woosh.Token(woosh.OP, ':', 716, 33, 716, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 716, 34, 717, 0),
woosh.Token(woosh.INDENT, ' ', 717, 0, 717, 16),
woosh.Token(woosh.NAME, 'source', 717, 16, 717, 22),
woosh.Token(woosh.OP, '=', 717, 23, 717, 24),
woosh.Token(woosh.STRING, "'<???>'", 717, 25, 717, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 717, 32, 718, 0),
woosh.Token(woosh.DEDENT, ' ', 718, 0, 718, 8),
woosh.Token(woosh.DEDENT, '', 718, 8, 718, 8),
woosh.Token(woosh.NAME, 'self', 718, 8, 718, 12),
woosh.Token(woosh.OP, '.', 718, 12, 718, 13),
woosh.Token(woosh.NAME, '_read', 718, 13, 718, 18),
woosh.Token(woosh.OP, '(', 718, 18, 718, 19),
woosh.Token(woosh.NAME, 'f', 718, 19, 718, 20),
woosh.Token(woosh.OP, ',', 718, 20, 718, 21),
woosh.Token(woosh.NAME, 'source', 718, 22, 718, 28),
woosh.Token(woosh.OP, ')', 718, 28, 718, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 718, 29, 719, 0),
woosh.Token(woosh.DEDENT, ' ', 720, 0, 720, 4),
woosh.Token(woosh.NAME, 'def', 720, 4, 720, 7),
woosh.Token(woosh.NAME, 'read_string', 720, 8, 720, 19),
woosh.Token(woosh.OP, '(', 720, 19, 720, 20),
woosh.Token(woosh.NAME, 'self', 720, 20, 720, 24),
woosh.Token(woosh.OP, ',', 720, 24, 720, 25),
woosh.Token(woosh.NAME, 'string', 720, 26, 720, 32),
woosh.Token(woosh.OP, ',', 720, 32, 720, 33),
woosh.Token(woosh.NAME, 'source', 720, 34, 720, 40),
woosh.Token(woosh.OP, '=', 720, 40, 720, 41),
woosh.Token(woosh.STRING, "'<string>'", 720, 41, 720, 51),
woosh.Token(woosh.OP, ')', 720, 51, 720, 52),
woosh.Token(woosh.OP, ':', 720, 52, 720, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 720, 53, 721, 0),
woosh.Token(woosh.INDENT, ' ', 721, 0, 721, 8),
woosh.Token(woosh.STRING, '"""Read configuration from a given string."""', 721, 8, 721, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 721, 53, 722, 0),
woosh.Token(woosh.NAME, 'sfile', 722, 8, 722, 13),
woosh.Token(woosh.OP, '=', 722, 14, 722, 15),
woosh.Token(woosh.NAME, 'io', 722, 16, 722, 18),
woosh.Token(woosh.OP, '.', 722, 18, 722, 19),
woosh.Token(woosh.NAME, 'StringIO', 722, 19, 722, 27),
woosh.Token(woosh.OP, '(', 722, 27, 722, 28),
woosh.Token(woosh.NAME, 'string', 722, 28, 722, 34),
woosh.Token(woosh.OP, ')', 722, 34, 722, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 722, 35, 723, 0),
woosh.Token(woosh.NAME, 'self', 723, 8, 723, 12),
woosh.Token(woosh.OP, '.', 723, 12, 723, 13),
woosh.Token(woosh.NAME, 'read_file', 723, 13, 723, 22),
woosh.Token(woosh.OP, '(', 723, 22, 723, 23),
woosh.Token(woosh.NAME, 'sfile', 723, 23, 723, 28),
woosh.Token(woosh.OP, ',', 723, 28, 723, 29),
woosh.Token(woosh.NAME, 'source', 723, 30, 723, 36),
woosh.Token(woosh.OP, ')', 723, 36, 723, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 723, 37, 724, 0),
woosh.Token(woosh.DEDENT, ' ', 725, 0, 725, 4),
woosh.Token(woosh.NAME, 'def', 725, 4, 725, 7),
woosh.Token(woosh.NAME, 'read_dict', 725, 8, 725, 17),
woosh.Token(woosh.OP, '(', 725, 17, 725, 18),
woosh.Token(woosh.NAME, 'self', 725, 18, 725, 22),
woosh.Token(woosh.OP, ',', 725, 22, 725, 23),
woosh.Token(woosh.NAME, 'dictionary', 725, 24, 725, 34),
woosh.Token(woosh.OP, ',', 725, 34, 725, 35),
woosh.Token(woosh.NAME, 'source', 725, 36, 725, 42),
woosh.Token(woosh.OP, '=', 725, 42, 725, 43),
woosh.Token(woosh.STRING, "'<dict>'", 725, 43, 725, 51),
woosh.Token(woosh.OP, ')', 725, 51, 725, 52),
woosh.Token(woosh.OP, ':', 725, 52, 725, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 725, 53, 726, 0),
woosh.Token(woosh.INDENT, ' ', 726, 0, 726, 8),
woosh.Token(woosh.STRING, '"""Read configuration from a dictionary.\r\n\r\n Keys are section names, values are dictionaries with keys and values\r\n that should be present in the section. If the used dictionary type\r\n preserves order, sections and their keys will be added in order.\r\n\r\n All types held in the dictionary are converted to strings during\r\n reading, including section names, option names and keys.\r\n\r\n Optional second argument is the `source\' specifying the name of the\r\n dictionary being read.\r\n """', 726, 8, 737, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 737, 11, 738, 0),
woosh.Token(woosh.NAME, 'elements_added', 738, 8, 738, 22),
woosh.Token(woosh.OP, '=', 738, 23, 738, 24),
woosh.Token(woosh.NAME, 'set', 738, 25, 738, 28),
woosh.Token(woosh.OP, '(', 738, 28, 738, 29),
woosh.Token(woosh.OP, ')', 738, 29, 738, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 738, 30, 739, 0),
woosh.Token(woosh.NAME, 'for', 739, 8, 739, 11),
woosh.Token(woosh.NAME, 'section', 739, 12, 739, 19),
woosh.Token(woosh.OP, ',', 739, 19, 739, 20),
woosh.Token(woosh.NAME, 'keys', 739, 21, 739, 25),
woosh.Token(woosh.NAME, 'in', 739, 26, 739, 28),
woosh.Token(woosh.NAME, 'dictionary', 739, 29, 739, 39),
woosh.Token(woosh.OP, '.', 739, 39, 739, 40),
woosh.Token(woosh.NAME, 'items', 739, 40, 739, 45),
woosh.Token(woosh.OP, '(', 739, 45, 739, 46),
woosh.Token(woosh.OP, ')', 739, 46, 739, 47),
woosh.Token(woosh.OP, ':', 739, 47, 739, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 739, 48, 740, 0),
woosh.Token(woosh.INDENT, ' ', 740, 0, 740, 12),
woosh.Token(woosh.NAME, 'section', 740, 12, 740, 19),
woosh.Token(woosh.OP, '=', 740, 20, 740, 21),
woosh.Token(woosh.NAME, 'str', 740, 22, 740, 25),
woosh.Token(woosh.OP, '(', 740, 25, 740, 26),
woosh.Token(woosh.NAME, 'section', 740, 26, 740, 33),
woosh.Token(woosh.OP, ')', 740, 33, 740, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 740, 34, 741, 0),
woosh.Token(woosh.NAME, 'try', 741, 12, 741, 15),
woosh.Token(woosh.OP, ':', 741, 15, 741, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 741, 16, 742, 0),
woosh.Token(woosh.INDENT, ' ', 742, 0, 742, 16),
woosh.Token(woosh.NAME, 'self', 742, 16, 742, 20),
woosh.Token(woosh.OP, '.', 742, 20, 742, 21),
woosh.Token(woosh.NAME, 'add_section', 742, 21, 742, 32),
woosh.Token(woosh.OP, '(', 742, 32, 742, 33),
woosh.Token(woosh.NAME, 'section', 742, 33, 742, 40),
woosh.Token(woosh.OP, ')', 742, 40, 742, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 742, 41, 743, 0),
woosh.Token(woosh.DEDENT, ' ', 743, 0, 743, 12),
woosh.Token(woosh.NAME, 'except', 743, 12, 743, 18),
woosh.Token(woosh.OP, '(', 743, 19, 743, 20),
woosh.Token(woosh.NAME, 'DuplicateSectionError', 743, 20, 743, 41),
woosh.Token(woosh.OP, ',', 743, 41, 743, 42),
woosh.Token(woosh.NAME, 'ValueError', 743, 43, 743, 53),
woosh.Token(woosh.OP, ')', 743, 53, 743, 54),
woosh.Token(woosh.OP, ':', 743, 54, 743, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 743, 55, 744, 0),
woosh.Token(woosh.INDENT, ' ', 744, 0, 744, 16),
woosh.Token(woosh.NAME, 'if', 744, 16, 744, 18),
woosh.Token(woosh.NAME, 'self', 744, 19, 744, 23),
woosh.Token(woosh.OP, '.', 744, 23, 744, 24),
woosh.Token(woosh.NAME, '_strict', 744, 24, 744, 31),
woosh.Token(woosh.NAME, 'and', 744, 32, 744, 35),
woosh.Token(woosh.NAME, 'section', 744, 36, 744, 43),
woosh.Token(woosh.NAME, 'in', 744, 44, 744, 46),
woosh.Token(woosh.NAME, 'elements_added', 744, 47, 744, 61),
woosh.Token(woosh.OP, ':', 744, 61, 744, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 744, 62, 745, 0),
woosh.Token(woosh.INDENT, ' ', 745, 0, 745, 20),
woosh.Token(woosh.NAME, 'raise', 745, 20, 745, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 745, 25, 746, 0),
woosh.Token(woosh.DEDENT, ' ', 746, 0, 746, 12),
woosh.Token(woosh.DEDENT, '', 746, 12, 746, 12),
woosh.Token(woosh.NAME, 'elements_added', 746, 12, 746, 26),
woosh.Token(woosh.OP, '.', 746, 26, 746, 27),
woosh.Token(woosh.NAME, 'add', 746, 27, 746, 30),
woosh.Token(woosh.OP, '(', 746, 30, 746, 31),
woosh.Token(woosh.NAME, 'section', 746, 31, 746, 38),
woosh.Token(woosh.OP, ')', 746, 38, 746, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 746, 39, 747, 0),
woosh.Token(woosh.NAME, 'for', 747, 12, 747, 15),
woosh.Token(woosh.NAME, 'key', 747, 16, 747, 19),
woosh.Token(woosh.OP, ',', 747, 19, 747, 20),
woosh.Token(woosh.NAME, 'value', 747, 21, 747, 26),
woosh.Token(woosh.NAME, 'in', 747, 27, 747, 29),
woosh.Token(woosh.NAME, 'keys', 747, 30, 747, 34),
woosh.Token(woosh.OP, '.', 747, 34, 747, 35),
woosh.Token(woosh.NAME, 'items', 747, 35, 747, 40),
woosh.Token(woosh.OP, '(', 747, 40, 747, 41),
woosh.Token(woosh.OP, ')', 747, 41, 747, 42),
woosh.Token(woosh.OP, ':', 747, 42, 747, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 747, 43, 748, 0),
woosh.Token(woosh.INDENT, ' ', 748, 0, 748, 16),
woosh.Token(woosh.NAME, 'key', 748, 16, 748, 19),
woosh.Token(woosh.OP, '=', 748, 20, 748, 21),
woosh.Token(woosh.NAME, 'self', 748, 22, 748, 26),
woosh.Token(woosh.OP, '.', 748, 26, 748, 27),
woosh.Token(woosh.NAME, 'optionxform', 748, 27, 748, 38),
woosh.Token(woosh.OP, '(', 748, 38, 748, 39),
woosh.Token(woosh.NAME, 'str', 748, 39, 748, 42),
woosh.Token(woosh.OP, '(', 748, 42, 748, 43),
woosh.Token(woosh.NAME, 'key', 748, 43, 748, 46),
woosh.Token(woosh.OP, ')', 748, 46, 748, 47),
woosh.Token(woosh.OP, ')', 748, 47, 748, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 748, 48, 749, 0),
woosh.Token(woosh.NAME, 'if', 749, 16, 749, 18),
woosh.Token(woosh.NAME, 'value', 749, 19, 749, 24),
woosh.Token(woosh.NAME, 'is', 749, 25, 749, 27),
woosh.Token(woosh.NAME, 'not', 749, 28, 749, 31),
woosh.Token(woosh.NAME, 'None', 749, 32, 749, 36),
woosh.Token(woosh.OP, ':', 749, 36, 749, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 749, 37, 750, 0),
woosh.Token(woosh.INDENT, ' ', 750, 0, 750, 20),
woosh.Token(woosh.NAME, 'value', 750, 20, 750, 25),
woosh.Token(woosh.OP, '=', 750, 26, 750, 27),
woosh.Token(woosh.NAME, 'str', 750, 28, 750, 31),
woosh.Token(woosh.OP, '(', 750, 31, 750, 32),
woosh.Token(woosh.NAME, 'value', 750, 32, 750, 37),
woosh.Token(woosh.OP, ')', 750, 37, 750, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 750, 38, 751, 0),
woosh.Token(woosh.DEDENT, ' ', 751, 0, 751, 16),
woosh.Token(woosh.NAME, 'if', 751, 16, 751, 18),
woosh.Token(woosh.NAME, 'self', 751, 19, 751, 23),
woosh.Token(woosh.OP, '.', 751, 23, 751, 24),
woosh.Token(woosh.NAME, '_strict', 751, 24, 751, 31),
woosh.Token(woosh.NAME, 'and', 751, 32, 751, 35),
woosh.Token(woosh.OP, '(', 751, 36, 751, 37),
woosh.Token(woosh.NAME, 'section', 751, 37, 751, 44),
woosh.Token(woosh.OP, ',', 751, 44, 751, 45),
woosh.Token(woosh.NAME, 'key', 751, 46, 751, 49),
woosh.Token(woosh.OP, ')', 751, 49, 751, 50),
woosh.Token(woosh.NAME, 'in', 751, 51, 751, 53),
woosh.Token(woosh.NAME, 'elements_added', 751, 54, 751, 68),
woosh.Token(woosh.OP, ':', 751, 68, 751, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 751, 69, 752, 0),
woosh.Token(woosh.INDENT, ' ', 752, 0, 752, 20),
woosh.Token(woosh.NAME, 'raise', 752, 20, 752, 25),
woosh.Token(woosh.NAME, 'DuplicateOptionError', 752, 26, 752, 46),
woosh.Token(woosh.OP, '(', 752, 46, 752, 47),
woosh.Token(woosh.NAME, 'section', 752, 47, 752, 54),
woosh.Token(woosh.OP, ',', 752, 54, 752, 55),
woosh.Token(woosh.NAME, 'key', 752, 56, 752, 59),
woosh.Token(woosh.OP, ',', 752, 59, 752, 60),
woosh.Token(woosh.NAME, 'source', 752, 61, 752, 67),
woosh.Token(woosh.OP, ')', 752, 67, 752, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 752, 68, 753, 0),
woosh.Token(woosh.DEDENT, ' ', 753, 0, 753, 16),
woosh.Token(woosh.NAME, 'elements_added', 753, 16, 753, 30),
woosh.Token(woosh.OP, '.', 753, 30, 753, 31),
woosh.Token(woosh.NAME, 'add', 753, 31, 753, 34),
woosh.Token(woosh.OP, '(', 753, 34, 753, 35),
woosh.Token(woosh.OP, '(', 753, 35, 753, 36),
woosh.Token(woosh.NAME, 'section', 753, 36, 753, 43),
woosh.Token(woosh.OP, ',', 753, 43, 753, 44),
woosh.Token(woosh.NAME, 'key', 753, 45, 753, 48),
woosh.Token(woosh.OP, ')', 753, 48, 753, 49),
woosh.Token(woosh.OP, ')', 753, 49, 753, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 753, 50, 754, 0),
woosh.Token(woosh.NAME, 'self', 754, 16, 754, 20),
woosh.Token(woosh.OP, '.', 754, 20, 754, 21),
woosh.Token(woosh.NAME, 'set', 754, 21, 754, 24),
woosh.Token(woosh.OP, '(', 754, 24, 754, 25),
woosh.Token(woosh.NAME, 'section', 754, 25, 754, 32),
woosh.Token(woosh.OP, ',', 754, 32, 754, 33),
woosh.Token(woosh.NAME, 'key', 754, 34, 754, 37),
woosh.Token(woosh.OP, ',', 754, 37, 754, 38),
woosh.Token(woosh.NAME, 'value', 754, 39, 754, 44),
woosh.Token(woosh.OP, ')', 754, 44, 754, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 754, 45, 755, 0),
woosh.Token(woosh.DEDENT, ' ', 756, 0, 756, 4),
woosh.Token(woosh.DEDENT, '', 756, 4, 756, 4),
woosh.Token(woosh.DEDENT, '', 756, 4, 756, 4),
woosh.Token(woosh.NAME, 'def', 756, 4, 756, 7),
woosh.Token(woosh.NAME, 'readfp', 756, 8, 756, 14),
woosh.Token(woosh.OP, '(', 756, 14, 756, 15),
woosh.Token(woosh.NAME, 'self', 756, 15, 756, 19),
woosh.Token(woosh.OP, ',', 756, 19, 756, 20),
woosh.Token(woosh.NAME, 'fp', 756, 21, 756, 23),
woosh.Token(woosh.OP, ',', 756, 23, 756, 24),
woosh.Token(woosh.NAME, 'filename', 756, 25, 756, 33),
woosh.Token(woosh.OP, '=', 756, 33, 756, 34),
woosh.Token(woosh.NAME, 'None', 756, 34, 756, 38),
woosh.Token(woosh.OP, ')', 756, 38, 756, 39),
woosh.Token(woosh.OP, ':', 756, 39, 756, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 756, 40, 757, 0),
woosh.Token(woosh.INDENT, ' ', 757, 0, 757, 8),
woosh.Token(woosh.STRING, '"""Deprecated, use read_file instead."""', 757, 8, 757, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 757, 48, 758, 0),
woosh.Token(woosh.NAME, 'warnings', 758, 8, 758, 16),
woosh.Token(woosh.OP, '.', 758, 16, 758, 17),
woosh.Token(woosh.NAME, 'warn', 758, 17, 758, 21),
woosh.Token(woosh.OP, '(', 758, 21, 758, 22),
woosh.Token(woosh.STRING, '"This method will be removed in future versions. "', 759, 12, 759, 63),
woosh.Token(woosh.STRING, '"Use \'parser.read_file()\' instead."', 760, 12, 760, 47),
woosh.Token(woosh.OP, ',', 760, 47, 760, 48),
woosh.Token(woosh.NAME, 'DeprecationWarning', 761, 12, 761, 30),
woosh.Token(woosh.OP, ',', 761, 30, 761, 31),
woosh.Token(woosh.NAME, 'stacklevel', 761, 32, 761, 42),
woosh.Token(woosh.OP, '=', 761, 42, 761, 43),
woosh.Token(woosh.NUMBER, '2', 761, 43, 761, 44),
woosh.Token(woosh.OP, ')', 762, 8, 762, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 762, 9, 763, 0),
woosh.Token(woosh.NAME, 'self', 763, 8, 763, 12),
woosh.Token(woosh.OP, '.', 763, 12, 763, 13),
woosh.Token(woosh.NAME, 'read_file', 763, 13, 763, 22),
woosh.Token(woosh.OP, '(', 763, 22, 763, 23),
woosh.Token(woosh.NAME, 'fp', 763, 23, 763, 25),
woosh.Token(woosh.OP, ',', 763, 25, 763, 26),
woosh.Token(woosh.NAME, 'source', 763, 27, 763, 33),
woosh.Token(woosh.OP, '=', 763, 33, 763, 34),
woosh.Token(woosh.NAME, 'filename', 763, 34, 763, 42),
woosh.Token(woosh.OP, ')', 763, 42, 763, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 763, 43, 764, 0),
woosh.Token(woosh.DEDENT, ' ', 765, 0, 765, 4),
woosh.Token(woosh.NAME, 'def', 765, 4, 765, 7),
woosh.Token(woosh.NAME, 'get', 765, 8, 765, 11),
woosh.Token(woosh.OP, '(', 765, 11, 765, 12),
woosh.Token(woosh.NAME, 'self', 765, 12, 765, 16),
woosh.Token(woosh.OP, ',', 765, 16, 765, 17),
woosh.Token(woosh.NAME, 'section', 765, 18, 765, 25),
woosh.Token(woosh.OP, ',', 765, 25, 765, 26),
woosh.Token(woosh.NAME, 'option', 765, 27, 765, 33),
woosh.Token(woosh.OP, ',', 765, 33, 765, 34),
woosh.Token(woosh.OP, '*', 765, 35, 765, 36),
woosh.Token(woosh.OP, ',', 765, 36, 765, 37),
woosh.Token(woosh.NAME, 'raw', 765, 38, 765, 41),
woosh.Token(woosh.OP, '=', 765, 41, 765, 42),
woosh.Token(woosh.NAME, 'False', 765, 42, 765, 47),
woosh.Token(woosh.OP, ',', 765, 47, 765, 48),
woosh.Token(woosh.NAME, 'vars', 765, 49, 765, 53),
woosh.Token(woosh.OP, '=', 765, 53, 765, 54),
woosh.Token(woosh.NAME, 'None', 765, 54, 765, 58),
woosh.Token(woosh.OP, ',', 765, 58, 765, 59),
woosh.Token(woosh.NAME, 'fallback', 765, 60, 765, 68),
woosh.Token(woosh.OP, '=', 765, 68, 765, 69),
woosh.Token(woosh.NAME, '_UNSET', 765, 69, 765, 75),
woosh.Token(woosh.OP, ')', 765, 75, 765, 76),
woosh.Token(woosh.OP, ':', 765, 76, 765, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 765, 77, 766, 0),
woosh.Token(woosh.INDENT, ' ', 766, 0, 766, 8),
woosh.Token(woosh.STRING, '"""Get an option value for a given section.\r\n\r\n If `vars\' is provided, it must be a dictionary. The option is looked up\r\n in `vars\' (if provided), `section\', and in `DEFAULTSECT\' in that order.\r\n If the key is not found and `fallback\' is provided, it is used as\r\n a fallback value. `None\' can be provided as a `fallback\' value.\r\n\r\n If interpolation is enabled and the optional argument `raw\' is False,\r\n all interpolations are expanded in the return values.\r\n\r\n Arguments `raw\', `vars\', and `fallback\' are keyword only.\r\n\r\n The section DEFAULT is special.\r\n """', 766, 8, 779, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 779, 11, 780, 0),
woosh.Token(woosh.NAME, 'try', 780, 8, 780, 11),
woosh.Token(woosh.OP, ':', 780, 11, 780, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 780, 12, 781, 0),
woosh.Token(woosh.INDENT, ' ', 781, 0, 781, 12),
woosh.Token(woosh.NAME, 'd', 781, 12, 781, 13),
woosh.Token(woosh.OP, '=', 781, 14, 781, 15),
woosh.Token(woosh.NAME, 'self', 781, 16, 781, 20),
woosh.Token(woosh.OP, '.', 781, 20, 781, 21),
woosh.Token(woosh.NAME, '_unify_values', 781, 21, 781, 34),
woosh.Token(woosh.OP, '(', 781, 34, 781, 35),
woosh.Token(woosh.NAME, 'section', 781, 35, 781, 42),
woosh.Token(woosh.OP, ',', 781, 42, 781, 43),
woosh.Token(woosh.NAME, 'vars', 781, 44, 781, 48),
woosh.Token(woosh.OP, ')', 781, 48, 781, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 781, 49, 782, 0),
woosh.Token(woosh.DEDENT, ' ', 782, 0, 782, 8),
woosh.Token(woosh.NAME, 'except', 782, 8, 782, 14),
woosh.Token(woosh.NAME, 'NoSectionError', 782, 15, 782, 29),
woosh.Token(woosh.OP, ':', 782, 29, 782, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 782, 30, 783, 0),
woosh.Token(woosh.INDENT, ' ', 783, 0, 783, 12),
woosh.Token(woosh.NAME, 'if', 783, 12, 783, 14),
woosh.Token(woosh.NAME, 'fallback', 783, 15, 783, 23),
woosh.Token(woosh.NAME, 'is', 783, 24, 783, 26),
woosh.Token(woosh.NAME, '_UNSET', 783, 27, 783, 33),
woosh.Token(woosh.OP, ':', 783, 33, 783, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 783, 34, 784, 0),
woosh.Token(woosh.INDENT, ' ', 784, 0, 784, 16),
woosh.Token(woosh.NAME, 'raise', 784, 16, 784, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 784, 21, 785, 0),
woosh.Token(woosh.DEDENT, ' ', 785, 0, 785, 12),
woosh.Token(woosh.NAME, 'else', 785, 12, 785, 16),
woosh.Token(woosh.OP, ':', 785, 16, 785, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 785, 17, 786, 0),
woosh.Token(woosh.INDENT, ' ', 786, 0, 786, 16),
woosh.Token(woosh.NAME, 'return', 786, 16, 786, 22),
woosh.Token(woosh.NAME, 'fallback', 786, 23, 786, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 786, 31, 787, 0),
woosh.Token(woosh.DEDENT, ' ', 787, 0, 787, 8),
woosh.Token(woosh.DEDENT, '', 787, 8, 787, 8),
woosh.Token(woosh.NAME, 'option', 787, 8, 787, 14),
woosh.Token(woosh.OP, '=', 787, 15, 787, 16),
woosh.Token(woosh.NAME, 'self', 787, 17, 787, 21),
woosh.Token(woosh.OP, '.', 787, 21, 787, 22),
woosh.Token(woosh.NAME, 'optionxform', 787, 22, 787, 33),
woosh.Token(woosh.OP, '(', 787, 33, 787, 34),
woosh.Token(woosh.NAME, 'option', 787, 34, 787, 40),
woosh.Token(woosh.OP, ')', 787, 40, 787, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 787, 41, 788, 0),
woosh.Token(woosh.NAME, 'try', 788, 8, 788, 11),
woosh.Token(woosh.OP, ':', 788, 11, 788, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 788, 12, 789, 0),
woosh.Token(woosh.INDENT, ' ', 789, 0, 789, 12),
woosh.Token(woosh.NAME, 'value', 789, 12, 789, 17),
woosh.Token(woosh.OP, '=', 789, 18, 789, 19),
woosh.Token(woosh.NAME, 'd', 789, 20, 789, 21),
woosh.Token(woosh.OP, '[', 789, 21, 789, 22),
woosh.Token(woosh.NAME, 'option', 789, 22, 789, 28),
woosh.Token(woosh.OP, ']', 789, 28, 789, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 789, 29, 790, 0),
woosh.Token(woosh.DEDENT, ' ', 790, 0, 790, 8),
woosh.Token(woosh.NAME, 'except', 790, 8, 790, 14),
woosh.Token(woosh.NAME, 'KeyError', 790, 15, 790, 23),
woosh.Token(woosh.OP, ':', 790, 23, 790, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 790, 24, 791, 0),
woosh.Token(woosh.INDENT, ' ', 791, 0, 791, 12),
woosh.Token(woosh.NAME, 'if', 791, 12, 791, 14),
woosh.Token(woosh.NAME, 'fallback', 791, 15, 791, 23),
woosh.Token(woosh.NAME, 'is', 791, 24, 791, 26),
woosh.Token(woosh.NAME, '_UNSET', 791, 27, 791, 33),
woosh.Token(woosh.OP, ':', 791, 33, 791, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 791, 34, 792, 0),
woosh.Token(woosh.INDENT, ' ', 792, 0, 792, 16),
woosh.Token(woosh.NAME, 'raise', 792, 16, 792, 21),
woosh.Token(woosh.NAME, 'NoOptionError', 792, 22, 792, 35),
woosh.Token(woosh.OP, '(', 792, 35, 792, 36),
woosh.Token(woosh.NAME, 'option', 792, 36, 792, 42),
woosh.Token(woosh.OP, ',', 792, 42, 792, 43),
woosh.Token(woosh.NAME, 'section', 792, 44, 792, 51),
woosh.Token(woosh.OP, ')', 792, 51, 792, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 792, 52, 793, 0),
woosh.Token(woosh.DEDENT, ' ', 793, 0, 793, 12),
woosh.Token(woosh.NAME, 'else', 793, 12, 793, 16),
woosh.Token(woosh.OP, ':', 793, 16, 793, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 793, 17, 794, 0),
woosh.Token(woosh.INDENT, ' ', 794, 0, 794, 16),
woosh.Token(woosh.NAME, 'return', 794, 16, 794, 22),
woosh.Token(woosh.NAME, 'fallback', 794, 23, 794, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 794, 31, 795, 0),
woosh.Token(woosh.DEDENT, ' ', 796, 0, 796, 8),
woosh.Token(woosh.DEDENT, '', 796, 8, 796, 8),
woosh.Token(woosh.NAME, 'if', 796, 8, 796, 10),
woosh.Token(woosh.NAME, 'raw', 796, 11, 796, 14),
woosh.Token(woosh.NAME, 'or', 796, 15, 796, 17),
woosh.Token(woosh.NAME, 'value', 796, 18, 796, 23),
woosh.Token(woosh.NAME, 'is', 796, 24, 796, 26),
woosh.Token(woosh.NAME, 'None', 796, 27, 796, 31),
woosh.Token(woosh.OP, ':', 796, 31, 796, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 796, 32, 797, 0),
woosh.Token(woosh.INDENT, ' ', 797, 0, 797, 12),
woosh.Token(woosh.NAME, 'return', 797, 12, 797, 18),
woosh.Token(woosh.NAME, 'value', 797, 19, 797, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 797, 24, 798, 0),
woosh.Token(woosh.DEDENT, ' ', 798, 0, 798, 8),
woosh.Token(woosh.NAME, 'else', 798, 8, 798, 12),
woosh.Token(woosh.OP, ':', 798, 12, 798, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 798, 13, 799, 0),
woosh.Token(woosh.INDENT, ' ', 799, 0, 799, 12),
woosh.Token(woosh.NAME, 'return', 799, 12, 799, 18),
woosh.Token(woosh.NAME, 'self', 799, 19, 799, 23),
woosh.Token(woosh.OP, '.', 799, 23, 799, 24),
woosh.Token(woosh.NAME, '_interpolation', 799, 24, 799, 38),
woosh.Token(woosh.OP, '.', 799, 38, 799, 39),
woosh.Token(woosh.NAME, 'before_get', 799, 39, 799, 49),
woosh.Token(woosh.OP, '(', 799, 49, 799, 50),
woosh.Token(woosh.NAME, 'self', 799, 50, 799, 54),
woosh.Token(woosh.OP, ',', 799, 54, 799, 55),
woosh.Token(woosh.NAME, 'section', 799, 56, 799, 63),
woosh.Token(woosh.OP, ',', 799, 63, 799, 64),
woosh.Token(woosh.NAME, 'option', 799, 65, 799, 71),
woosh.Token(woosh.OP, ',', 799, 71, 799, 72),
woosh.Token(woosh.NAME, 'value', 799, 73, 799, 78),
woosh.Token(woosh.OP, ',', 799, 78, 799, 79),
woosh.Token(woosh.NAME, 'd', 800, 50, 800, 51),
woosh.Token(woosh.OP, ')', 800, 51, 800, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 800, 52, 801, 0),
woosh.Token(woosh.DEDENT, ' ', 802, 0, 802, 4),
woosh.Token(woosh.DEDENT, '', 802, 4, 802, 4),
woosh.Token(woosh.NAME, 'def', 802, 4, 802, 7),
woosh.Token(woosh.NAME, '_get', 802, 8, 802, 12),
woosh.Token(woosh.OP, '(', 802, 12, 802, 13),
woosh.Token(woosh.NAME, 'self', 802, 13, 802, 17),
woosh.Token(woosh.OP, ',', 802, 17, 802, 18),
woosh.Token(woosh.NAME, 'section', 802, 19, 802, 26),
woosh.Token(woosh.OP, ',', 802, 26, 802, 27),
woosh.Token(woosh.NAME, 'conv', 802, 28, 802, 32),
woosh.Token(woosh.OP, ',', 802, 32, 802, 33),
woosh.Token(woosh.NAME, 'option', 802, 34, 802, 40),
woosh.Token(woosh.OP, ',', 802, 40, 802, 41),
woosh.Token(woosh.OP, '**', 802, 42, 802, 44),
woosh.Token(woosh.NAME, 'kwargs', 802, 44, 802, 50),
woosh.Token(woosh.OP, ')', 802, 50, 802, 51),
woosh.Token(woosh.OP, ':', 802, 51, 802, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 802, 52, 803, 0),
woosh.Token(woosh.INDENT, ' ', 803, 0, 803, 8),
woosh.Token(woosh.NAME, 'return', 803, 8, 803, 14),
woosh.Token(woosh.NAME, 'conv', 803, 15, 803, 19),
woosh.Token(woosh.OP, '(', 803, 19, 803, 20),
woosh.Token(woosh.NAME, 'self', 803, 20, 803, 24),
woosh.Token(woosh.OP, '.', 803, 24, 803, 25),
woosh.Token(woosh.NAME, 'get', 803, 25, 803, 28),
woosh.Token(woosh.OP, '(', 803, 28, 803, 29),
woosh.Token(woosh.NAME, 'section', 803, 29, 803, 36),
woosh.Token(woosh.OP, ',', 803, 36, 803, 37),
woosh.Token(woosh.NAME, 'option', 803, 38, 803, 44),
woosh.Token(woosh.OP, ',', 803, 44, 803, 45),
woosh.Token(woosh.OP, '**', 803, 46, 803, 48),
woosh.Token(woosh.NAME, 'kwargs', 803, 48, 803, 54),
woosh.Token(woosh.OP, ')', 803, 54, 803, 55),
woosh.Token(woosh.OP, ')', 803, 55, 803, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 803, 56, 804, 0),
woosh.Token(woosh.DEDENT, ' ', 805, 0, 805, 4),
woosh.Token(woosh.NAME, 'def', 805, 4, 805, 7),
woosh.Token(woosh.NAME, '_get_conv', 805, 8, 805, 17),
woosh.Token(woosh.OP, '(', 805, 17, 805, 18),
woosh.Token(woosh.NAME, 'self', 805, 18, 805, 22),
woosh.Token(woosh.OP, ',', 805, 22, 805, 23),
woosh.Token(woosh.NAME, 'section', 805, 24, 805, 31),
woosh.Token(woosh.OP, ',', 805, 31, 805, 32),
woosh.Token(woosh.NAME, 'option', 805, 33, 805, 39),
woosh.Token(woosh.OP, ',', 805, 39, 805, 40),
woosh.Token(woosh.NAME, 'conv', 805, 41, 805, 45),
woosh.Token(woosh.OP, ',', 805, 45, 805, 46),
woosh.Token(woosh.OP, '*', 805, 47, 805, 48),
woosh.Token(woosh.OP, ',', 805, 48, 805, 49),
woosh.Token(woosh.NAME, 'raw', 805, 50, 805, 53),
woosh.Token(woosh.OP, '=', 805, 53, 805, 54),
woosh.Token(woosh.NAME, 'False', 805, 54, 805, 59),
woosh.Token(woosh.OP, ',', 805, 59, 805, 60),
woosh.Token(woosh.NAME, 'vars', 805, 61, 805, 65),
woosh.Token(woosh.OP, '=', 805, 65, 805, 66),
woosh.Token(woosh.NAME, 'None', 805, 66, 805, 70),
woosh.Token(woosh.OP, ',', 805, 70, 805, 71),
woosh.Token(woosh.NAME, 'fallback', 806, 18, 806, 26),
woosh.Token(woosh.OP, '=', 806, 26, 806, 27),
woosh.Token(woosh.NAME, '_UNSET', 806, 27, 806, 33),
woosh.Token(woosh.OP, ',', 806, 33, 806, 34),
woosh.Token(woosh.OP, '**', 806, 35, 806, 37),
woosh.Token(woosh.NAME, 'kwargs', 806, 37, 806, 43),
woosh.Token(woosh.OP, ')', 806, 43, 806, 44),
woosh.Token(woosh.OP, ':', 806, 44, 806, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 806, 45, 807, 0),
woosh.Token(woosh.INDENT, ' ', 807, 0, 807, 8),
woosh.Token(woosh.NAME, 'try', 807, 8, 807, 11),
woosh.Token(woosh.OP, ':', 807, 11, 807, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 807, 12, 808, 0),
woosh.Token(woosh.INDENT, ' ', 808, 0, 808, 12),
woosh.Token(woosh.NAME, 'return', 808, 12, 808, 18),
woosh.Token(woosh.NAME, 'self', 808, 19, 808, 23),
woosh.Token(woosh.OP, '.', 808, 23, 808, 24),
woosh.Token(woosh.NAME, '_get', 808, 24, 808, 28),
woosh.Token(woosh.OP, '(', 808, 28, 808, 29),
woosh.Token(woosh.NAME, 'section', 808, 29, 808, 36),
woosh.Token(woosh.OP, ',', 808, 36, 808, 37),
woosh.Token(woosh.NAME, 'conv', 808, 38, 808, 42),
woosh.Token(woosh.OP, ',', 808, 42, 808, 43),
woosh.Token(woosh.NAME, 'option', 808, 44, 808, 50),
woosh.Token(woosh.OP, ',', 808, 50, 808, 51),
woosh.Token(woosh.NAME, 'raw', 808, 52, 808, 55),
woosh.Token(woosh.OP, '=', 808, 55, 808, 56),
woosh.Token(woosh.NAME, 'raw', 808, 56, 808, 59),
woosh.Token(woosh.OP, ',', 808, 59, 808, 60),
woosh.Token(woosh.NAME, 'vars', 808, 61, 808, 65),
woosh.Token(woosh.OP, '=', 808, 65, 808, 66),
woosh.Token(woosh.NAME, 'vars', 808, 66, 808, 70),
woosh.Token(woosh.OP, ',', 808, 70, 808, 71),
woosh.Token(woosh.OP, '**', 809, 29, 809, 31),
woosh.Token(woosh.NAME, 'kwargs', 809, 31, 809, 37),
woosh.Token(woosh.OP, ')', 809, 37, 809, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 809, 38, 810, 0),
woosh.Token(woosh.DEDENT, ' ', 810, 0, 810, 8),
woosh.Token(woosh.NAME, 'except', 810, 8, 810, 14),
woosh.Token(woosh.OP, '(', 810, 15, 810, 16),
woosh.Token(woosh.NAME, 'NoSectionError', 810, 16, 810, 30),
woosh.Token(woosh.OP, ',', 810, 30, 810, 31),
woosh.Token(woosh.NAME, 'NoOptionError', 810, 32, 810, 45),
woosh.Token(woosh.OP, ')', 810, 45, 810, 46),
woosh.Token(woosh.OP, ':', 810, 46, 810, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 810, 47, 811, 0),
woosh.Token(woosh.INDENT, ' ', 811, 0, 811, 12),
woosh.Token(woosh.NAME, 'if', 811, 12, 811, 14),
woosh.Token(woosh.NAME, 'fallback', 811, 15, 811, 23),
woosh.Token(woosh.NAME, 'is', 811, 24, 811, 26),
woosh.Token(woosh.NAME, '_UNSET', 811, 27, 811, 33),
woosh.Token(woosh.OP, ':', 811, 33, 811, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 811, 34, 812, 0),
woosh.Token(woosh.INDENT, ' ', 812, 0, 812, 16),
woosh.Token(woosh.NAME, 'raise', 812, 16, 812, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 812, 21, 813, 0),
woosh.Token(woosh.DEDENT, ' ', 813, 0, 813, 12),
woosh.Token(woosh.NAME, 'return', 813, 12, 813, 18),
woosh.Token(woosh.NAME, 'fallback', 813, 19, 813, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 813, 27, 814, 0),
woosh.Token(woosh.COMMENT, '# getint, getfloat and getboolean provided directly for backwards compat', 815, 4, 815, 76),
woosh.Token(woosh.DEDENT, ' ', 816, 0, 816, 4),
woosh.Token(woosh.DEDENT, '', 816, 4, 816, 4),
woosh.Token(woosh.NAME, 'def', 816, 4, 816, 7),
woosh.Token(woosh.NAME, 'getint', 816, 8, 816, 14),
woosh.Token(woosh.OP, '(', 816, 14, 816, 15),
woosh.Token(woosh.NAME, 'self', 816, 15, 816, 19),
woosh.Token(woosh.OP, ',', 816, 19, 816, 20),
woosh.Token(woosh.NAME, 'section', 816, 21, 816, 28),
woosh.Token(woosh.OP, ',', 816, 28, 816, 29),
woosh.Token(woosh.NAME, 'option', 816, 30, 816, 36),
woosh.Token(woosh.OP, ',', 816, 36, 816, 37),
woosh.Token(woosh.OP, '*', 816, 38, 816, 39),
woosh.Token(woosh.OP, ',', 816, 39, 816, 40),
woosh.Token(woosh.NAME, 'raw', 816, 41, 816, 44),
woosh.Token(woosh.OP, '=', 816, 44, 816, 45),
woosh.Token(woosh.NAME, 'False', 816, 45, 816, 50),
woosh.Token(woosh.OP, ',', 816, 50, 816, 51),
woosh.Token(woosh.NAME, 'vars', 816, 52, 816, 56),
woosh.Token(woosh.OP, '=', 816, 56, 816, 57),
woosh.Token(woosh.NAME, 'None', 816, 57, 816, 61),
woosh.Token(woosh.OP, ',', 816, 61, 816, 62),
woosh.Token(woosh.NAME, 'fallback', 817, 15, 817, 23),
woosh.Token(woosh.OP, '=', 817, 23, 817, 24),
woosh.Token(woosh.NAME, '_UNSET', 817, 24, 817, 30),
woosh.Token(woosh.OP, ',', 817, 30, 817, 31),
woosh.Token(woosh.OP, '**', 817, 32, 817, 34),
woosh.Token(woosh.NAME, 'kwargs', 817, 34, 817, 40),
woosh.Token(woosh.OP, ')', 817, 40, 817, 41),
woosh.Token(woosh.OP, ':', 817, 41, 817, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 817, 42, 818, 0),
woosh.Token(woosh.INDENT, ' ', 818, 0, 818, 8),
woosh.Token(woosh.NAME, 'return', 818, 8, 818, 14),
woosh.Token(woosh.NAME, 'self', 818, 15, 818, 19),
woosh.Token(woosh.OP, '.', 818, 19, 818, 20),
woosh.Token(woosh.NAME, '_get_conv', 818, 20, 818, 29),
woosh.Token(woosh.OP, '(', 818, 29, 818, 30),
woosh.Token(woosh.NAME, 'section', 818, 30, 818, 37),
woosh.Token(woosh.OP, ',', 818, 37, 818, 38),
woosh.Token(woosh.NAME, 'option', 818, 39, 818, 45),
woosh.Token(woosh.OP, ',', 818, 45, 818, 46),
woosh.Token(woosh.NAME, 'int', 818, 47, 818, 50),
woosh.Token(woosh.OP, ',', 818, 50, 818, 51),
woosh.Token(woosh.NAME, 'raw', 818, 52, 818, 55),
woosh.Token(woosh.OP, '=', 818, 55, 818, 56),
woosh.Token(woosh.NAME, 'raw', 818, 56, 818, 59),
woosh.Token(woosh.OP, ',', 818, 59, 818, 60),
woosh.Token(woosh.NAME, 'vars', 818, 61, 818, 65),
woosh.Token(woosh.OP, '=', 818, 65, 818, 66),
woosh.Token(woosh.NAME, 'vars', 818, 66, 818, 70),
woosh.Token(woosh.OP, ',', 818, 70, 818, 71),
woosh.Token(woosh.NAME, 'fallback', 819, 30, 819, 38),
woosh.Token(woosh.OP, '=', 819, 38, 819, 39),
woosh.Token(woosh.NAME, 'fallback', 819, 39, 819, 47),
woosh.Token(woosh.OP, ',', 819, 47, 819, 48),
woosh.Token(woosh.OP, '**', 819, 49, 819, 51),
woosh.Token(woosh.NAME, 'kwargs', 819, 51, 819, 57),
woosh.Token(woosh.OP, ')', 819, 57, 819, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 819, 58, 820, 0),
woosh.Token(woosh.DEDENT, ' ', 821, 0, 821, 4),
woosh.Token(woosh.NAME, 'def', 821, 4, 821, 7),
woosh.Token(woosh.NAME, 'getfloat', 821, 8, 821, 16),
woosh.Token(woosh.OP, '(', 821, 16, 821, 17),
woosh.Token(woosh.NAME, 'self', 821, 17, 821, 21),
woosh.Token(woosh.OP, ',', 821, 21, 821, 22),
woosh.Token(woosh.NAME, 'section', 821, 23, 821, 30),
woosh.Token(woosh.OP, ',', 821, 30, 821, 31),
woosh.Token(woosh.NAME, 'option', 821, 32, 821, 38),
woosh.Token(woosh.OP, ',', 821, 38, 821, 39),
woosh.Token(woosh.OP, '*', 821, 40, 821, 41),
woosh.Token(woosh.OP, ',', 821, 41, 821, 42),
woosh.Token(woosh.NAME, 'raw', 821, 43, 821, 46),
woosh.Token(woosh.OP, '=', 821, 46, 821, 47),
woosh.Token(woosh.NAME, 'False', 821, 47, 821, 52),
woosh.Token(woosh.OP, ',', 821, 52, 821, 53),
woosh.Token(woosh.NAME, 'vars', 821, 54, 821, 58),
woosh.Token(woosh.OP, '=', 821, 58, 821, 59),
woosh.Token(woosh.NAME, 'None', 821, 59, 821, 63),
woosh.Token(woosh.OP, ',', 821, 63, 821, 64),
woosh.Token(woosh.NAME, 'fallback', 822, 17, 822, 25),
woosh.Token(woosh.OP, '=', 822, 25, 822, 26),
woosh.Token(woosh.NAME, '_UNSET', 822, 26, 822, 32),
woosh.Token(woosh.OP, ',', 822, 32, 822, 33),
woosh.Token(woosh.OP, '**', 822, 34, 822, 36),
woosh.Token(woosh.NAME, 'kwargs', 822, 36, 822, 42),
woosh.Token(woosh.OP, ')', 822, 42, 822, 43),
woosh.Token(woosh.OP, ':', 822, 43, 822, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 822, 44, 823, 0),
woosh.Token(woosh.INDENT, ' ', 823, 0, 823, 8),
woosh.Token(woosh.NAME, 'return', 823, 8, 823, 14),
woosh.Token(woosh.NAME, 'self', 823, 15, 823, 19),
woosh.Token(woosh.OP, '.', 823, 19, 823, 20),
woosh.Token(woosh.NAME, '_get_conv', 823, 20, 823, 29),
woosh.Token(woosh.OP, '(', 823, 29, 823, 30),
woosh.Token(woosh.NAME, 'section', 823, 30, 823, 37),
woosh.Token(woosh.OP, ',', 823, 37, 823, 38),
woosh.Token(woosh.NAME, 'option', 823, 39, 823, 45),
woosh.Token(woosh.OP, ',', 823, 45, 823, 46),
woosh.Token(woosh.NAME, 'float', 823, 47, 823, 52),
woosh.Token(woosh.OP, ',', 823, 52, 823, 53),
woosh.Token(woosh.NAME, 'raw', 823, 54, 823, 57),
woosh.Token(woosh.OP, '=', 823, 57, 823, 58),
woosh.Token(woosh.NAME, 'raw', 823, 58, 823, 61),
woosh.Token(woosh.OP, ',', 823, 61, 823, 62),
woosh.Token(woosh.NAME, 'vars', 823, 63, 823, 67),
woosh.Token(woosh.OP, '=', 823, 67, 823, 68),
woosh.Token(woosh.NAME, 'vars', 823, 68, 823, 72),
woosh.Token(woosh.OP, ',', 823, 72, 823, 73),
woosh.Token(woosh.NAME, 'fallback', 824, 30, 824, 38),
woosh.Token(woosh.OP, '=', 824, 38, 824, 39),
woosh.Token(woosh.NAME, 'fallback', 824, 39, 824, 47),
woosh.Token(woosh.OP, ',', 824, 47, 824, 48),
woosh.Token(woosh.OP, '**', 824, 49, 824, 51),
woosh.Token(woosh.NAME, 'kwargs', 824, 51, 824, 57),
woosh.Token(woosh.OP, ')', 824, 57, 824, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 824, 58, 825, 0),
woosh.Token(woosh.DEDENT, ' ', 826, 0, 826, 4),
woosh.Token(woosh.NAME, 'def', 826, 4, 826, 7),
woosh.Token(woosh.NAME, 'getboolean', 826, 8, 826, 18),
woosh.Token(woosh.OP, '(', 826, 18, 826, 19),
woosh.Token(woosh.NAME, 'self', 826, 19, 826, 23),
woosh.Token(woosh.OP, ',', 826, 23, 826, 24),
woosh.Token(woosh.NAME, 'section', 826, 25, 826, 32),
woosh.Token(woosh.OP, ',', 826, 32, 826, 33),
woosh.Token(woosh.NAME, 'option', 826, 34, 826, 40),
woosh.Token(woosh.OP, ',', 826, 40, 826, 41),
woosh.Token(woosh.OP, '*', 826, 42, 826, 43),
woosh.Token(woosh.OP, ',', 826, 43, 826, 44),
woosh.Token(woosh.NAME, 'raw', 826, 45, 826, 48),
woosh.Token(woosh.OP, '=', 826, 48, 826, 49),
woosh.Token(woosh.NAME, 'False', 826, 49, 826, 54),
woosh.Token(woosh.OP, ',', 826, 54, 826, 55),
woosh.Token(woosh.NAME, 'vars', 826, 56, 826, 60),
woosh.Token(woosh.OP, '=', 826, 60, 826, 61),
woosh.Token(woosh.NAME, 'None', 826, 61, 826, 65),
woosh.Token(woosh.OP, ',', 826, 65, 826, 66),
woosh.Token(woosh.NAME, 'fallback', 827, 19, 827, 27),
woosh.Token(woosh.OP, '=', 827, 27, 827, 28),
woosh.Token(woosh.NAME, '_UNSET', 827, 28, 827, 34),
woosh.Token(woosh.OP, ',', 827, 34, 827, 35),
woosh.Token(woosh.OP, '**', 827, 36, 827, 38),
woosh.Token(woosh.NAME, 'kwargs', 827, 38, 827, 44),
woosh.Token(woosh.OP, ')', 827, 44, 827, 45),
woosh.Token(woosh.OP, ':', 827, 45, 827, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 827, 46, 828, 0),
woosh.Token(woosh.INDENT, ' ', 828, 0, 828, 8),
woosh.Token(woosh.NAME, 'return', 828, 8, 828, 14),
woosh.Token(woosh.NAME, 'self', 828, 15, 828, 19),
woosh.Token(woosh.OP, '.', 828, 19, 828, 20),
woosh.Token(woosh.NAME, '_get_conv', 828, 20, 828, 29),
woosh.Token(woosh.OP, '(', 828, 29, 828, 30),
woosh.Token(woosh.NAME, 'section', 828, 30, 828, 37),
woosh.Token(woosh.OP, ',', 828, 37, 828, 38),
woosh.Token(woosh.NAME, 'option', 828, 39, 828, 45),
woosh.Token(woosh.OP, ',', 828, 45, 828, 46),
woosh.Token(woosh.NAME, 'self', 828, 47, 828, 51),
woosh.Token(woosh.OP, '.', 828, 51, 828, 52),
woosh.Token(woosh.NAME, '_convert_to_boolean', 828, 52, 828, 71),
woosh.Token(woosh.OP, ',', 828, 71, 828, 72),
woosh.Token(woosh.NAME, 'raw', 829, 30, 829, 33),
woosh.Token(woosh.OP, '=', 829, 33, 829, 34),
woosh.Token(woosh.NAME, 'raw', 829, 34, 829, 37),
woosh.Token(woosh.OP, ',', 829, 37, 829, 38),
woosh.Token(woosh.NAME, 'vars', 829, 39, 829, 43),
woosh.Token(woosh.OP, '=', 829, 43, 829, 44),
woosh.Token(woosh.NAME, 'vars', 829, 44, 829, 48),
woosh.Token(woosh.OP, ',', 829, 48, 829, 49),
woosh.Token(woosh.NAME, 'fallback', 829, 50, 829, 58),
woosh.Token(woosh.OP, '=', 829, 58, 829, 59),
woosh.Token(woosh.NAME, 'fallback', 829, 59, 829, 67),
woosh.Token(woosh.OP, ',', 829, 67, 829, 68),
woosh.Token(woosh.OP, '**', 829, 69, 829, 71),
woosh.Token(woosh.NAME, 'kwargs', 829, 71, 829, 77),
woosh.Token(woosh.OP, ')', 829, 77, 829, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 829, 78, 830, 0),
woosh.Token(woosh.DEDENT, ' ', 831, 0, 831, 4),
woosh.Token(woosh.NAME, 'def', 831, 4, 831, 7),
woosh.Token(woosh.NAME, 'items', 831, 8, 831, 13),
woosh.Token(woosh.OP, '(', 831, 13, 831, 14),
woosh.Token(woosh.NAME, 'self', 831, 14, 831, 18),
woosh.Token(woosh.OP, ',', 831, 18, 831, 19),
woosh.Token(woosh.NAME, 'section', 831, 20, 831, 27),
woosh.Token(woosh.OP, '=', 831, 27, 831, 28),
woosh.Token(woosh.NAME, '_UNSET', 831, 28, 831, 34),
woosh.Token(woosh.OP, ',', 831, 34, 831, 35),
woosh.Token(woosh.NAME, 'raw', 831, 36, 831, 39),
woosh.Token(woosh.OP, '=', 831, 39, 831, 40),
woosh.Token(woosh.NAME, 'False', 831, 40, 831, 45),
woosh.Token(woosh.OP, ',', 831, 45, 831, 46),
woosh.Token(woosh.NAME, 'vars', 831, 47, 831, 51),
woosh.Token(woosh.OP, '=', 831, 51, 831, 52),
woosh.Token(woosh.NAME, 'None', 831, 52, 831, 56),
woosh.Token(woosh.OP, ')', 831, 56, 831, 57),
woosh.Token(woosh.OP, ':', 831, 57, 831, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 831, 58, 832, 0),
woosh.Token(woosh.INDENT, ' ', 832, 0, 832, 8),
woosh.Token(woosh.STRING, '"""Return a list of (name, value) tuples for each option in a section.\r\n\r\n All % interpolations are expanded in the return values, based on the\r\n defaults passed into the constructor, unless the optional argument\r\n `raw\' is true. Additional substitutions may be provided using the\r\n `vars\' argument, which must be a dictionary whose contents overrides\r\n any pre-existing defaults.\r\n\r\n The section DEFAULT is special.\r\n """', 832, 8, 841, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 841, 11, 842, 0),
woosh.Token(woosh.NAME, 'if', 842, 8, 842, 10),
woosh.Token(woosh.NAME, 'section', 842, 11, 842, 18),
woosh.Token(woosh.NAME, 'is', 842, 19, 842, 21),
woosh.Token(woosh.NAME, '_UNSET', 842, 22, 842, 28),
woosh.Token(woosh.OP, ':', 842, 28, 842, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 842, 29, 843, 0),
woosh.Token(woosh.INDENT, ' ', 843, 0, 843, 12),
woosh.Token(woosh.NAME, 'return', 843, 12, 843, 18),
woosh.Token(woosh.NAME, 'super', 843, 19, 843, 24),
woosh.Token(woosh.OP, '(', 843, 24, 843, 25),
woosh.Token(woosh.OP, ')', 843, 25, 843, 26),
woosh.Token(woosh.OP, '.', 843, 26, 843, 27),
woosh.Token(woosh.NAME, 'items', 843, 27, 843, 32),
woosh.Token(woosh.OP, '(', 843, 32, 843, 33),
woosh.Token(woosh.OP, ')', 843, 33, 843, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 843, 34, 844, 0),
woosh.Token(woosh.DEDENT, ' ', 844, 0, 844, 8),
woosh.Token(woosh.NAME, 'd', 844, 8, 844, 9),
woosh.Token(woosh.OP, '=', 844, 10, 844, 11),
woosh.Token(woosh.NAME, 'self', 844, 12, 844, 16),
woosh.Token(woosh.OP, '.', 844, 16, 844, 17),
woosh.Token(woosh.NAME, '_defaults', 844, 17, 844, 26),
woosh.Token(woosh.OP, '.', 844, 26, 844, 27),
woosh.Token(woosh.NAME, 'copy', 844, 27, 844, 31),
woosh.Token(woosh.OP, '(', 844, 31, 844, 32),
woosh.Token(woosh.OP, ')', 844, 32, 844, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 844, 33, 845, 0),
woosh.Token(woosh.NAME, 'try', 845, 8, 845, 11),
woosh.Token(woosh.OP, ':', 845, 11, 845, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 845, 12, 846, 0),
woosh.Token(woosh.INDENT, ' ', 846, 0, 846, 12),
woosh.Token(woosh.NAME, 'd', 846, 12, 846, 13),
woosh.Token(woosh.OP, '.', 846, 13, 846, 14),
woosh.Token(woosh.NAME, 'update', 846, 14, 846, 20),
woosh.Token(woosh.OP, '(', 846, 20, 846, 21),
woosh.Token(woosh.NAME, 'self', 846, 21, 846, 25),
woosh.Token(woosh.OP, '.', 846, 25, 846, 26),
woosh.Token(woosh.NAME, '_sections', 846, 26, 846, 35),
woosh.Token(woosh.OP, '[', 846, 35, 846, 36),
woosh.Token(woosh.NAME, 'section', 846, 36, 846, 43),
woosh.Token(woosh.OP, ']', 846, 43, 846, 44),
woosh.Token(woosh.OP, ')', 846, 44, 846, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 846, 45, 847, 0),
woosh.Token(woosh.DEDENT, ' ', 847, 0, 847, 8),
woosh.Token(woosh.NAME, 'except', 847, 8, 847, 14),
woosh.Token(woosh.NAME, 'KeyError', 847, 15, 847, 23),
woosh.Token(woosh.OP, ':', 847, 23, 847, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 847, 24, 848, 0),
woosh.Token(woosh.INDENT, ' ', 848, 0, 848, 12),
woosh.Token(woosh.NAME, 'if', 848, 12, 848, 14),
woosh.Token(woosh.NAME, 'section', 848, 15, 848, 22),
woosh.Token(woosh.OP, '!=', 848, 23, 848, 25),
woosh.Token(woosh.NAME, 'self', 848, 26, 848, 30),
woosh.Token(woosh.OP, '.', 848, 30, 848, 31),
woosh.Token(woosh.NAME, 'default_section', 848, 31, 848, 46),
woosh.Token(woosh.OP, ':', 848, 46, 848, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 848, 47, 849, 0),
woosh.Token(woosh.INDENT, ' ', 849, 0, 849, 16),
woosh.Token(woosh.NAME, 'raise', 849, 16, 849, 21),
woosh.Token(woosh.NAME, 'NoSectionError', 849, 22, 849, 36),
woosh.Token(woosh.OP, '(', 849, 36, 849, 37),
woosh.Token(woosh.NAME, 'section', 849, 37, 849, 44),
woosh.Token(woosh.OP, ')', 849, 44, 849, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 849, 45, 850, 0),
woosh.Token(woosh.DEDENT, ' ', 850, 0, 850, 8),
woosh.Token(woosh.DEDENT, '', 850, 8, 850, 8),
woosh.Token(woosh.NAME, 'orig_keys', 850, 8, 850, 17),
woosh.Token(woosh.OP, '=', 850, 18, 850, 19),
woosh.Token(woosh.NAME, 'list', 850, 20, 850, 24),
woosh.Token(woosh.OP, '(', 850, 24, 850, 25),
woosh.Token(woosh.NAME, 'd', 850, 25, 850, 26),
woosh.Token(woosh.OP, '.', 850, 26, 850, 27),
woosh.Token(woosh.NAME, 'keys', 850, 27, 850, 31),
woosh.Token(woosh.OP, '(', 850, 31, 850, 32),
woosh.Token(woosh.OP, ')', 850, 32, 850, 33),
woosh.Token(woosh.OP, ')', 850, 33, 850, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 850, 34, 851, 0),
woosh.Token(woosh.COMMENT, '# Update with the entry specific variables', 851, 8, 851, 50),
woosh.Token(woosh.NAME, 'if', 852, 8, 852, 10),
woosh.Token(woosh.NAME, 'vars', 852, 11, 852, 15),
woosh.Token(woosh.OP, ':', 852, 15, 852, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 852, 16, 853, 0),
woosh.Token(woosh.INDENT, ' ', 853, 0, 853, 12),
woosh.Token(woosh.NAME, 'for', 853, 12, 853, 15),
woosh.Token(woosh.NAME, 'key', 853, 16, 853, 19),
woosh.Token(woosh.OP, ',', 853, 19, 853, 20),
woosh.Token(woosh.NAME, 'value', 853, 21, 853, 26),
woosh.Token(woosh.NAME, 'in', 853, 27, 853, 29),
woosh.Token(woosh.NAME, 'vars', 853, 30, 853, 34),
woosh.Token(woosh.OP, '.', 853, 34, 853, 35),
woosh.Token(woosh.NAME, 'items', 853, 35, 853, 40),
woosh.Token(woosh.OP, '(', 853, 40, 853, 41),
woosh.Token(woosh.OP, ')', 853, 41, 853, 42),
woosh.Token(woosh.OP, ':', 853, 42, 853, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 853, 43, 854, 0),
woosh.Token(woosh.INDENT, ' ', 854, 0, 854, 16),
woosh.Token(woosh.NAME, 'd', 854, 16, 854, 17),
woosh.Token(woosh.OP, '[', 854, 17, 854, 18),
woosh.Token(woosh.NAME, 'self', 854, 18, 854, 22),
woosh.Token(woosh.OP, '.', 854, 22, 854, 23),
woosh.Token(woosh.NAME, 'optionxform', 854, 23, 854, 34),
woosh.Token(woosh.OP, '(', 854, 34, 854, 35),
woosh.Token(woosh.NAME, 'key', 854, 35, 854, 38),
woosh.Token(woosh.OP, ')', 854, 38, 854, 39),
woosh.Token(woosh.OP, ']', 854, 39, 854, 40),
woosh.Token(woosh.OP, '=', 854, 41, 854, 42),
woosh.Token(woosh.NAME, 'value', 854, 43, 854, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 854, 48, 855, 0),
woosh.Token(woosh.DEDENT, ' ', 855, 0, 855, 8),
woosh.Token(woosh.DEDENT, '', 855, 8, 855, 8),
woosh.Token(woosh.NAME, 'value_getter', 855, 8, 855, 20),
woosh.Token(woosh.OP, '=', 855, 21, 855, 22),
woosh.Token(woosh.NAME, 'lambda', 855, 23, 855, 29),
woosh.Token(woosh.NAME, 'option', 855, 30, 855, 36),
woosh.Token(woosh.OP, ':', 855, 36, 855, 37),
woosh.Token(woosh.NAME, 'self', 855, 38, 855, 42),
woosh.Token(woosh.OP, '.', 855, 42, 855, 43),
woosh.Token(woosh.NAME, '_interpolation', 855, 43, 855, 57),
woosh.Token(woosh.OP, '.', 855, 57, 855, 58),
woosh.Token(woosh.NAME, 'before_get', 855, 58, 855, 68),
woosh.Token(woosh.OP, '(', 855, 68, 855, 69),
woosh.Token(woosh.NAME, 'self', 855, 69, 855, 73),
woosh.Token(woosh.OP, ',', 855, 73, 855, 74),
woosh.Token(woosh.NAME, 'section', 856, 12, 856, 19),
woosh.Token(woosh.OP, ',', 856, 19, 856, 20),
woosh.Token(woosh.NAME, 'option', 856, 21, 856, 27),
woosh.Token(woosh.OP, ',', 856, 27, 856, 28),
woosh.Token(woosh.NAME, 'd', 856, 29, 856, 30),
woosh.Token(woosh.OP, '[', 856, 30, 856, 31),
woosh.Token(woosh.NAME, 'option', 856, 31, 856, 37),
woosh.Token(woosh.OP, ']', 856, 37, 856, 38),
woosh.Token(woosh.OP, ',', 856, 38, 856, 39),
woosh.Token(woosh.NAME, 'd', 856, 40, 856, 41),
woosh.Token(woosh.OP, ')', 856, 41, 856, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 856, 42, 857, 0),
woosh.Token(woosh.NAME, 'if', 857, 8, 857, 10),
woosh.Token(woosh.NAME, 'raw', 857, 11, 857, 14),
woosh.Token(woosh.OP, ':', 857, 14, 857, 15),
woosh.Token(woosh.NEWLINE, '\r\n', 857, 15, 858, 0),
woosh.Token(woosh.INDENT, ' ', 858, 0, 858, 12),
woosh.Token(woosh.NAME, 'value_getter', 858, 12, 858, 24),
woosh.Token(woosh.OP, '=', 858, 25, 858, 26),
woosh.Token(woosh.NAME, 'lambda', 858, 27, 858, 33),
woosh.Token(woosh.NAME, 'option', 858, 34, 858, 40),
woosh.Token(woosh.OP, ':', 858, 40, 858, 41),
woosh.Token(woosh.NAME, 'd', 858, 42, 858, 43),
woosh.Token(woosh.OP, '[', 858, 43, 858, 44),
woosh.Token(woosh.NAME, 'option', 858, 44, 858, 50),
woosh.Token(woosh.OP, ']', 858, 50, 858, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 858, 51, 859, 0),
woosh.Token(woosh.DEDENT, ' ', 859, 0, 859, 8),
woosh.Token(woosh.NAME, 'return', 859, 8, 859, 14),
woosh.Token(woosh.OP, '[', 859, 15, 859, 16),
woosh.Token(woosh.OP, '(', 859, 16, 859, 17),
woosh.Token(woosh.NAME, 'option', 859, 17, 859, 23),
woosh.Token(woosh.OP, ',', 859, 23, 859, 24),
woosh.Token(woosh.NAME, 'value_getter', 859, 25, 859, 37),
woosh.Token(woosh.OP, '(', 859, 37, 859, 38),
woosh.Token(woosh.NAME, 'option', 859, 38, 859, 44),
woosh.Token(woosh.OP, ')', 859, 44, 859, 45),
woosh.Token(woosh.OP, ')', 859, 45, 859, 46),
woosh.Token(woosh.NAME, 'for', 859, 47, 859, 50),
woosh.Token(woosh.NAME, 'option', 859, 51, 859, 57),
woosh.Token(woosh.NAME, 'in', 859, 58, 859, 60),
woosh.Token(woosh.NAME, 'orig_keys', 859, 61, 859, 70),
woosh.Token(woosh.OP, ']', 859, 70, 859, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 859, 71, 860, 0),
woosh.Token(woosh.DEDENT, ' ', 861, 0, 861, 4),
woosh.Token(woosh.NAME, 'def', 861, 4, 861, 7),
woosh.Token(woosh.NAME, 'popitem', 861, 8, 861, 15),
woosh.Token(woosh.OP, '(', 861, 15, 861, 16),
woosh.Token(woosh.NAME, 'self', 861, 16, 861, 20),
woosh.Token(woosh.OP, ')', 861, 20, 861, 21),
woosh.Token(woosh.OP, ':', 861, 21, 861, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 861, 22, 862, 0),
woosh.Token(woosh.INDENT, ' ', 862, 0, 862, 8),
woosh.Token(woosh.STRING, '"""Remove a section from the parser and return it as\r\n a (section_name, section_proxy) tuple. If no section is present, raise\r\n KeyError.\r\n\r\n The section DEFAULT is never returned because it cannot be removed.\r\n """', 862, 8, 867, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 867, 11, 868, 0),
woosh.Token(woosh.NAME, 'for', 868, 8, 868, 11),
woosh.Token(woosh.NAME, 'key', 868, 12, 868, 15),
woosh.Token(woosh.NAME, 'in', 868, 16, 868, 18),
woosh.Token(woosh.NAME, 'self', 868, 19, 868, 23),
woosh.Token(woosh.OP, '.', 868, 23, 868, 24),
woosh.Token(woosh.NAME, 'sections', 868, 24, 868, 32),
woosh.Token(woosh.OP, '(', 868, 32, 868, 33),
woosh.Token(woosh.OP, ')', 868, 33, 868, 34),
woosh.Token(woosh.OP, ':', 868, 34, 868, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 868, 35, 869, 0),
woosh.Token(woosh.INDENT, ' ', 869, 0, 869, 12),
woosh.Token(woosh.NAME, 'value', 869, 12, 869, 17),
woosh.Token(woosh.OP, '=', 869, 18, 869, 19),
woosh.Token(woosh.NAME, 'self', 869, 20, 869, 24),
woosh.Token(woosh.OP, '[', 869, 24, 869, 25),
woosh.Token(woosh.NAME, 'key', 869, 25, 869, 28),
woosh.Token(woosh.OP, ']', 869, 28, 869, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 869, 29, 870, 0),
woosh.Token(woosh.NAME, 'del', 870, 12, 870, 15),
woosh.Token(woosh.NAME, 'self', 870, 16, 870, 20),
woosh.Token(woosh.OP, '[', 870, 20, 870, 21),
woosh.Token(woosh.NAME, 'key', 870, 21, 870, 24),
woosh.Token(woosh.OP, ']', 870, 24, 870, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 870, 25, 871, 0),
woosh.Token(woosh.NAME, 'return', 871, 12, 871, 18),
woosh.Token(woosh.NAME, 'key', 871, 19, 871, 22),
woosh.Token(woosh.OP, ',', 871, 22, 871, 23),
woosh.Token(woosh.NAME, 'value', 871, 24, 871, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 871, 29, 872, 0),
woosh.Token(woosh.DEDENT, ' ', 872, 0, 872, 8),
woosh.Token(woosh.NAME, 'raise', 872, 8, 872, 13),
woosh.Token(woosh.NAME, 'KeyError', 872, 14, 872, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 872, 22, 873, 0),
woosh.Token(woosh.DEDENT, ' ', 874, 0, 874, 4),
woosh.Token(woosh.NAME, 'def', 874, 4, 874, 7),
woosh.Token(woosh.NAME, 'optionxform', 874, 8, 874, 19),
woosh.Token(woosh.OP, '(', 874, 19, 874, 20),
woosh.Token(woosh.NAME, 'self', 874, 20, 874, 24),
woosh.Token(woosh.OP, ',', 874, 24, 874, 25),
woosh.Token(woosh.NAME, 'optionstr', 874, 26, 874, 35),
woosh.Token(woosh.OP, ')', 874, 35, 874, 36),
woosh.Token(woosh.OP, ':', 874, 36, 874, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 874, 37, 875, 0),
woosh.Token(woosh.INDENT, ' ', 875, 0, 875, 8),
woosh.Token(woosh.NAME, 'return', 875, 8, 875, 14),
woosh.Token(woosh.NAME, 'optionstr', 875, 15, 875, 24),
woosh.Token(woosh.OP, '.', 875, 24, 875, 25),
woosh.Token(woosh.NAME, 'lower', 875, 25, 875, 30),
woosh.Token(woosh.OP, '(', 875, 30, 875, 31),
woosh.Token(woosh.OP, ')', 875, 31, 875, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 875, 32, 876, 0),
woosh.Token(woosh.DEDENT, ' ', 877, 0, 877, 4),
woosh.Token(woosh.NAME, 'def', 877, 4, 877, 7),
woosh.Token(woosh.NAME, 'has_option', 877, 8, 877, 18),
woosh.Token(woosh.OP, '(', 877, 18, 877, 19),
woosh.Token(woosh.NAME, 'self', 877, 19, 877, 23),
woosh.Token(woosh.OP, ',', 877, 23, 877, 24),
woosh.Token(woosh.NAME, 'section', 877, 25, 877, 32),
woosh.Token(woosh.OP, ',', 877, 32, 877, 33),
woosh.Token(woosh.NAME, 'option', 877, 34, 877, 40),
woosh.Token(woosh.OP, ')', 877, 40, 877, 41),
woosh.Token(woosh.OP, ':', 877, 41, 877, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 877, 42, 878, 0),
woosh.Token(woosh.INDENT, ' ', 878, 0, 878, 8),
woosh.Token(woosh.STRING, '"""Check for the existence of a given option in a given section.\r\n If the specified `section\' is None or an empty string, DEFAULT is\r\n assumed. If the specified `section\' does not exist, returns False."""', 878, 8, 880, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 880, 77, 881, 0),
woosh.Token(woosh.NAME, 'if', 881, 8, 881, 10),
woosh.Token(woosh.NAME, 'not', 881, 11, 881, 14),
woosh.Token(woosh.NAME, 'section', 881, 15, 881, 22),
woosh.Token(woosh.NAME, 'or', 881, 23, 881, 25),
woosh.Token(woosh.NAME, 'section', 881, 26, 881, 33),
woosh.Token(woosh.OP, '==', 881, 34, 881, 36),
woosh.Token(woosh.NAME, 'self', 881, 37, 881, 41),
woosh.Token(woosh.OP, '.', 881, 41, 881, 42),
woosh.Token(woosh.NAME, 'default_section', 881, 42, 881, 57),
woosh.Token(woosh.OP, ':', 881, 57, 881, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 881, 58, 882, 0),
woosh.Token(woosh.INDENT, ' ', 882, 0, 882, 12),
woosh.Token(woosh.NAME, 'option', 882, 12, 882, 18),
woosh.Token(woosh.OP, '=', 882, 19, 882, 20),
woosh.Token(woosh.NAME, 'self', 882, 21, 882, 25),
woosh.Token(woosh.OP, '.', 882, 25, 882, 26),
woosh.Token(woosh.NAME, 'optionxform', 882, 26, 882, 37),
woosh.Token(woosh.OP, '(', 882, 37, 882, 38),
woosh.Token(woosh.NAME, 'option', 882, 38, 882, 44),
woosh.Token(woosh.OP, ')', 882, 44, 882, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 882, 45, 883, 0),
woosh.Token(woosh.NAME, 'return', 883, 12, 883, 18),
woosh.Token(woosh.NAME, 'option', 883, 19, 883, 25),
woosh.Token(woosh.NAME, 'in', 883, 26, 883, 28),
woosh.Token(woosh.NAME, 'self', 883, 29, 883, 33),
woosh.Token(woosh.OP, '.', 883, 33, 883, 34),
woosh.Token(woosh.NAME, '_defaults', 883, 34, 883, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 883, 43, 884, 0),
woosh.Token(woosh.DEDENT, ' ', 884, 0, 884, 8),
woosh.Token(woosh.NAME, 'elif', 884, 8, 884, 12),
woosh.Token(woosh.NAME, 'section', 884, 13, 884, 20),
woosh.Token(woosh.NAME, 'not', 884, 21, 884, 24),
woosh.Token(woosh.NAME, 'in', 884, 25, 884, 27),
woosh.Token(woosh.NAME, 'self', 884, 28, 884, 32),
woosh.Token(woosh.OP, '.', 884, 32, 884, 33),
woosh.Token(woosh.NAME, '_sections', 884, 33, 884, 42),
woosh.Token(woosh.OP, ':', 884, 42, 884, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 884, 43, 885, 0),
woosh.Token(woosh.INDENT, ' ', 885, 0, 885, 12),
woosh.Token(woosh.NAME, 'return', 885, 12, 885, 18),
woosh.Token(woosh.NAME, 'False', 885, 19, 885, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 885, 24, 886, 0),
woosh.Token(woosh.DEDENT, ' ', 886, 0, 886, 8),
woosh.Token(woosh.NAME, 'else', 886, 8, 886, 12),
woosh.Token(woosh.OP, ':', 886, 12, 886, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 886, 13, 887, 0),
woosh.Token(woosh.INDENT, ' ', 887, 0, 887, 12),
woosh.Token(woosh.NAME, 'option', 887, 12, 887, 18),
woosh.Token(woosh.OP, '=', 887, 19, 887, 20),
woosh.Token(woosh.NAME, 'self', 887, 21, 887, 25),
woosh.Token(woosh.OP, '.', 887, 25, 887, 26),
woosh.Token(woosh.NAME, 'optionxform', 887, 26, 887, 37),
woosh.Token(woosh.OP, '(', 887, 37, 887, 38),
woosh.Token(woosh.NAME, 'option', 887, 38, 887, 44),
woosh.Token(woosh.OP, ')', 887, 44, 887, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 887, 45, 888, 0),
woosh.Token(woosh.NAME, 'return', 888, 12, 888, 18),
woosh.Token(woosh.OP, '(', 888, 19, 888, 20),
woosh.Token(woosh.NAME, 'option', 888, 20, 888, 26),
woosh.Token(woosh.NAME, 'in', 888, 27, 888, 29),
woosh.Token(woosh.NAME, 'self', 888, 30, 888, 34),
woosh.Token(woosh.OP, '.', 888, 34, 888, 35),
woosh.Token(woosh.NAME, '_sections', 888, 35, 888, 44),
woosh.Token(woosh.OP, '[', 888, 44, 888, 45),
woosh.Token(woosh.NAME, 'section', 888, 45, 888, 52),
woosh.Token(woosh.OP, ']', 888, 52, 888, 53),
woosh.Token(woosh.NAME, 'or', 889, 20, 889, 22),
woosh.Token(woosh.NAME, 'option', 889, 23, 889, 29),
woosh.Token(woosh.NAME, 'in', 889, 30, 889, 32),
woosh.Token(woosh.NAME, 'self', 889, 33, 889, 37),
woosh.Token(woosh.OP, '.', 889, 37, 889, 38),
woosh.Token(woosh.NAME, '_defaults', 889, 38, 889, 47),
woosh.Token(woosh.OP, ')', 889, 47, 889, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 889, 48, 890, 0),
woosh.Token(woosh.DEDENT, ' ', 891, 0, 891, 4),
woosh.Token(woosh.DEDENT, '', 891, 4, 891, 4),
woosh.Token(woosh.NAME, 'def', 891, 4, 891, 7),
woosh.Token(woosh.NAME, 'set', 891, 8, 891, 11),
woosh.Token(woosh.OP, '(', 891, 11, 891, 12),
woosh.Token(woosh.NAME, 'self', 891, 12, 891, 16),
woosh.Token(woosh.OP, ',', 891, 16, 891, 17),
woosh.Token(woosh.NAME, 'section', 891, 18, 891, 25),
woosh.Token(woosh.OP, ',', 891, 25, 891, 26),
woosh.Token(woosh.NAME, 'option', 891, 27, 891, 33),
woosh.Token(woosh.OP, ',', 891, 33, 891, 34),
woosh.Token(woosh.NAME, 'value', 891, 35, 891, 40),
woosh.Token(woosh.OP, '=', 891, 40, 891, 41),
woosh.Token(woosh.NAME, 'None', 891, 41, 891, 45),
woosh.Token(woosh.OP, ')', 891, 45, 891, 46),
woosh.Token(woosh.OP, ':', 891, 46, 891, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 891, 47, 892, 0),
woosh.Token(woosh.INDENT, ' ', 892, 0, 892, 8),
woosh.Token(woosh.STRING, '"""Set an option."""', 892, 8, 892, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 892, 28, 893, 0),
woosh.Token(woosh.NAME, 'if', 893, 8, 893, 10),
woosh.Token(woosh.NAME, 'value', 893, 11, 893, 16),
woosh.Token(woosh.OP, ':', 893, 16, 893, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 893, 17, 894, 0),
woosh.Token(woosh.INDENT, ' ', 894, 0, 894, 12),
woosh.Token(woosh.NAME, 'value', 894, 12, 894, 17),
woosh.Token(woosh.OP, '=', 894, 18, 894, 19),
woosh.Token(woosh.NAME, 'self', 894, 20, 894, 24),
woosh.Token(woosh.OP, '.', 894, 24, 894, 25),
woosh.Token(woosh.NAME, '_interpolation', 894, 25, 894, 39),
woosh.Token(woosh.OP, '.', 894, 39, 894, 40),
woosh.Token(woosh.NAME, 'before_set', 894, 40, 894, 50),
woosh.Token(woosh.OP, '(', 894, 50, 894, 51),
woosh.Token(woosh.NAME, 'self', 894, 51, 894, 55),
woosh.Token(woosh.OP, ',', 894, 55, 894, 56),
woosh.Token(woosh.NAME, 'section', 894, 57, 894, 64),
woosh.Token(woosh.OP, ',', 894, 64, 894, 65),
woosh.Token(woosh.NAME, 'option', 894, 66, 894, 72),
woosh.Token(woosh.OP, ',', 894, 72, 894, 73),
woosh.Token(woosh.NAME, 'value', 895, 51, 895, 56),
woosh.Token(woosh.OP, ')', 895, 56, 895, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 895, 57, 896, 0),
woosh.Token(woosh.DEDENT, ' ', 896, 0, 896, 8),
woosh.Token(woosh.NAME, 'if', 896, 8, 896, 10),
woosh.Token(woosh.NAME, 'not', 896, 11, 896, 14),
woosh.Token(woosh.NAME, 'section', 896, 15, 896, 22),
woosh.Token(woosh.NAME, 'or', 896, 23, 896, 25),
woosh.Token(woosh.NAME, 'section', 896, 26, 896, 33),
woosh.Token(woosh.OP, '==', 896, 34, 896, 36),
woosh.Token(woosh.NAME, 'self', 896, 37, 896, 41),
woosh.Token(woosh.OP, '.', 896, 41, 896, 42),
woosh.Token(woosh.NAME, 'default_section', 896, 42, 896, 57),
woosh.Token(woosh.OP, ':', 896, 57, 896, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 896, 58, 897, 0),
woosh.Token(woosh.INDENT, ' ', 897, 0, 897, 12),
woosh.Token(woosh.NAME, 'sectdict', 897, 12, 897, 20),
woosh.Token(woosh.OP, '=', 897, 21, 897, 22),
woosh.Token(woosh.NAME, 'self', 897, 23, 897, 27),
woosh.Token(woosh.OP, '.', 897, 27, 897, 28),
woosh.Token(woosh.NAME, '_defaults', 897, 28, 897, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 897, 37, 898, 0),
woosh.Token(woosh.DEDENT, ' ', 898, 0, 898, 8),
woosh.Token(woosh.NAME, 'else', 898, 8, 898, 12),
woosh.Token(woosh.OP, ':', 898, 12, 898, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 898, 13, 899, 0),
woosh.Token(woosh.INDENT, ' ', 899, 0, 899, 12),
woosh.Token(woosh.NAME, 'try', 899, 12, 899, 15),
woosh.Token(woosh.OP, ':', 899, 15, 899, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 899, 16, 900, 0),
woosh.Token(woosh.INDENT, ' ', 900, 0, 900, 16),
woosh.Token(woosh.NAME, 'sectdict', 900, 16, 900, 24),
woosh.Token(woosh.OP, '=', 900, 25, 900, 26),
woosh.Token(woosh.NAME, 'self', 900, 27, 900, 31),
woosh.Token(woosh.OP, '.', 900, 31, 900, 32),
woosh.Token(woosh.NAME, '_sections', 900, 32, 900, 41),
woosh.Token(woosh.OP, '[', 900, 41, 900, 42),
woosh.Token(woosh.NAME, 'section', 900, 42, 900, 49),
woosh.Token(woosh.OP, ']', 900, 49, 900, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 900, 50, 901, 0),
woosh.Token(woosh.DEDENT, ' ', 901, 0, 901, 12),
woosh.Token(woosh.NAME, 'except', 901, 12, 901, 18),
woosh.Token(woosh.NAME, 'KeyError', 901, 19, 901, 27),
woosh.Token(woosh.OP, ':', 901, 27, 901, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 901, 28, 902, 0),
woosh.Token(woosh.INDENT, ' ', 902, 0, 902, 16),
woosh.Token(woosh.NAME, 'raise', 902, 16, 902, 21),
woosh.Token(woosh.NAME, 'NoSectionError', 902, 22, 902, 36),
woosh.Token(woosh.OP, '(', 902, 36, 902, 37),
woosh.Token(woosh.NAME, 'section', 902, 37, 902, 44),
woosh.Token(woosh.OP, ')', 902, 44, 902, 45),
woosh.Token(woosh.NAME, 'from', 902, 46, 902, 50),
woosh.Token(woosh.NAME, 'None', 902, 51, 902, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 902, 55, 903, 0),
woosh.Token(woosh.DEDENT, ' ', 903, 0, 903, 8),
woosh.Token(woosh.DEDENT, '', 903, 8, 903, 8),
woosh.Token(woosh.NAME, 'sectdict', 903, 8, 903, 16),
woosh.Token(woosh.OP, '[', 903, 16, 903, 17),
woosh.Token(woosh.NAME, 'self', 903, 17, 903, 21),
woosh.Token(woosh.OP, '.', 903, 21, 903, 22),
woosh.Token(woosh.NAME, 'optionxform', 903, 22, 903, 33),
woosh.Token(woosh.OP, '(', 903, 33, 903, 34),
woosh.Token(woosh.NAME, 'option', 903, 34, 903, 40),
woosh.Token(woosh.OP, ')', 903, 40, 903, 41),
woosh.Token(woosh.OP, ']', 903, 41, 903, 42),
woosh.Token(woosh.OP, '=', 903, 43, 903, 44),
woosh.Token(woosh.NAME, 'value', 903, 45, 903, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 903, 50, 904, 0),
woosh.Token(woosh.DEDENT, ' ', 905, 0, 905, 4),
woosh.Token(woosh.NAME, 'def', 905, 4, 905, 7),
woosh.Token(woosh.NAME, 'write', 905, 8, 905, 13),
woosh.Token(woosh.OP, '(', 905, 13, 905, 14),
woosh.Token(woosh.NAME, 'self', 905, 14, 905, 18),
woosh.Token(woosh.OP, ',', 905, 18, 905, 19),
woosh.Token(woosh.NAME, 'fp', 905, 20, 905, 22),
woosh.Token(woosh.OP, ',', 905, 22, 905, 23),
woosh.Token(woosh.NAME, 'space_around_delimiters', 905, 24, 905, 47),
woosh.Token(woosh.OP, '=', 905, 47, 905, 48),
woosh.Token(woosh.NAME, 'True', 905, 48, 905, 52),
woosh.Token(woosh.OP, ')', 905, 52, 905, 53),
woosh.Token(woosh.OP, ':', 905, 53, 905, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 905, 54, 906, 0),
woosh.Token(woosh.INDENT, ' ', 906, 0, 906, 8),
woosh.Token(woosh.STRING, '"""Write an .ini-format representation of the configuration state.\r\n\r\n If `space_around_delimiters\' is True (the default), delimiters\r\n between keys and values are surrounded by spaces.\r\n """', 906, 8, 910, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 910, 11, 911, 0),
woosh.Token(woosh.NAME, 'if', 911, 8, 911, 10),
woosh.Token(woosh.NAME, 'space_around_delimiters', 911, 11, 911, 34),
woosh.Token(woosh.OP, ':', 911, 34, 911, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 911, 35, 912, 0),
woosh.Token(woosh.INDENT, ' ', 912, 0, 912, 12),
woosh.Token(woosh.NAME, 'd', 912, 12, 912, 13),
woosh.Token(woosh.OP, '=', 912, 14, 912, 15),
woosh.Token(woosh.STRING, '" {} "', 912, 16, 912, 22),
woosh.Token(woosh.OP, '.', 912, 22, 912, 23),
woosh.Token(woosh.NAME, 'format', 912, 23, 912, 29),
woosh.Token(woosh.OP, '(', 912, 29, 912, 30),
woosh.Token(woosh.NAME, 'self', 912, 30, 912, 34),
woosh.Token(woosh.OP, '.', 912, 34, 912, 35),
woosh.Token(woosh.NAME, '_delimiters', 912, 35, 912, 46),
woosh.Token(woosh.OP, '[', 912, 46, 912, 47),
woosh.Token(woosh.NUMBER, '0', 912, 47, 912, 48),
woosh.Token(woosh.OP, ']', 912, 48, 912, 49),
woosh.Token(woosh.OP, ')', 912, 49, 912, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 912, 50, 913, 0),
woosh.Token(woosh.DEDENT, ' ', 913, 0, 913, 8),
woosh.Token(woosh.NAME, 'else', 913, 8, 913, 12),
woosh.Token(woosh.OP, ':', 913, 12, 913, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 913, 13, 914, 0),
woosh.Token(woosh.INDENT, ' ', 914, 0, 914, 12),
woosh.Token(woosh.NAME, 'd', 914, 12, 914, 13),
woosh.Token(woosh.OP, '=', 914, 14, 914, 15),
woosh.Token(woosh.NAME, 'self', 914, 16, 914, 20),
woosh.Token(woosh.OP, '.', 914, 20, 914, 21),
woosh.Token(woosh.NAME, '_delimiters', 914, 21, 914, 32),
woosh.Token(woosh.OP, '[', 914, 32, 914, 33),
woosh.Token(woosh.NUMBER, '0', 914, 33, 914, 34),
woosh.Token(woosh.OP, ']', 914, 34, 914, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 914, 35, 915, 0),
woosh.Token(woosh.DEDENT, ' ', 915, 0, 915, 8),
woosh.Token(woosh.NAME, 'if', 915, 8, 915, 10),
woosh.Token(woosh.NAME, 'self', 915, 11, 915, 15),
woosh.Token(woosh.OP, '.', 915, 15, 915, 16),
woosh.Token(woosh.NAME, '_defaults', 915, 16, 915, 25),
woosh.Token(woosh.OP, ':', 915, 25, 915, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 915, 26, 916, 0),
woosh.Token(woosh.INDENT, ' ', 916, 0, 916, 12),
woosh.Token(woosh.NAME, 'self', 916, 12, 916, 16),
woosh.Token(woosh.OP, '.', 916, 16, 916, 17),
woosh.Token(woosh.NAME, '_write_section', 916, 17, 916, 31),
woosh.Token(woosh.OP, '(', 916, 31, 916, 32),
woosh.Token(woosh.NAME, 'fp', 916, 32, 916, 34),
woosh.Token(woosh.OP, ',', 916, 34, 916, 35),
woosh.Token(woosh.NAME, 'self', 916, 36, 916, 40),
woosh.Token(woosh.OP, '.', 916, 40, 916, 41),
woosh.Token(woosh.NAME, 'default_section', 916, 41, 916, 56),
woosh.Token(woosh.OP, ',', 916, 56, 916, 57),
woosh.Token(woosh.NAME, 'self', 917, 36, 917, 40),
woosh.Token(woosh.OP, '.', 917, 40, 917, 41),
woosh.Token(woosh.NAME, '_defaults', 917, 41, 917, 50),
woosh.Token(woosh.OP, '.', 917, 50, 917, 51),
woosh.Token(woosh.NAME, 'items', 917, 51, 917, 56),
woosh.Token(woosh.OP, '(', 917, 56, 917, 57),
woosh.Token(woosh.OP, ')', 917, 57, 917, 58),
woosh.Token(woosh.OP, ',', 917, 58, 917, 59),
woosh.Token(woosh.NAME, 'd', 917, 60, 917, 61),
woosh.Token(woosh.OP, ')', 917, 61, 917, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 917, 62, 918, 0),
woosh.Token(woosh.DEDENT, ' ', 918, 0, 918, 8),
woosh.Token(woosh.NAME, 'for', 918, 8, 918, 11),
woosh.Token(woosh.NAME, 'section', 918, 12, 918, 19),
woosh.Token(woosh.NAME, 'in', 918, 20, 918, 22),
woosh.Token(woosh.NAME, 'self', 918, 23, 918, 27),
woosh.Token(woosh.OP, '.', 918, 27, 918, 28),
woosh.Token(woosh.NAME, '_sections', 918, 28, 918, 37),
woosh.Token(woosh.OP, ':', 918, 37, 918, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 918, 38, 919, 0),
woosh.Token(woosh.INDENT, ' ', 919, 0, 919, 12),
woosh.Token(woosh.NAME, 'self', 919, 12, 919, 16),
woosh.Token(woosh.OP, '.', 919, 16, 919, 17),
woosh.Token(woosh.NAME, '_write_section', 919, 17, 919, 31),
woosh.Token(woosh.OP, '(', 919, 31, 919, 32),
woosh.Token(woosh.NAME, 'fp', 919, 32, 919, 34),
woosh.Token(woosh.OP, ',', 919, 34, 919, 35),
woosh.Token(woosh.NAME, 'section', 919, 36, 919, 43),
woosh.Token(woosh.OP, ',', 919, 43, 919, 44),
woosh.Token(woosh.NAME, 'self', 920, 32, 920, 36),
woosh.Token(woosh.OP, '.', 920, 36, 920, 37),
woosh.Token(woosh.NAME, '_sections', 920, 37, 920, 46),
woosh.Token(woosh.OP, '[', 920, 46, 920, 47),
woosh.Token(woosh.NAME, 'section', 920, 47, 920, 54),
woosh.Token(woosh.OP, ']', 920, 54, 920, 55),
woosh.Token(woosh.OP, '.', 920, 55, 920, 56),
woosh.Token(woosh.NAME, 'items', 920, 56, 920, 61),
woosh.Token(woosh.OP, '(', 920, 61, 920, 62),
woosh.Token(woosh.OP, ')', 920, 62, 920, 63),
woosh.Token(woosh.OP, ',', 920, 63, 920, 64),
woosh.Token(woosh.NAME, 'd', 920, 65, 920, 66),
woosh.Token(woosh.OP, ')', 920, 66, 920, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 920, 67, 921, 0),
woosh.Token(woosh.DEDENT, ' ', 922, 0, 922, 4),
woosh.Token(woosh.DEDENT, '', 922, 4, 922, 4),
woosh.Token(woosh.NAME, 'def', 922, 4, 922, 7),
woosh.Token(woosh.NAME, '_write_section', 922, 8, 922, 22),
woosh.Token(woosh.OP, '(', 922, 22, 922, 23),
woosh.Token(woosh.NAME, 'self', 922, 23, 922, 27),
woosh.Token(woosh.OP, ',', 922, 27, 922, 28),
woosh.Token(woosh.NAME, 'fp', 922, 29, 922, 31),
woosh.Token(woosh.OP, ',', 922, 31, 922, 32),
woosh.Token(woosh.NAME, 'section_name', 922, 33, 922, 45),
woosh.Token(woosh.OP, ',', 922, 45, 922, 46),
woosh.Token(woosh.NAME, 'section_items', 922, 47, 922, 60),
woosh.Token(woosh.OP, ',', 922, 60, 922, 61),
woosh.Token(woosh.NAME, 'delimiter', 922, 62, 922, 71),
woosh.Token(woosh.OP, ')', 922, 71, 922, 72),
woosh.Token(woosh.OP, ':', 922, 72, 922, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 922, 73, 923, 0),
woosh.Token(woosh.INDENT, ' ', 923, 0, 923, 8),
woosh.Token(woosh.STRING, '"""Write a single section to the specified `fp\'."""', 923, 8, 923, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 923, 59, 924, 0),
woosh.Token(woosh.NAME, 'fp', 924, 8, 924, 10),
woosh.Token(woosh.OP, '.', 924, 10, 924, 11),
woosh.Token(woosh.NAME, 'write', 924, 11, 924, 16),
woosh.Token(woosh.OP, '(', 924, 16, 924, 17),
woosh.Token(woosh.STRING, '"[{}]\\n"', 924, 17, 924, 25),
woosh.Token(woosh.OP, '.', 924, 25, 924, 26),
woosh.Token(woosh.NAME, 'format', 924, 26, 924, 32),
woosh.Token(woosh.OP, '(', 924, 32, 924, 33),
woosh.Token(woosh.NAME, 'section_name', 924, 33, 924, 45),
woosh.Token(woosh.OP, ')', 924, 45, 924, 46),
woosh.Token(woosh.OP, ')', 924, 46, 924, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 924, 47, 925, 0),
woosh.Token(woosh.NAME, 'for', 925, 8, 925, 11),
woosh.Token(woosh.NAME, 'key', 925, 12, 925, 15),
woosh.Token(woosh.OP, ',', 925, 15, 925, 16),
woosh.Token(woosh.NAME, 'value', 925, 17, 925, 22),
woosh.Token(woosh.NAME, 'in', 925, 23, 925, 25),
woosh.Token(woosh.NAME, 'section_items', 925, 26, 925, 39),
woosh.Token(woosh.OP, ':', 925, 39, 925, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 925, 40, 926, 0),
woosh.Token(woosh.INDENT, ' ', 926, 0, 926, 12),
woosh.Token(woosh.NAME, 'value', 926, 12, 926, 17),
woosh.Token(woosh.OP, '=', 926, 18, 926, 19),
woosh.Token(woosh.NAME, 'self', 926, 20, 926, 24),
woosh.Token(woosh.OP, '.', 926, 24, 926, 25),
woosh.Token(woosh.NAME, '_interpolation', 926, 25, 926, 39),
woosh.Token(woosh.OP, '.', 926, 39, 926, 40),
woosh.Token(woosh.NAME, 'before_write', 926, 40, 926, 52),
woosh.Token(woosh.OP, '(', 926, 52, 926, 53),
woosh.Token(woosh.NAME, 'self', 926, 53, 926, 57),
woosh.Token(woosh.OP, ',', 926, 57, 926, 58),
woosh.Token(woosh.NAME, 'section_name', 926, 59, 926, 71),
woosh.Token(woosh.OP, ',', 926, 71, 926, 72),
woosh.Token(woosh.NAME, 'key', 926, 73, 926, 76),
woosh.Token(woosh.OP, ',', 926, 76, 926, 77),
woosh.Token(woosh.NAME, 'value', 927, 53, 927, 58),
woosh.Token(woosh.OP, ')', 927, 58, 927, 59),
woosh.Token(woosh.NEWLINE, '\r\n', 927, 59, 928, 0),
woosh.Token(woosh.NAME, 'if', 928, 12, 928, 14),
woosh.Token(woosh.NAME, 'value', 928, 15, 928, 20),
woosh.Token(woosh.NAME, 'is', 928, 21, 928, 23),
woosh.Token(woosh.NAME, 'not', 928, 24, 928, 27),
woosh.Token(woosh.NAME, 'None', 928, 28, 928, 32),
woosh.Token(woosh.NAME, 'or', 928, 33, 928, 35),
woosh.Token(woosh.NAME, 'not', 928, 36, 928, 39),
woosh.Token(woosh.NAME, 'self', 928, 40, 928, 44),
woosh.Token(woosh.OP, '.', 928, 44, 928, 45),
woosh.Token(woosh.NAME, '_allow_no_value', 928, 45, 928, 60),
woosh.Token(woosh.OP, ':', 928, 60, 928, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 928, 61, 929, 0),
woosh.Token(woosh.INDENT, ' ', 929, 0, 929, 16),
woosh.Token(woosh.NAME, 'value', 929, 16, 929, 21),
woosh.Token(woosh.OP, '=', 929, 22, 929, 23),
woosh.Token(woosh.NAME, 'delimiter', 929, 24, 929, 33),
woosh.Token(woosh.OP, '+', 929, 34, 929, 35),
woosh.Token(woosh.NAME, 'str', 929, 36, 929, 39),
woosh.Token(woosh.OP, '(', 929, 39, 929, 40),
woosh.Token(woosh.NAME, 'value', 929, 40, 929, 45),
woosh.Token(woosh.OP, ')', 929, 45, 929, 46),
woosh.Token(woosh.OP, '.', 929, 46, 929, 47),
woosh.Token(woosh.NAME, 'replace', 929, 47, 929, 54),
woosh.Token(woosh.OP, '(', 929, 54, 929, 55),
woosh.Token(woosh.STRING, "'\\n'", 929, 55, 929, 59),
woosh.Token(woosh.OP, ',', 929, 59, 929, 60),
woosh.Token(woosh.STRING, "'\\n\\t'", 929, 61, 929, 67),
woosh.Token(woosh.OP, ')', 929, 67, 929, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 929, 68, 930, 0),
woosh.Token(woosh.DEDENT, ' ', 930, 0, 930, 12),
woosh.Token(woosh.NAME, 'else', 930, 12, 930, 16),
woosh.Token(woosh.OP, ':', 930, 16, 930, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 930, 17, 931, 0),
woosh.Token(woosh.INDENT, ' ', 931, 0, 931, 16),
woosh.Token(woosh.NAME, 'value', 931, 16, 931, 21),
woosh.Token(woosh.OP, '=', 931, 22, 931, 23),
woosh.Token(woosh.STRING, '""', 931, 24, 931, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 931, 26, 932, 0),
woosh.Token(woosh.DEDENT, ' ', 932, 0, 932, 12),
woosh.Token(woosh.NAME, 'fp', 932, 12, 932, 14),
woosh.Token(woosh.OP, '.', 932, 14, 932, 15),
woosh.Token(woosh.NAME, 'write', 932, 15, 932, 20),
woosh.Token(woosh.OP, '(', 932, 20, 932, 21),
woosh.Token(woosh.STRING, '"{}{}\\n"', 932, 21, 932, 29),
woosh.Token(woosh.OP, '.', 932, 29, 932, 30),
woosh.Token(woosh.NAME, 'format', 932, 30, 932, 36),
woosh.Token(woosh.OP, '(', 932, 36, 932, 37),
woosh.Token(woosh.NAME, 'key', 932, 37, 932, 40),
woosh.Token(woosh.OP, ',', 932, 40, 932, 41),
woosh.Token(woosh.NAME, 'value', 932, 42, 932, 47),
woosh.Token(woosh.OP, ')', 932, 47, 932, 48),
woosh.Token(woosh.OP, ')', 932, 48, 932, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 932, 49, 933, 0),
woosh.Token(woosh.DEDENT, ' ', 933, 0, 933, 8),
woosh.Token(woosh.NAME, 'fp', 933, 8, 933, 10),
woosh.Token(woosh.OP, '.', 933, 10, 933, 11),
woosh.Token(woosh.NAME, 'write', 933, 11, 933, 16),
woosh.Token(woosh.OP, '(', 933, 16, 933, 17),
woosh.Token(woosh.STRING, '"\\n"', 933, 17, 933, 21),
woosh.Token(woosh.OP, ')', 933, 21, 933, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 933, 22, 934, 0),
woosh.Token(woosh.DEDENT, ' ', 935, 0, 935, 4),
woosh.Token(woosh.NAME, 'def', 935, 4, 935, 7),
woosh.Token(woosh.NAME, 'remove_option', 935, 8, 935, 21),
woosh.Token(woosh.OP, '(', 935, 21, 935, 22),
woosh.Token(woosh.NAME, 'self', 935, 22, 935, 26),
woosh.Token(woosh.OP, ',', 935, 26, 935, 27),
woosh.Token(woosh.NAME, 'section', 935, 28, 935, 35),
woosh.Token(woosh.OP, ',', 935, 35, 935, 36),
woosh.Token(woosh.NAME, 'option', 935, 37, 935, 43),
woosh.Token(woosh.OP, ')', 935, 43, 935, 44),
woosh.Token(woosh.OP, ':', 935, 44, 935, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 935, 45, 936, 0),
woosh.Token(woosh.INDENT, ' ', 936, 0, 936, 8),
woosh.Token(woosh.STRING, '"""Remove an option."""', 936, 8, 936, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 936, 31, 937, 0),
woosh.Token(woosh.NAME, 'if', 937, 8, 937, 10),
woosh.Token(woosh.NAME, 'not', 937, 11, 937, 14),
woosh.Token(woosh.NAME, 'section', 937, 15, 937, 22),
woosh.Token(woosh.NAME, 'or', 937, 23, 937, 25),
woosh.Token(woosh.NAME, 'section', 937, 26, 937, 33),
woosh.Token(woosh.OP, '==', 937, 34, 937, 36),
woosh.Token(woosh.NAME, 'self', 937, 37, 937, 41),
woosh.Token(woosh.OP, '.', 937, 41, 937, 42),
woosh.Token(woosh.NAME, 'default_section', 937, 42, 937, 57),
woosh.Token(woosh.OP, ':', 937, 57, 937, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 937, 58, 938, 0),
woosh.Token(woosh.INDENT, ' ', 938, 0, 938, 12),
woosh.Token(woosh.NAME, 'sectdict', 938, 12, 938, 20),
woosh.Token(woosh.OP, '=', 938, 21, 938, 22),
woosh.Token(woosh.NAME, 'self', 938, 23, 938, 27),
woosh.Token(woosh.OP, '.', 938, 27, 938, 28),
woosh.Token(woosh.NAME, '_defaults', 938, 28, 938, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 938, 37, 939, 0),
woosh.Token(woosh.DEDENT, ' ', 939, 0, 939, 8),
woosh.Token(woosh.NAME, 'else', 939, 8, 939, 12),
woosh.Token(woosh.OP, ':', 939, 12, 939, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 939, 13, 940, 0),
woosh.Token(woosh.INDENT, ' ', 940, 0, 940, 12),
woosh.Token(woosh.NAME, 'try', 940, 12, 940, 15),
woosh.Token(woosh.OP, ':', 940, 15, 940, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 940, 16, 941, 0),
woosh.Token(woosh.INDENT, ' ', 941, 0, 941, 16),
woosh.Token(woosh.NAME, 'sectdict', 941, 16, 941, 24),
woosh.Token(woosh.OP, '=', 941, 25, 941, 26),
woosh.Token(woosh.NAME, 'self', 941, 27, 941, 31),
woosh.Token(woosh.OP, '.', 941, 31, 941, 32),
woosh.Token(woosh.NAME, '_sections', 941, 32, 941, 41),
woosh.Token(woosh.OP, '[', 941, 41, 941, 42),
woosh.Token(woosh.NAME, 'section', 941, 42, 941, 49),
woosh.Token(woosh.OP, ']', 941, 49, 941, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 941, 50, 942, 0),
woosh.Token(woosh.DEDENT, ' ', 942, 0, 942, 12),
woosh.Token(woosh.NAME, 'except', 942, 12, 942, 18),
woosh.Token(woosh.NAME, 'KeyError', 942, 19, 942, 27),
woosh.Token(woosh.OP, ':', 942, 27, 942, 28),
woosh.Token(woosh.NEWLINE, '\r\n', 942, 28, 943, 0),
woosh.Token(woosh.INDENT, ' ', 943, 0, 943, 16),
woosh.Token(woosh.NAME, 'raise', 943, 16, 943, 21),
woosh.Token(woosh.NAME, 'NoSectionError', 943, 22, 943, 36),
woosh.Token(woosh.OP, '(', 943, 36, 943, 37),
woosh.Token(woosh.NAME, 'section', 943, 37, 943, 44),
woosh.Token(woosh.OP, ')', 943, 44, 943, 45),
woosh.Token(woosh.NAME, 'from', 943, 46, 943, 50),
woosh.Token(woosh.NAME, 'None', 943, 51, 943, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 943, 55, 944, 0),
woosh.Token(woosh.DEDENT, ' ', 944, 0, 944, 8),
woosh.Token(woosh.DEDENT, '', 944, 8, 944, 8),
woosh.Token(woosh.NAME, 'option', 944, 8, 944, 14),
woosh.Token(woosh.OP, '=', 944, 15, 944, 16),
woosh.Token(woosh.NAME, 'self', 944, 17, 944, 21),
woosh.Token(woosh.OP, '.', 944, 21, 944, 22),
woosh.Token(woosh.NAME, 'optionxform', 944, 22, 944, 33),
woosh.Token(woosh.OP, '(', 944, 33, 944, 34),
woosh.Token(woosh.NAME, 'option', 944, 34, 944, 40),
woosh.Token(woosh.OP, ')', 944, 40, 944, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 944, 41, 945, 0),
woosh.Token(woosh.NAME, 'existed', 945, 8, 945, 15),
woosh.Token(woosh.OP, '=', 945, 16, 945, 17),
woosh.Token(woosh.NAME, 'option', 945, 18, 945, 24),
woosh.Token(woosh.NAME, 'in', 945, 25, 945, 27),
woosh.Token(woosh.NAME, 'sectdict', 945, 28, 945, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 945, 36, 946, 0),
woosh.Token(woosh.NAME, 'if', 946, 8, 946, 10),
woosh.Token(woosh.NAME, 'existed', 946, 11, 946, 18),
woosh.Token(woosh.OP, ':', 946, 18, 946, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 946, 19, 947, 0),
woosh.Token(woosh.INDENT, ' ', 947, 0, 947, 12),
woosh.Token(woosh.NAME, 'del', 947, 12, 947, 15),
woosh.Token(woosh.NAME, 'sectdict', 947, 16, 947, 24),
woosh.Token(woosh.OP, '[', 947, 24, 947, 25),
woosh.Token(woosh.NAME, 'option', 947, 25, 947, 31),
woosh.Token(woosh.OP, ']', 947, 31, 947, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 947, 32, 948, 0),
woosh.Token(woosh.DEDENT, ' ', 948, 0, 948, 8),
woosh.Token(woosh.NAME, 'return', 948, 8, 948, 14),
woosh.Token(woosh.NAME, 'existed', 948, 15, 948, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 948, 22, 949, 0),
woosh.Token(woosh.DEDENT, ' ', 950, 0, 950, 4),
woosh.Token(woosh.NAME, 'def', 950, 4, 950, 7),
woosh.Token(woosh.NAME, 'remove_section', 950, 8, 950, 22),
woosh.Token(woosh.OP, '(', 950, 22, 950, 23),
woosh.Token(woosh.NAME, 'self', 950, 23, 950, 27),
woosh.Token(woosh.OP, ',', 950, 27, 950, 28),
woosh.Token(woosh.NAME, 'section', 950, 29, 950, 36),
woosh.Token(woosh.OP, ')', 950, 36, 950, 37),
woosh.Token(woosh.OP, ':', 950, 37, 950, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 950, 38, 951, 0),
woosh.Token(woosh.INDENT, ' ', 951, 0, 951, 8),
woosh.Token(woosh.STRING, '"""Remove a file section."""', 951, 8, 951, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 951, 36, 952, 0),
woosh.Token(woosh.NAME, 'existed', 952, 8, 952, 15),
woosh.Token(woosh.OP, '=', 952, 16, 952, 17),
woosh.Token(woosh.NAME, 'section', 952, 18, 952, 25),
woosh.Token(woosh.NAME, 'in', 952, 26, 952, 28),
woosh.Token(woosh.NAME, 'self', 952, 29, 952, 33),
woosh.Token(woosh.OP, '.', 952, 33, 952, 34),
woosh.Token(woosh.NAME, '_sections', 952, 34, 952, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 952, 43, 953, 0),
woosh.Token(woosh.NAME, 'if', 953, 8, 953, 10),
woosh.Token(woosh.NAME, 'existed', 953, 11, 953, 18),
woosh.Token(woosh.OP, ':', 953, 18, 953, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 953, 19, 954, 0),
woosh.Token(woosh.INDENT, ' ', 954, 0, 954, 12),
woosh.Token(woosh.NAME, 'del', 954, 12, 954, 15),
woosh.Token(woosh.NAME, 'self', 954, 16, 954, 20),
woosh.Token(woosh.OP, '.', 954, 20, 954, 21),
woosh.Token(woosh.NAME, '_sections', 954, 21, 954, 30),
woosh.Token(woosh.OP, '[', 954, 30, 954, 31),
woosh.Token(woosh.NAME, 'section', 954, 31, 954, 38),
woosh.Token(woosh.OP, ']', 954, 38, 954, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 954, 39, 955, 0),
woosh.Token(woosh.NAME, 'del', 955, 12, 955, 15),
woosh.Token(woosh.NAME, 'self', 955, 16, 955, 20),
woosh.Token(woosh.OP, '.', 955, 20, 955, 21),
woosh.Token(woosh.NAME, '_proxies', 955, 21, 955, 29),
woosh.Token(woosh.OP, '[', 955, 29, 955, 30),
woosh.Token(woosh.NAME, 'section', 955, 30, 955, 37),
woosh.Token(woosh.OP, ']', 955, 37, 955, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 955, 38, 956, 0),
woosh.Token(woosh.DEDENT, ' ', 956, 0, 956, 8),
woosh.Token(woosh.NAME, 'return', 956, 8, 956, 14),
woosh.Token(woosh.NAME, 'existed', 956, 15, 956, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 956, 22, 957, 0),
woosh.Token(woosh.DEDENT, ' ', 958, 0, 958, 4),
woosh.Token(woosh.NAME, 'def', 958, 4, 958, 7),
woosh.Token(woosh.NAME, '__getitem__', 958, 8, 958, 19),
woosh.Token(woosh.OP, '(', 958, 19, 958, 20),
woosh.Token(woosh.NAME, 'self', 958, 20, 958, 24),
woosh.Token(woosh.OP, ',', 958, 24, 958, 25),
woosh.Token(woosh.NAME, 'key', 958, 26, 958, 29),
woosh.Token(woosh.OP, ')', 958, 29, 958, 30),
woosh.Token(woosh.OP, ':', 958, 30, 958, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 958, 31, 959, 0),
woosh.Token(woosh.INDENT, ' ', 959, 0, 959, 8),
woosh.Token(woosh.NAME, 'if', 959, 8, 959, 10),
woosh.Token(woosh.NAME, 'key', 959, 11, 959, 14),
woosh.Token(woosh.OP, '!=', 959, 15, 959, 17),
woosh.Token(woosh.NAME, 'self', 959, 18, 959, 22),
woosh.Token(woosh.OP, '.', 959, 22, 959, 23),
woosh.Token(woosh.NAME, 'default_section', 959, 23, 959, 38),
woosh.Token(woosh.NAME, 'and', 959, 39, 959, 42),
woosh.Token(woosh.NAME, 'not', 959, 43, 959, 46),
woosh.Token(woosh.NAME, 'self', 959, 47, 959, 51),
woosh.Token(woosh.OP, '.', 959, 51, 959, 52),
woosh.Token(woosh.NAME, 'has_section', 959, 52, 959, 63),
woosh.Token(woosh.OP, '(', 959, 63, 959, 64),
woosh.Token(woosh.NAME, 'key', 959, 64, 959, 67),
woosh.Token(woosh.OP, ')', 959, 67, 959, 68),
woosh.Token(woosh.OP, ':', 959, 68, 959, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 959, 69, 960, 0),
woosh.Token(woosh.INDENT, ' ', 960, 0, 960, 12),
woosh.Token(woosh.NAME, 'raise', 960, 12, 960, 17),
woosh.Token(woosh.NAME, 'KeyError', 960, 18, 960, 26),
woosh.Token(woosh.OP, '(', 960, 26, 960, 27),
woosh.Token(woosh.NAME, 'key', 960, 27, 960, 30),
woosh.Token(woosh.OP, ')', 960, 30, 960, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 960, 31, 961, 0),
woosh.Token(woosh.DEDENT, ' ', 961, 0, 961, 8),
woosh.Token(woosh.NAME, 'return', 961, 8, 961, 14),
woosh.Token(woosh.NAME, 'self', 961, 15, 961, 19),
woosh.Token(woosh.OP, '.', 961, 19, 961, 20),
woosh.Token(woosh.NAME, '_proxies', 961, 20, 961, 28),
woosh.Token(woosh.OP, '[', 961, 28, 961, 29),
woosh.Token(woosh.NAME, 'key', 961, 29, 961, 32),
woosh.Token(woosh.OP, ']', 961, 32, 961, 33),
woosh.Token(woosh.NEWLINE, '\r\n', 961, 33, 962, 0),
woosh.Token(woosh.DEDENT, ' ', 963, 0, 963, 4),
woosh.Token(woosh.NAME, 'def', 963, 4, 963, 7),
woosh.Token(woosh.NAME, '__setitem__', 963, 8, 963, 19),
woosh.Token(woosh.OP, '(', 963, 19, 963, 20),
woosh.Token(woosh.NAME, 'self', 963, 20, 963, 24),
woosh.Token(woosh.OP, ',', 963, 24, 963, 25),
woosh.Token(woosh.NAME, 'key', 963, 26, 963, 29),
woosh.Token(woosh.OP, ',', 963, 29, 963, 30),
woosh.Token(woosh.NAME, 'value', 963, 31, 963, 36),
woosh.Token(woosh.OP, ')', 963, 36, 963, 37),
woosh.Token(woosh.OP, ':', 963, 37, 963, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 963, 38, 964, 0),
woosh.Token(woosh.COMMENT, '# To conform with the mapping protocol, overwrites existing values in', 964, 8, 964, 77),
woosh.Token(woosh.COMMENT, '# the section.', 965, 8, 965, 22),
woosh.Token(woosh.INDENT, ' ', 966, 0, 966, 8),
woosh.Token(woosh.NAME, 'if', 966, 8, 966, 10),
woosh.Token(woosh.NAME, 'key', 966, 11, 966, 14),
woosh.Token(woosh.NAME, 'in', 966, 15, 966, 17),
woosh.Token(woosh.NAME, 'self', 966, 18, 966, 22),
woosh.Token(woosh.NAME, 'and', 966, 23, 966, 26),
woosh.Token(woosh.NAME, 'self', 966, 27, 966, 31),
woosh.Token(woosh.OP, '[', 966, 31, 966, 32),
woosh.Token(woosh.NAME, 'key', 966, 32, 966, 35),
woosh.Token(woosh.OP, ']', 966, 35, 966, 36),
woosh.Token(woosh.NAME, 'is', 966, 37, 966, 39),
woosh.Token(woosh.NAME, 'value', 966, 40, 966, 45),
woosh.Token(woosh.OP, ':', 966, 45, 966, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 966, 46, 967, 0),
woosh.Token(woosh.INDENT, ' ', 967, 0, 967, 12),
woosh.Token(woosh.NAME, 'return', 967, 12, 967, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 967, 18, 968, 0),
woosh.Token(woosh.COMMENT, '# XXX this is not atomic if read_dict fails at any point. Then again,', 968, 8, 968, 77),
woosh.Token(woosh.COMMENT, '# no update method in configparser is atomic in this implementation.', 969, 8, 969, 76),
woosh.Token(woosh.DEDENT, ' ', 970, 0, 970, 8),
woosh.Token(woosh.NAME, 'if', 970, 8, 970, 10),
woosh.Token(woosh.NAME, 'key', 970, 11, 970, 14),
woosh.Token(woosh.OP, '==', 970, 15, 970, 17),
woosh.Token(woosh.NAME, 'self', 970, 18, 970, 22),
woosh.Token(woosh.OP, '.', 970, 22, 970, 23),
woosh.Token(woosh.NAME, 'default_section', 970, 23, 970, 38),
woosh.Token(woosh.OP, ':', 970, 38, 970, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 970, 39, 971, 0),
woosh.Token(woosh.INDENT, ' ', 971, 0, 971, 12),
woosh.Token(woosh.NAME, 'self', 971, 12, 971, 16),
woosh.Token(woosh.OP, '.', 971, 16, 971, 17),
woosh.Token(woosh.NAME, '_defaults', 971, 17, 971, 26),
woosh.Token(woosh.OP, '.', 971, 26, 971, 27),
woosh.Token(woosh.NAME, 'clear', 971, 27, 971, 32),
woosh.Token(woosh.OP, '(', 971, 32, 971, 33),
woosh.Token(woosh.OP, ')', 971, 33, 971, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 971, 34, 972, 0),
woosh.Token(woosh.DEDENT, ' ', 972, 0, 972, 8),
woosh.Token(woosh.NAME, 'elif', 972, 8, 972, 12),
woosh.Token(woosh.NAME, 'key', 972, 13, 972, 16),
woosh.Token(woosh.NAME, 'in', 972, 17, 972, 19),
woosh.Token(woosh.NAME, 'self', 972, 20, 972, 24),
woosh.Token(woosh.OP, '.', 972, 24, 972, 25),
woosh.Token(woosh.NAME, '_sections', 972, 25, 972, 34),
woosh.Token(woosh.OP, ':', 972, 34, 972, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 972, 35, 973, 0),
woosh.Token(woosh.INDENT, ' ', 973, 0, 973, 12),
woosh.Token(woosh.NAME, 'self', 973, 12, 973, 16),
woosh.Token(woosh.OP, '.', 973, 16, 973, 17),
woosh.Token(woosh.NAME, '_sections', 973, 17, 973, 26),
woosh.Token(woosh.OP, '[', 973, 26, 973, 27),
woosh.Token(woosh.NAME, 'key', 973, 27, 973, 30),
woosh.Token(woosh.OP, ']', 973, 30, 973, 31),
woosh.Token(woosh.OP, '.', 973, 31, 973, 32),
woosh.Token(woosh.NAME, 'clear', 973, 32, 973, 37),
woosh.Token(woosh.OP, '(', 973, 37, 973, 38),
woosh.Token(woosh.OP, ')', 973, 38, 973, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 973, 39, 974, 0),
woosh.Token(woosh.DEDENT, ' ', 974, 0, 974, 8),
woosh.Token(woosh.NAME, 'self', 974, 8, 974, 12),
woosh.Token(woosh.OP, '.', 974, 12, 974, 13),
woosh.Token(woosh.NAME, 'read_dict', 974, 13, 974, 22),
woosh.Token(woosh.OP, '(', 974, 22, 974, 23),
woosh.Token(woosh.OP, '{', 974, 23, 974, 24),
woosh.Token(woosh.NAME, 'key', 974, 24, 974, 27),
woosh.Token(woosh.OP, ':', 974, 27, 974, 28),
woosh.Token(woosh.NAME, 'value', 974, 29, 974, 34),
woosh.Token(woosh.OP, '}', 974, 34, 974, 35),
woosh.Token(woosh.OP, ')', 974, 35, 974, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 974, 36, 975, 0),
woosh.Token(woosh.DEDENT, ' ', 976, 0, 976, 4),
woosh.Token(woosh.NAME, 'def', 976, 4, 976, 7),
woosh.Token(woosh.NAME, '__delitem__', 976, 8, 976, 19),
woosh.Token(woosh.OP, '(', 976, 19, 976, 20),
woosh.Token(woosh.NAME, 'self', 976, 20, 976, 24),
woosh.Token(woosh.OP, ',', 976, 24, 976, 25),
woosh.Token(woosh.NAME, 'key', 976, 26, 976, 29),
woosh.Token(woosh.OP, ')', 976, 29, 976, 30),
woosh.Token(woosh.OP, ':', 976, 30, 976, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 976, 31, 977, 0),
woosh.Token(woosh.INDENT, ' ', 977, 0, 977, 8),
woosh.Token(woosh.NAME, 'if', 977, 8, 977, 10),
woosh.Token(woosh.NAME, 'key', 977, 11, 977, 14),
woosh.Token(woosh.OP, '==', 977, 15, 977, 17),
woosh.Token(woosh.NAME, 'self', 977, 18, 977, 22),
woosh.Token(woosh.OP, '.', 977, 22, 977, 23),
woosh.Token(woosh.NAME, 'default_section', 977, 23, 977, 38),
woosh.Token(woosh.OP, ':', 977, 38, 977, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 977, 39, 978, 0),
woosh.Token(woosh.INDENT, ' ', 978, 0, 978, 12),
woosh.Token(woosh.NAME, 'raise', 978, 12, 978, 17),
woosh.Token(woosh.NAME, 'ValueError', 978, 18, 978, 28),
woosh.Token(woosh.OP, '(', 978, 28, 978, 29),
woosh.Token(woosh.STRING, '"Cannot remove the default section."', 978, 29, 978, 65),
woosh.Token(woosh.OP, ')', 978, 65, 978, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 978, 66, 979, 0),
woosh.Token(woosh.DEDENT, ' ', 979, 0, 979, 8),
woosh.Token(woosh.NAME, 'if', 979, 8, 979, 10),
woosh.Token(woosh.NAME, 'not', 979, 11, 979, 14),
woosh.Token(woosh.NAME, 'self', 979, 15, 979, 19),
woosh.Token(woosh.OP, '.', 979, 19, 979, 20),
woosh.Token(woosh.NAME, 'has_section', 979, 20, 979, 31),
woosh.Token(woosh.OP, '(', 979, 31, 979, 32),
woosh.Token(woosh.NAME, 'key', 979, 32, 979, 35),
woosh.Token(woosh.OP, ')', 979, 35, 979, 36),
woosh.Token(woosh.OP, ':', 979, 36, 979, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 979, 37, 980, 0),
woosh.Token(woosh.INDENT, ' ', 980, 0, 980, 12),
woosh.Token(woosh.NAME, 'raise', 980, 12, 980, 17),
woosh.Token(woosh.NAME, 'KeyError', 980, 18, 980, 26),
woosh.Token(woosh.OP, '(', 980, 26, 980, 27),
woosh.Token(woosh.NAME, 'key', 980, 27, 980, 30),
woosh.Token(woosh.OP, ')', 980, 30, 980, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 980, 31, 981, 0),
woosh.Token(woosh.DEDENT, ' ', 981, 0, 981, 8),
woosh.Token(woosh.NAME, 'self', 981, 8, 981, 12),
woosh.Token(woosh.OP, '.', 981, 12, 981, 13),
woosh.Token(woosh.NAME, 'remove_section', 981, 13, 981, 27),
woosh.Token(woosh.OP, '(', 981, 27, 981, 28),
woosh.Token(woosh.NAME, 'key', 981, 28, 981, 31),
woosh.Token(woosh.OP, ')', 981, 31, 981, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 981, 32, 982, 0),
woosh.Token(woosh.DEDENT, ' ', 983, 0, 983, 4),
woosh.Token(woosh.NAME, 'def', 983, 4, 983, 7),
woosh.Token(woosh.NAME, '__contains__', 983, 8, 983, 20),
woosh.Token(woosh.OP, '(', 983, 20, 983, 21),
woosh.Token(woosh.NAME, 'self', 983, 21, 983, 25),
woosh.Token(woosh.OP, ',', 983, 25, 983, 26),
woosh.Token(woosh.NAME, 'key', 983, 27, 983, 30),
woosh.Token(woosh.OP, ')', 983, 30, 983, 31),
woosh.Token(woosh.OP, ':', 983, 31, 983, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 983, 32, 984, 0),
woosh.Token(woosh.INDENT, ' ', 984, 0, 984, 8),
woosh.Token(woosh.NAME, 'return', 984, 8, 984, 14),
woosh.Token(woosh.NAME, 'key', 984, 15, 984, 18),
woosh.Token(woosh.OP, '==', 984, 19, 984, 21),
woosh.Token(woosh.NAME, 'self', 984, 22, 984, 26),
woosh.Token(woosh.OP, '.', 984, 26, 984, 27),
woosh.Token(woosh.NAME, 'default_section', 984, 27, 984, 42),
woosh.Token(woosh.NAME, 'or', 984, 43, 984, 45),
woosh.Token(woosh.NAME, 'self', 984, 46, 984, 50),
woosh.Token(woosh.OP, '.', 984, 50, 984, 51),
woosh.Token(woosh.NAME, 'has_section', 984, 51, 984, 62),
woosh.Token(woosh.OP, '(', 984, 62, 984, 63),
woosh.Token(woosh.NAME, 'key', 984, 63, 984, 66),
woosh.Token(woosh.OP, ')', 984, 66, 984, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 984, 67, 985, 0),
woosh.Token(woosh.DEDENT, ' ', 986, 0, 986, 4),
woosh.Token(woosh.NAME, 'def', 986, 4, 986, 7),
woosh.Token(woosh.NAME, '__len__', 986, 8, 986, 15),
woosh.Token(woosh.OP, '(', 986, 15, 986, 16),
woosh.Token(woosh.NAME, 'self', 986, 16, 986, 20),
woosh.Token(woosh.OP, ')', 986, 20, 986, 21),
woosh.Token(woosh.OP, ':', 986, 21, 986, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 986, 22, 987, 0),
woosh.Token(woosh.INDENT, ' ', 987, 0, 987, 8),
woosh.Token(woosh.NAME, 'return', 987, 8, 987, 14),
woosh.Token(woosh.NAME, 'len', 987, 15, 987, 18),
woosh.Token(woosh.OP, '(', 987, 18, 987, 19),
woosh.Token(woosh.NAME, 'self', 987, 19, 987, 23),
woosh.Token(woosh.OP, '.', 987, 23, 987, 24),
woosh.Token(woosh.NAME, '_sections', 987, 24, 987, 33),
woosh.Token(woosh.OP, ')', 987, 33, 987, 34),
woosh.Token(woosh.OP, '+', 987, 35, 987, 36),
woosh.Token(woosh.NUMBER, '1', 987, 37, 987, 38),
woosh.Token(woosh.COMMENT, '# the default section', 987, 39, 987, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 987, 60, 988, 0),
woosh.Token(woosh.DEDENT, ' ', 989, 0, 989, 4),
woosh.Token(woosh.NAME, 'def', 989, 4, 989, 7),
woosh.Token(woosh.NAME, '__iter__', 989, 8, 989, 16),
woosh.Token(woosh.OP, '(', 989, 16, 989, 17),
woosh.Token(woosh.NAME, 'self', 989, 17, 989, 21),
woosh.Token(woosh.OP, ')', 989, 21, 989, 22),
woosh.Token(woosh.OP, ':', 989, 22, 989, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 989, 23, 990, 0),
woosh.Token(woosh.COMMENT, '# XXX does it break when underlying container state changed?', 990, 8, 990, 68),
woosh.Token(woosh.INDENT, ' ', 991, 0, 991, 8),
woosh.Token(woosh.NAME, 'return', 991, 8, 991, 14),
woosh.Token(woosh.NAME, 'itertools', 991, 15, 991, 24),
woosh.Token(woosh.OP, '.', 991, 24, 991, 25),
woosh.Token(woosh.NAME, 'chain', 991, 25, 991, 30),
woosh.Token(woosh.OP, '(', 991, 30, 991, 31),
woosh.Token(woosh.OP, '(', 991, 31, 991, 32),
woosh.Token(woosh.NAME, 'self', 991, 32, 991, 36),
woosh.Token(woosh.OP, '.', 991, 36, 991, 37),
woosh.Token(woosh.NAME, 'default_section', 991, 37, 991, 52),
woosh.Token(woosh.OP, ',', 991, 52, 991, 53),
woosh.Token(woosh.OP, ')', 991, 53, 991, 54),
woosh.Token(woosh.OP, ',', 991, 54, 991, 55),
woosh.Token(woosh.NAME, 'self', 991, 56, 991, 60),
woosh.Token(woosh.OP, '.', 991, 60, 991, 61),
woosh.Token(woosh.NAME, '_sections', 991, 61, 991, 70),
woosh.Token(woosh.OP, '.', 991, 70, 991, 71),
woosh.Token(woosh.NAME, 'keys', 991, 71, 991, 75),
woosh.Token(woosh.OP, '(', 991, 75, 991, 76),
woosh.Token(woosh.OP, ')', 991, 76, 991, 77),
woosh.Token(woosh.OP, ')', 991, 77, 991, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 991, 78, 992, 0),
woosh.Token(woosh.DEDENT, ' ', 993, 0, 993, 4),
woosh.Token(woosh.NAME, 'def', 993, 4, 993, 7),
woosh.Token(woosh.NAME, '_read', 993, 8, 993, 13),
woosh.Token(woosh.OP, '(', 993, 13, 993, 14),
woosh.Token(woosh.NAME, 'self', 993, 14, 993, 18),
woosh.Token(woosh.OP, ',', 993, 18, 993, 19),
woosh.Token(woosh.NAME, 'fp', 993, 20, 993, 22),
woosh.Token(woosh.OP, ',', 993, 22, 993, 23),
woosh.Token(woosh.NAME, 'fpname', 993, 24, 993, 30),
woosh.Token(woosh.OP, ')', 993, 30, 993, 31),
woosh.Token(woosh.OP, ':', 993, 31, 993, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 993, 32, 994, 0),
woosh.Token(woosh.INDENT, ' ', 994, 0, 994, 8),
woosh.Token(woosh.STRING, '"""Parse a sectioned configuration file.\r\n\r\n Each section in a configuration file contains a header, indicated by\r\n a name in square brackets (`[]\'), plus key/value options, indicated by\r\n `name\' and `value\' delimited with a specific substring (`=\' or `:\' by\r\n default).\r\n\r\n Values can span multiple lines, as long as they are indented deeper\r\n than the first line of the value. Depending on the parser\'s mode, blank\r\n lines may be treated as parts of multiline values or ignored.\r\n\r\n Configuration files may include comments, prefixed by specific\r\n characters (`#\' and `;\' by default). Comments may appear on their own\r\n in an otherwise empty line or may be entered in lines holding values or\r\n section names.\r\n """', 994, 8, 1009, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 1009, 11, 1010, 0),
woosh.Token(woosh.NAME, 'elements_added', 1010, 8, 1010, 22),
woosh.Token(woosh.OP, '=', 1010, 23, 1010, 24),
woosh.Token(woosh.NAME, 'set', 1010, 25, 1010, 28),
woosh.Token(woosh.OP, '(', 1010, 28, 1010, 29),
woosh.Token(woosh.OP, ')', 1010, 29, 1010, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1010, 30, 1011, 0),
woosh.Token(woosh.NAME, 'cursect', 1011, 8, 1011, 15),
woosh.Token(woosh.OP, '=', 1011, 16, 1011, 17),
woosh.Token(woosh.NAME, 'None', 1011, 18, 1011, 22),
woosh.Token(woosh.COMMENT, '# None, or a dictionary', 1011, 46, 1011, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1011, 69, 1012, 0),
woosh.Token(woosh.NAME, 'sectname', 1012, 8, 1012, 16),
woosh.Token(woosh.OP, '=', 1012, 17, 1012, 18),
woosh.Token(woosh.NAME, 'None', 1012, 19, 1012, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1012, 23, 1013, 0),
woosh.Token(woosh.NAME, 'optname', 1013, 8, 1013, 15),
woosh.Token(woosh.OP, '=', 1013, 16, 1013, 17),
woosh.Token(woosh.NAME, 'None', 1013, 18, 1013, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1013, 22, 1014, 0),
woosh.Token(woosh.NAME, 'lineno', 1014, 8, 1014, 14),
woosh.Token(woosh.OP, '=', 1014, 15, 1014, 16),
woosh.Token(woosh.NUMBER, '0', 1014, 17, 1014, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1014, 18, 1015, 0),
woosh.Token(woosh.NAME, 'indent_level', 1015, 8, 1015, 20),
woosh.Token(woosh.OP, '=', 1015, 21, 1015, 22),
woosh.Token(woosh.NUMBER, '0', 1015, 23, 1015, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1015, 24, 1016, 0),
woosh.Token(woosh.NAME, 'e', 1016, 8, 1016, 9),
woosh.Token(woosh.OP, '=', 1016, 10, 1016, 11),
woosh.Token(woosh.NAME, 'None', 1016, 12, 1016, 16),
woosh.Token(woosh.COMMENT, '# None, or an exception', 1016, 46, 1016, 69),
woosh.Token(woosh.NEWLINE, '\r\n', 1016, 69, 1017, 0),
woosh.Token(woosh.NAME, 'for', 1017, 8, 1017, 11),
woosh.Token(woosh.NAME, 'lineno', 1017, 12, 1017, 18),
woosh.Token(woosh.OP, ',', 1017, 18, 1017, 19),
woosh.Token(woosh.NAME, 'line', 1017, 20, 1017, 24),
woosh.Token(woosh.NAME, 'in', 1017, 25, 1017, 27),
woosh.Token(woosh.NAME, 'enumerate', 1017, 28, 1017, 37),
woosh.Token(woosh.OP, '(', 1017, 37, 1017, 38),
woosh.Token(woosh.NAME, 'fp', 1017, 38, 1017, 40),
woosh.Token(woosh.OP, ',', 1017, 40, 1017, 41),
woosh.Token(woosh.NAME, 'start', 1017, 42, 1017, 47),
woosh.Token(woosh.OP, '=', 1017, 47, 1017, 48),
woosh.Token(woosh.NUMBER, '1', 1017, 48, 1017, 49),
woosh.Token(woosh.OP, ')', 1017, 49, 1017, 50),
woosh.Token(woosh.OP, ':', 1017, 50, 1017, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1017, 51, 1018, 0),
woosh.Token(woosh.INDENT, ' ', 1018, 0, 1018, 12),
woosh.Token(woosh.NAME, 'comment_start', 1018, 12, 1018, 25),
woosh.Token(woosh.OP, '=', 1018, 26, 1018, 27),
woosh.Token(woosh.NAME, 'sys', 1018, 28, 1018, 31),
woosh.Token(woosh.OP, '.', 1018, 31, 1018, 32),
woosh.Token(woosh.NAME, 'maxsize', 1018, 32, 1018, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1018, 39, 1019, 0),
woosh.Token(woosh.COMMENT, '# strip inline comments', 1019, 12, 1019, 35),
woosh.Token(woosh.NAME, 'inline_prefixes', 1020, 12, 1020, 27),
woosh.Token(woosh.OP, '=', 1020, 28, 1020, 29),
woosh.Token(woosh.OP, '{', 1020, 30, 1020, 31),
woosh.Token(woosh.NAME, 'p', 1020, 31, 1020, 32),
woosh.Token(woosh.OP, ':', 1020, 32, 1020, 33),
woosh.Token(woosh.OP, '-', 1020, 34, 1020, 35),
woosh.Token(woosh.NUMBER, '1', 1020, 35, 1020, 36),
woosh.Token(woosh.NAME, 'for', 1020, 37, 1020, 40),
woosh.Token(woosh.NAME, 'p', 1020, 41, 1020, 42),
woosh.Token(woosh.NAME, 'in', 1020, 43, 1020, 45),
woosh.Token(woosh.NAME, 'self', 1020, 46, 1020, 50),
woosh.Token(woosh.OP, '.', 1020, 50, 1020, 51),
woosh.Token(woosh.NAME, '_inline_comment_prefixes', 1020, 51, 1020, 75),
woosh.Token(woosh.OP, '}', 1020, 75, 1020, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1020, 76, 1021, 0),
woosh.Token(woosh.NAME, 'while', 1021, 12, 1021, 17),
woosh.Token(woosh.NAME, 'comment_start', 1021, 18, 1021, 31),
woosh.Token(woosh.OP, '==', 1021, 32, 1021, 34),
woosh.Token(woosh.NAME, 'sys', 1021, 35, 1021, 38),
woosh.Token(woosh.OP, '.', 1021, 38, 1021, 39),
woosh.Token(woosh.NAME, 'maxsize', 1021, 39, 1021, 46),
woosh.Token(woosh.NAME, 'and', 1021, 47, 1021, 50),
woosh.Token(woosh.NAME, 'inline_prefixes', 1021, 51, 1021, 66),
woosh.Token(woosh.OP, ':', 1021, 66, 1021, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 1021, 67, 1022, 0),
woosh.Token(woosh.INDENT, ' ', 1022, 0, 1022, 16),
woosh.Token(woosh.NAME, 'next_prefixes', 1022, 16, 1022, 29),
woosh.Token(woosh.OP, '=', 1022, 30, 1022, 31),
woosh.Token(woosh.OP, '{', 1022, 32, 1022, 33),
woosh.Token(woosh.OP, '}', 1022, 33, 1022, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1022, 34, 1023, 0),
woosh.Token(woosh.NAME, 'for', 1023, 16, 1023, 19),
woosh.Token(woosh.NAME, 'prefix', 1023, 20, 1023, 26),
woosh.Token(woosh.OP, ',', 1023, 26, 1023, 27),
woosh.Token(woosh.NAME, 'index', 1023, 28, 1023, 33),
woosh.Token(woosh.NAME, 'in', 1023, 34, 1023, 36),
woosh.Token(woosh.NAME, 'inline_prefixes', 1023, 37, 1023, 52),
woosh.Token(woosh.OP, '.', 1023, 52, 1023, 53),
woosh.Token(woosh.NAME, 'items', 1023, 53, 1023, 58),
woosh.Token(woosh.OP, '(', 1023, 58, 1023, 59),
woosh.Token(woosh.OP, ')', 1023, 59, 1023, 60),
woosh.Token(woosh.OP, ':', 1023, 60, 1023, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1023, 61, 1024, 0),
woosh.Token(woosh.INDENT, ' ', 1024, 0, 1024, 20),
woosh.Token(woosh.NAME, 'index', 1024, 20, 1024, 25),
woosh.Token(woosh.OP, '=', 1024, 26, 1024, 27),
woosh.Token(woosh.NAME, 'line', 1024, 28, 1024, 32),
woosh.Token(woosh.OP, '.', 1024, 32, 1024, 33),
woosh.Token(woosh.NAME, 'find', 1024, 33, 1024, 37),
woosh.Token(woosh.OP, '(', 1024, 37, 1024, 38),
woosh.Token(woosh.NAME, 'prefix', 1024, 38, 1024, 44),
woosh.Token(woosh.OP, ',', 1024, 44, 1024, 45),
woosh.Token(woosh.NAME, 'index', 1024, 46, 1024, 51),
woosh.Token(woosh.OP, '+', 1024, 51, 1024, 52),
woosh.Token(woosh.NUMBER, '1', 1024, 52, 1024, 53),
woosh.Token(woosh.OP, ')', 1024, 53, 1024, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1024, 54, 1025, 0),
woosh.Token(woosh.NAME, 'if', 1025, 20, 1025, 22),
woosh.Token(woosh.NAME, 'index', 1025, 23, 1025, 28),
woosh.Token(woosh.OP, '==', 1025, 29, 1025, 31),
woosh.Token(woosh.OP, '-', 1025, 32, 1025, 33),
woosh.Token(woosh.NUMBER, '1', 1025, 33, 1025, 34),
woosh.Token(woosh.OP, ':', 1025, 34, 1025, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1025, 35, 1026, 0),
woosh.Token(woosh.INDENT, ' ', 1026, 0, 1026, 24),
woosh.Token(woosh.NAME, 'continue', 1026, 24, 1026, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1026, 32, 1027, 0),
woosh.Token(woosh.DEDENT, ' ', 1027, 0, 1027, 20),
woosh.Token(woosh.NAME, 'next_prefixes', 1027, 20, 1027, 33),
woosh.Token(woosh.OP, '[', 1027, 33, 1027, 34),
woosh.Token(woosh.NAME, 'prefix', 1027, 34, 1027, 40),
woosh.Token(woosh.OP, ']', 1027, 40, 1027, 41),
woosh.Token(woosh.OP, '=', 1027, 42, 1027, 43),
woosh.Token(woosh.NAME, 'index', 1027, 44, 1027, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1027, 49, 1028, 0),
woosh.Token(woosh.NAME, 'if', 1028, 20, 1028, 22),
woosh.Token(woosh.NAME, 'index', 1028, 23, 1028, 28),
woosh.Token(woosh.OP, '==', 1028, 29, 1028, 31),
woosh.Token(woosh.NUMBER, '0', 1028, 32, 1028, 33),
woosh.Token(woosh.NAME, 'or', 1028, 34, 1028, 36),
woosh.Token(woosh.OP, '(', 1028, 37, 1028, 38),
woosh.Token(woosh.NAME, 'index', 1028, 38, 1028, 43),
woosh.Token(woosh.OP, '>', 1028, 44, 1028, 45),
woosh.Token(woosh.NUMBER, '0', 1028, 46, 1028, 47),
woosh.Token(woosh.NAME, 'and', 1028, 48, 1028, 51),
woosh.Token(woosh.NAME, 'line', 1028, 52, 1028, 56),
woosh.Token(woosh.OP, '[', 1028, 56, 1028, 57),
woosh.Token(woosh.NAME, 'index', 1028, 57, 1028, 62),
woosh.Token(woosh.OP, '-', 1028, 62, 1028, 63),
woosh.Token(woosh.NUMBER, '1', 1028, 63, 1028, 64),
woosh.Token(woosh.OP, ']', 1028, 64, 1028, 65),
woosh.Token(woosh.OP, '.', 1028, 65, 1028, 66),
woosh.Token(woosh.NAME, 'isspace', 1028, 66, 1028, 73),
woosh.Token(woosh.OP, '(', 1028, 73, 1028, 74),
woosh.Token(woosh.OP, ')', 1028, 74, 1028, 75),
woosh.Token(woosh.OP, ')', 1028, 75, 1028, 76),
woosh.Token(woosh.OP, ':', 1028, 76, 1028, 77),
woosh.Token(woosh.NEWLINE, '\r\n', 1028, 77, 1029, 0),
woosh.Token(woosh.INDENT, ' ', 1029, 0, 1029, 24),
woosh.Token(woosh.NAME, 'comment_start', 1029, 24, 1029, 37),
woosh.Token(woosh.OP, '=', 1029, 38, 1029, 39),
woosh.Token(woosh.NAME, 'min', 1029, 40, 1029, 43),
woosh.Token(woosh.OP, '(', 1029, 43, 1029, 44),
woosh.Token(woosh.NAME, 'comment_start', 1029, 44, 1029, 57),
woosh.Token(woosh.OP, ',', 1029, 57, 1029, 58),
woosh.Token(woosh.NAME, 'index', 1029, 59, 1029, 64),
woosh.Token(woosh.OP, ')', 1029, 64, 1029, 65),
woosh.Token(woosh.NEWLINE, '\r\n', 1029, 65, 1030, 0),
woosh.Token(woosh.DEDENT, ' ', 1030, 0, 1030, 16),
woosh.Token(woosh.DEDENT, '', 1030, 16, 1030, 16),
woosh.Token(woosh.NAME, 'inline_prefixes', 1030, 16, 1030, 31),
woosh.Token(woosh.OP, '=', 1030, 32, 1030, 33),
woosh.Token(woosh.NAME, 'next_prefixes', 1030, 34, 1030, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1030, 47, 1031, 0),
woosh.Token(woosh.COMMENT, '# strip full line comments', 1031, 12, 1031, 38),
woosh.Token(woosh.DEDENT, ' ', 1032, 0, 1032, 12),
woosh.Token(woosh.NAME, 'for', 1032, 12, 1032, 15),
woosh.Token(woosh.NAME, 'prefix', 1032, 16, 1032, 22),
woosh.Token(woosh.NAME, 'in', 1032, 23, 1032, 25),
woosh.Token(woosh.NAME, 'self', 1032, 26, 1032, 30),
woosh.Token(woosh.OP, '.', 1032, 30, 1032, 31),
woosh.Token(woosh.NAME, '_comment_prefixes', 1032, 31, 1032, 48),
woosh.Token(woosh.OP, ':', 1032, 48, 1032, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1032, 49, 1033, 0),
woosh.Token(woosh.INDENT, ' ', 1033, 0, 1033, 16),
woosh.Token(woosh.NAME, 'if', 1033, 16, 1033, 18),
woosh.Token(woosh.NAME, 'line', 1033, 19, 1033, 23),
woosh.Token(woosh.OP, '.', 1033, 23, 1033, 24),
woosh.Token(woosh.NAME, 'strip', 1033, 24, 1033, 29),
woosh.Token(woosh.OP, '(', 1033, 29, 1033, 30),
woosh.Token(woosh.OP, ')', 1033, 30, 1033, 31),
woosh.Token(woosh.OP, '.', 1033, 31, 1033, 32),
woosh.Token(woosh.NAME, 'startswith', 1033, 32, 1033, 42),
woosh.Token(woosh.OP, '(', 1033, 42, 1033, 43),
woosh.Token(woosh.NAME, 'prefix', 1033, 43, 1033, 49),
woosh.Token(woosh.OP, ')', 1033, 49, 1033, 50),
woosh.Token(woosh.OP, ':', 1033, 50, 1033, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1033, 51, 1034, 0),
woosh.Token(woosh.INDENT, ' ', 1034, 0, 1034, 20),
woosh.Token(woosh.NAME, 'comment_start', 1034, 20, 1034, 33),
woosh.Token(woosh.OP, '=', 1034, 34, 1034, 35),
woosh.Token(woosh.NUMBER, '0', 1034, 36, 1034, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1034, 37, 1035, 0),
woosh.Token(woosh.NAME, 'break', 1035, 20, 1035, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1035, 25, 1036, 0),
woosh.Token(woosh.DEDENT, ' ', 1036, 0, 1036, 12),
woosh.Token(woosh.DEDENT, '', 1036, 12, 1036, 12),
woosh.Token(woosh.NAME, 'if', 1036, 12, 1036, 14),
woosh.Token(woosh.NAME, 'comment_start', 1036, 15, 1036, 28),
woosh.Token(woosh.OP, '==', 1036, 29, 1036, 31),
woosh.Token(woosh.NAME, 'sys', 1036, 32, 1036, 35),
woosh.Token(woosh.OP, '.', 1036, 35, 1036, 36),
woosh.Token(woosh.NAME, 'maxsize', 1036, 36, 1036, 43),
woosh.Token(woosh.OP, ':', 1036, 43, 1036, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 1036, 44, 1037, 0),
woosh.Token(woosh.INDENT, ' ', 1037, 0, 1037, 16),
woosh.Token(woosh.NAME, 'comment_start', 1037, 16, 1037, 29),
woosh.Token(woosh.OP, '=', 1037, 30, 1037, 31),
woosh.Token(woosh.NAME, 'None', 1037, 32, 1037, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1037, 36, 1038, 0),
woosh.Token(woosh.DEDENT, ' ', 1038, 0, 1038, 12),
woosh.Token(woosh.NAME, 'value', 1038, 12, 1038, 17),
woosh.Token(woosh.OP, '=', 1038, 18, 1038, 19),
woosh.Token(woosh.NAME, 'line', 1038, 20, 1038, 24),
woosh.Token(woosh.OP, '[', 1038, 24, 1038, 25),
woosh.Token(woosh.OP, ':', 1038, 25, 1038, 26),
woosh.Token(woosh.NAME, 'comment_start', 1038, 26, 1038, 39),
woosh.Token(woosh.OP, ']', 1038, 39, 1038, 40),
woosh.Token(woosh.OP, '.', 1038, 40, 1038, 41),
woosh.Token(woosh.NAME, 'strip', 1038, 41, 1038, 46),
woosh.Token(woosh.OP, '(', 1038, 46, 1038, 47),
woosh.Token(woosh.OP, ')', 1038, 47, 1038, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1038, 48, 1039, 0),
woosh.Token(woosh.NAME, 'if', 1039, 12, 1039, 14),
woosh.Token(woosh.NAME, 'not', 1039, 15, 1039, 18),
woosh.Token(woosh.NAME, 'value', 1039, 19, 1039, 24),
woosh.Token(woosh.OP, ':', 1039, 24, 1039, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1039, 25, 1040, 0),
woosh.Token(woosh.INDENT, ' ', 1040, 0, 1040, 16),
woosh.Token(woosh.NAME, 'if', 1040, 16, 1040, 18),
woosh.Token(woosh.NAME, 'self', 1040, 19, 1040, 23),
woosh.Token(woosh.OP, '.', 1040, 23, 1040, 24),
woosh.Token(woosh.NAME, '_empty_lines_in_values', 1040, 24, 1040, 46),
woosh.Token(woosh.OP, ':', 1040, 46, 1040, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1040, 47, 1041, 0),
woosh.Token(woosh.COMMENT, '# add empty line to the value, but only if there was no', 1041, 20, 1041, 75),
woosh.Token(woosh.COMMENT, '# comment on the line', 1042, 20, 1042, 41),
woosh.Token(woosh.INDENT, ' ', 1043, 0, 1043, 20),
woosh.Token(woosh.NAME, 'if', 1043, 20, 1043, 22),
woosh.Token(woosh.OP, '(', 1043, 23, 1043, 24),
woosh.Token(woosh.NAME, 'comment_start', 1043, 24, 1043, 37),
woosh.Token(woosh.NAME, 'is', 1043, 38, 1043, 40),
woosh.Token(woosh.NAME, 'None', 1043, 41, 1043, 45),
woosh.Token(woosh.NAME, 'and', 1043, 46, 1043, 49),
woosh.Token(woosh.NAME, 'cursect', 1044, 24, 1044, 31),
woosh.Token(woosh.NAME, 'is', 1044, 32, 1044, 34),
woosh.Token(woosh.NAME, 'not', 1044, 35, 1044, 38),
woosh.Token(woosh.NAME, 'None', 1044, 39, 1044, 43),
woosh.Token(woosh.NAME, 'and', 1044, 44, 1044, 47),
woosh.Token(woosh.NAME, 'optname', 1045, 24, 1045, 31),
woosh.Token(woosh.NAME, 'and', 1045, 32, 1045, 35),
woosh.Token(woosh.NAME, 'cursect', 1046, 24, 1046, 31),
woosh.Token(woosh.OP, '[', 1046, 31, 1046, 32),
woosh.Token(woosh.NAME, 'optname', 1046, 32, 1046, 39),
woosh.Token(woosh.OP, ']', 1046, 39, 1046, 40),
woosh.Token(woosh.NAME, 'is', 1046, 41, 1046, 43),
woosh.Token(woosh.NAME, 'not', 1046, 44, 1046, 47),
woosh.Token(woosh.NAME, 'None', 1046, 48, 1046, 52),
woosh.Token(woosh.OP, ')', 1046, 52, 1046, 53),
woosh.Token(woosh.OP, ':', 1046, 53, 1046, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1046, 54, 1047, 0),
woosh.Token(woosh.INDENT, ' ', 1047, 0, 1047, 24),
woosh.Token(woosh.NAME, 'cursect', 1047, 24, 1047, 31),
woosh.Token(woosh.OP, '[', 1047, 31, 1047, 32),
woosh.Token(woosh.NAME, 'optname', 1047, 32, 1047, 39),
woosh.Token(woosh.OP, ']', 1047, 39, 1047, 40),
woosh.Token(woosh.OP, '.', 1047, 40, 1047, 41),
woosh.Token(woosh.NAME, 'append', 1047, 41, 1047, 47),
woosh.Token(woosh.OP, '(', 1047, 47, 1047, 48),
woosh.Token(woosh.STRING, "''", 1047, 48, 1047, 50),
woosh.Token(woosh.OP, ')', 1047, 50, 1047, 51),
woosh.Token(woosh.COMMENT, '# newlines added at join', 1047, 52, 1047, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1047, 76, 1048, 0),
woosh.Token(woosh.DEDENT, ' ', 1048, 0, 1048, 16),
woosh.Token(woosh.DEDENT, '', 1048, 16, 1048, 16),
woosh.Token(woosh.NAME, 'else', 1048, 16, 1048, 20),
woosh.Token(woosh.OP, ':', 1048, 20, 1048, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1048, 21, 1049, 0),
woosh.Token(woosh.COMMENT, '# empty line marks end of value', 1049, 20, 1049, 51),
woosh.Token(woosh.INDENT, ' ', 1050, 0, 1050, 20),
woosh.Token(woosh.NAME, 'indent_level', 1050, 20, 1050, 32),
woosh.Token(woosh.OP, '=', 1050, 33, 1050, 34),
woosh.Token(woosh.NAME, 'sys', 1050, 35, 1050, 38),
woosh.Token(woosh.OP, '.', 1050, 38, 1050, 39),
woosh.Token(woosh.NAME, 'maxsize', 1050, 39, 1050, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1050, 46, 1051, 0),
woosh.Token(woosh.DEDENT, ' ', 1051, 0, 1051, 16),
woosh.Token(woosh.NAME, 'continue', 1051, 16, 1051, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1051, 24, 1052, 0),
woosh.Token(woosh.COMMENT, '# continuation line?', 1052, 12, 1052, 32),
woosh.Token(woosh.DEDENT, ' ', 1053, 0, 1053, 12),
woosh.Token(woosh.NAME, 'first_nonspace', 1053, 12, 1053, 26),
woosh.Token(woosh.OP, '=', 1053, 27, 1053, 28),
woosh.Token(woosh.NAME, 'self', 1053, 29, 1053, 33),
woosh.Token(woosh.OP, '.', 1053, 33, 1053, 34),
woosh.Token(woosh.NAME, 'NONSPACECRE', 1053, 34, 1053, 45),
woosh.Token(woosh.OP, '.', 1053, 45, 1053, 46),
woosh.Token(woosh.NAME, 'search', 1053, 46, 1053, 52),
woosh.Token(woosh.OP, '(', 1053, 52, 1053, 53),
woosh.Token(woosh.NAME, 'line', 1053, 53, 1053, 57),
woosh.Token(woosh.OP, ')', 1053, 57, 1053, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1053, 58, 1054, 0),
woosh.Token(woosh.NAME, 'cur_indent_level', 1054, 12, 1054, 28),
woosh.Token(woosh.OP, '=', 1054, 29, 1054, 30),
woosh.Token(woosh.NAME, 'first_nonspace', 1054, 31, 1054, 45),
woosh.Token(woosh.OP, '.', 1054, 45, 1054, 46),
woosh.Token(woosh.NAME, 'start', 1054, 46, 1054, 51),
woosh.Token(woosh.OP, '(', 1054, 51, 1054, 52),
woosh.Token(woosh.OP, ')', 1054, 52, 1054, 53),
woosh.Token(woosh.NAME, 'if', 1054, 54, 1054, 56),
woosh.Token(woosh.NAME, 'first_nonspace', 1054, 57, 1054, 71),
woosh.Token(woosh.NAME, 'else', 1054, 72, 1054, 76),
woosh.Token(woosh.NUMBER, '0', 1054, 77, 1054, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 1054, 78, 1055, 0),
woosh.Token(woosh.NAME, 'if', 1055, 12, 1055, 14),
woosh.Token(woosh.OP, '(', 1055, 15, 1055, 16),
woosh.Token(woosh.NAME, 'cursect', 1055, 16, 1055, 23),
woosh.Token(woosh.NAME, 'is', 1055, 24, 1055, 26),
woosh.Token(woosh.NAME, 'not', 1055, 27, 1055, 30),
woosh.Token(woosh.NAME, 'None', 1055, 31, 1055, 35),
woosh.Token(woosh.NAME, 'and', 1055, 36, 1055, 39),
woosh.Token(woosh.NAME, 'optname', 1055, 40, 1055, 47),
woosh.Token(woosh.NAME, 'and', 1055, 48, 1055, 51),
woosh.Token(woosh.NAME, 'cur_indent_level', 1056, 16, 1056, 32),
woosh.Token(woosh.OP, '>', 1056, 33, 1056, 34),
woosh.Token(woosh.NAME, 'indent_level', 1056, 35, 1056, 47),
woosh.Token(woosh.OP, ')', 1056, 47, 1056, 48),
woosh.Token(woosh.OP, ':', 1056, 48, 1056, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1056, 49, 1057, 0),
woosh.Token(woosh.INDENT, ' ', 1057, 0, 1057, 16),
woosh.Token(woosh.NAME, 'cursect', 1057, 16, 1057, 23),
woosh.Token(woosh.OP, '[', 1057, 23, 1057, 24),
woosh.Token(woosh.NAME, 'optname', 1057, 24, 1057, 31),
woosh.Token(woosh.OP, ']', 1057, 31, 1057, 32),
woosh.Token(woosh.OP, '.', 1057, 32, 1057, 33),
woosh.Token(woosh.NAME, 'append', 1057, 33, 1057, 39),
woosh.Token(woosh.OP, '(', 1057, 39, 1057, 40),
woosh.Token(woosh.NAME, 'value', 1057, 40, 1057, 45),
woosh.Token(woosh.OP, ')', 1057, 45, 1057, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1057, 46, 1058, 0),
woosh.Token(woosh.COMMENT, '# a section header or option header?', 1058, 12, 1058, 48),
woosh.Token(woosh.DEDENT, ' ', 1059, 0, 1059, 12),
woosh.Token(woosh.NAME, 'else', 1059, 12, 1059, 16),
woosh.Token(woosh.OP, ':', 1059, 16, 1059, 17),
woosh.Token(woosh.NEWLINE, '\r\n', 1059, 17, 1060, 0),
woosh.Token(woosh.INDENT, ' ', 1060, 0, 1060, 16),
woosh.Token(woosh.NAME, 'indent_level', 1060, 16, 1060, 28),
woosh.Token(woosh.OP, '=', 1060, 29, 1060, 30),
woosh.Token(woosh.NAME, 'cur_indent_level', 1060, 31, 1060, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1060, 47, 1061, 0),
woosh.Token(woosh.COMMENT, '# is it a section header?', 1061, 16, 1061, 41),
woosh.Token(woosh.NAME, 'mo', 1062, 16, 1062, 18),
woosh.Token(woosh.OP, '=', 1062, 19, 1062, 20),
woosh.Token(woosh.NAME, 'self', 1062, 21, 1062, 25),
woosh.Token(woosh.OP, '.', 1062, 25, 1062, 26),
woosh.Token(woosh.NAME, 'SECTCRE', 1062, 26, 1062, 33),
woosh.Token(woosh.OP, '.', 1062, 33, 1062, 34),
woosh.Token(woosh.NAME, 'match', 1062, 34, 1062, 39),
woosh.Token(woosh.OP, '(', 1062, 39, 1062, 40),
woosh.Token(woosh.NAME, 'value', 1062, 40, 1062, 45),
woosh.Token(woosh.OP, ')', 1062, 45, 1062, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1062, 46, 1063, 0),
woosh.Token(woosh.NAME, 'if', 1063, 16, 1063, 18),
woosh.Token(woosh.NAME, 'mo', 1063, 19, 1063, 21),
woosh.Token(woosh.OP, ':', 1063, 21, 1063, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1063, 22, 1064, 0),
woosh.Token(woosh.INDENT, ' ', 1064, 0, 1064, 20),
woosh.Token(woosh.NAME, 'sectname', 1064, 20, 1064, 28),
woosh.Token(woosh.OP, '=', 1064, 29, 1064, 30),
woosh.Token(woosh.NAME, 'mo', 1064, 31, 1064, 33),
woosh.Token(woosh.OP, '.', 1064, 33, 1064, 34),
woosh.Token(woosh.NAME, 'group', 1064, 34, 1064, 39),
woosh.Token(woosh.OP, '(', 1064, 39, 1064, 40),
woosh.Token(woosh.STRING, "'header'", 1064, 40, 1064, 48),
woosh.Token(woosh.OP, ')', 1064, 48, 1064, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1064, 49, 1065, 0),
woosh.Token(woosh.NAME, 'if', 1065, 20, 1065, 22),
woosh.Token(woosh.NAME, 'sectname', 1065, 23, 1065, 31),
woosh.Token(woosh.NAME, 'in', 1065, 32, 1065, 34),
woosh.Token(woosh.NAME, 'self', 1065, 35, 1065, 39),
woosh.Token(woosh.OP, '.', 1065, 39, 1065, 40),
woosh.Token(woosh.NAME, '_sections', 1065, 40, 1065, 49),
woosh.Token(woosh.OP, ':', 1065, 49, 1065, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 1065, 50, 1066, 0),
woosh.Token(woosh.INDENT, ' ', 1066, 0, 1066, 24),
woosh.Token(woosh.NAME, 'if', 1066, 24, 1066, 26),
woosh.Token(woosh.NAME, 'self', 1066, 27, 1066, 31),
woosh.Token(woosh.OP, '.', 1066, 31, 1066, 32),
woosh.Token(woosh.NAME, '_strict', 1066, 32, 1066, 39),
woosh.Token(woosh.NAME, 'and', 1066, 40, 1066, 43),
woosh.Token(woosh.NAME, 'sectname', 1066, 44, 1066, 52),
woosh.Token(woosh.NAME, 'in', 1066, 53, 1066, 55),
woosh.Token(woosh.NAME, 'elements_added', 1066, 56, 1066, 70),
woosh.Token(woosh.OP, ':', 1066, 70, 1066, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1066, 71, 1067, 0),
woosh.Token(woosh.INDENT, ' ', 1067, 0, 1067, 28),
woosh.Token(woosh.NAME, 'raise', 1067, 28, 1067, 33),
woosh.Token(woosh.NAME, 'DuplicateSectionError', 1067, 34, 1067, 55),
woosh.Token(woosh.OP, '(', 1067, 55, 1067, 56),
woosh.Token(woosh.NAME, 'sectname', 1067, 56, 1067, 64),
woosh.Token(woosh.OP, ',', 1067, 64, 1067, 65),
woosh.Token(woosh.NAME, 'fpname', 1067, 66, 1067, 72),
woosh.Token(woosh.OP, ',', 1067, 72, 1067, 73),
woosh.Token(woosh.NAME, 'lineno', 1068, 56, 1068, 62),
woosh.Token(woosh.OP, ')', 1068, 62, 1068, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1068, 63, 1069, 0),
woosh.Token(woosh.DEDENT, ' ', 1069, 0, 1069, 24),
woosh.Token(woosh.NAME, 'cursect', 1069, 24, 1069, 31),
woosh.Token(woosh.OP, '=', 1069, 32, 1069, 33),
woosh.Token(woosh.NAME, 'self', 1069, 34, 1069, 38),
woosh.Token(woosh.OP, '.', 1069, 38, 1069, 39),
woosh.Token(woosh.NAME, '_sections', 1069, 39, 1069, 48),
woosh.Token(woosh.OP, '[', 1069, 48, 1069, 49),
woosh.Token(woosh.NAME, 'sectname', 1069, 49, 1069, 57),
woosh.Token(woosh.OP, ']', 1069, 57, 1069, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1069, 58, 1070, 0),
woosh.Token(woosh.NAME, 'elements_added', 1070, 24, 1070, 38),
woosh.Token(woosh.OP, '.', 1070, 38, 1070, 39),
woosh.Token(woosh.NAME, 'add', 1070, 39, 1070, 42),
woosh.Token(woosh.OP, '(', 1070, 42, 1070, 43),
woosh.Token(woosh.NAME, 'sectname', 1070, 43, 1070, 51),
woosh.Token(woosh.OP, ')', 1070, 51, 1070, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1070, 52, 1071, 0),
woosh.Token(woosh.DEDENT, ' ', 1071, 0, 1071, 20),
woosh.Token(woosh.NAME, 'elif', 1071, 20, 1071, 24),
woosh.Token(woosh.NAME, 'sectname', 1071, 25, 1071, 33),
woosh.Token(woosh.OP, '==', 1071, 34, 1071, 36),
woosh.Token(woosh.NAME, 'self', 1071, 37, 1071, 41),
woosh.Token(woosh.OP, '.', 1071, 41, 1071, 42),
woosh.Token(woosh.NAME, 'default_section', 1071, 42, 1071, 57),
woosh.Token(woosh.OP, ':', 1071, 57, 1071, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1071, 58, 1072, 0),
woosh.Token(woosh.INDENT, ' ', 1072, 0, 1072, 24),
woosh.Token(woosh.NAME, 'cursect', 1072, 24, 1072, 31),
woosh.Token(woosh.OP, '=', 1072, 32, 1072, 33),
woosh.Token(woosh.NAME, 'self', 1072, 34, 1072, 38),
woosh.Token(woosh.OP, '.', 1072, 38, 1072, 39),
woosh.Token(woosh.NAME, '_defaults', 1072, 39, 1072, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1072, 48, 1073, 0),
woosh.Token(woosh.DEDENT, ' ', 1073, 0, 1073, 20),
woosh.Token(woosh.NAME, 'else', 1073, 20, 1073, 24),
woosh.Token(woosh.OP, ':', 1073, 24, 1073, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1073, 25, 1074, 0),
woosh.Token(woosh.INDENT, ' ', 1074, 0, 1074, 24),
woosh.Token(woosh.NAME, 'cursect', 1074, 24, 1074, 31),
woosh.Token(woosh.OP, '=', 1074, 32, 1074, 33),
woosh.Token(woosh.NAME, 'self', 1074, 34, 1074, 38),
woosh.Token(woosh.OP, '.', 1074, 38, 1074, 39),
woosh.Token(woosh.NAME, '_dict', 1074, 39, 1074, 44),
woosh.Token(woosh.OP, '(', 1074, 44, 1074, 45),
woosh.Token(woosh.OP, ')', 1074, 45, 1074, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1074, 46, 1075, 0),
woosh.Token(woosh.NAME, 'self', 1075, 24, 1075, 28),
woosh.Token(woosh.OP, '.', 1075, 28, 1075, 29),
woosh.Token(woosh.NAME, '_sections', 1075, 29, 1075, 38),
woosh.Token(woosh.OP, '[', 1075, 38, 1075, 39),
woosh.Token(woosh.NAME, 'sectname', 1075, 39, 1075, 47),
woosh.Token(woosh.OP, ']', 1075, 47, 1075, 48),
woosh.Token(woosh.OP, '=', 1075, 49, 1075, 50),
woosh.Token(woosh.NAME, 'cursect', 1075, 51, 1075, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1075, 58, 1076, 0),
woosh.Token(woosh.NAME, 'self', 1076, 24, 1076, 28),
woosh.Token(woosh.OP, '.', 1076, 28, 1076, 29),
woosh.Token(woosh.NAME, '_proxies', 1076, 29, 1076, 37),
woosh.Token(woosh.OP, '[', 1076, 37, 1076, 38),
woosh.Token(woosh.NAME, 'sectname', 1076, 38, 1076, 46),
woosh.Token(woosh.OP, ']', 1076, 46, 1076, 47),
woosh.Token(woosh.OP, '=', 1076, 48, 1076, 49),
woosh.Token(woosh.NAME, 'SectionProxy', 1076, 50, 1076, 62),
woosh.Token(woosh.OP, '(', 1076, 62, 1076, 63),
woosh.Token(woosh.NAME, 'self', 1076, 63, 1076, 67),
woosh.Token(woosh.OP, ',', 1076, 67, 1076, 68),
woosh.Token(woosh.NAME, 'sectname', 1076, 69, 1076, 77),
woosh.Token(woosh.OP, ')', 1076, 77, 1076, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 1076, 78, 1077, 0),
woosh.Token(woosh.NAME, 'elements_added', 1077, 24, 1077, 38),
woosh.Token(woosh.OP, '.', 1077, 38, 1077, 39),
woosh.Token(woosh.NAME, 'add', 1077, 39, 1077, 42),
woosh.Token(woosh.OP, '(', 1077, 42, 1077, 43),
woosh.Token(woosh.NAME, 'sectname', 1077, 43, 1077, 51),
woosh.Token(woosh.OP, ')', 1077, 51, 1077, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1077, 52, 1078, 0),
woosh.Token(woosh.COMMENT, "# So sections can't start with a continuation line", 1078, 20, 1078, 70),
woosh.Token(woosh.DEDENT, ' ', 1079, 0, 1079, 20),
woosh.Token(woosh.NAME, 'optname', 1079, 20, 1079, 27),
woosh.Token(woosh.OP, '=', 1079, 28, 1079, 29),
woosh.Token(woosh.NAME, 'None', 1079, 30, 1079, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1079, 34, 1080, 0),
woosh.Token(woosh.COMMENT, '# no section header in the file?', 1080, 16, 1080, 48),
woosh.Token(woosh.DEDENT, ' ', 1081, 0, 1081, 16),
woosh.Token(woosh.NAME, 'elif', 1081, 16, 1081, 20),
woosh.Token(woosh.NAME, 'cursect', 1081, 21, 1081, 28),
woosh.Token(woosh.NAME, 'is', 1081, 29, 1081, 31),
woosh.Token(woosh.NAME, 'None', 1081, 32, 1081, 36),
woosh.Token(woosh.OP, ':', 1081, 36, 1081, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1081, 37, 1082, 0),
woosh.Token(woosh.INDENT, ' ', 1082, 0, 1082, 20),
woosh.Token(woosh.NAME, 'raise', 1082, 20, 1082, 25),
woosh.Token(woosh.NAME, 'MissingSectionHeaderError', 1082, 26, 1082, 51),
woosh.Token(woosh.OP, '(', 1082, 51, 1082, 52),
woosh.Token(woosh.NAME, 'fpname', 1082, 52, 1082, 58),
woosh.Token(woosh.OP, ',', 1082, 58, 1082, 59),
woosh.Token(woosh.NAME, 'lineno', 1082, 60, 1082, 66),
woosh.Token(woosh.OP, ',', 1082, 66, 1082, 67),
woosh.Token(woosh.NAME, 'line', 1082, 68, 1082, 72),
woosh.Token(woosh.OP, ')', 1082, 72, 1082, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1082, 73, 1083, 0),
woosh.Token(woosh.COMMENT, '# an option line?', 1083, 16, 1083, 33),
woosh.Token(woosh.DEDENT, ' ', 1084, 0, 1084, 16),
woosh.Token(woosh.NAME, 'else', 1084, 16, 1084, 20),
woosh.Token(woosh.OP, ':', 1084, 20, 1084, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1084, 21, 1085, 0),
woosh.Token(woosh.INDENT, ' ', 1085, 0, 1085, 20),
woosh.Token(woosh.NAME, 'mo', 1085, 20, 1085, 22),
woosh.Token(woosh.OP, '=', 1085, 23, 1085, 24),
woosh.Token(woosh.NAME, 'self', 1085, 25, 1085, 29),
woosh.Token(woosh.OP, '.', 1085, 29, 1085, 30),
woosh.Token(woosh.NAME, '_optcre', 1085, 30, 1085, 37),
woosh.Token(woosh.OP, '.', 1085, 37, 1085, 38),
woosh.Token(woosh.NAME, 'match', 1085, 38, 1085, 43),
woosh.Token(woosh.OP, '(', 1085, 43, 1085, 44),
woosh.Token(woosh.NAME, 'value', 1085, 44, 1085, 49),
woosh.Token(woosh.OP, ')', 1085, 49, 1085, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 1085, 50, 1086, 0),
woosh.Token(woosh.NAME, 'if', 1086, 20, 1086, 22),
woosh.Token(woosh.NAME, 'mo', 1086, 23, 1086, 25),
woosh.Token(woosh.OP, ':', 1086, 25, 1086, 26),
woosh.Token(woosh.NEWLINE, '\r\n', 1086, 26, 1087, 0),
woosh.Token(woosh.INDENT, ' ', 1087, 0, 1087, 24),
woosh.Token(woosh.NAME, 'optname', 1087, 24, 1087, 31),
woosh.Token(woosh.OP, ',', 1087, 31, 1087, 32),
woosh.Token(woosh.NAME, 'vi', 1087, 33, 1087, 35),
woosh.Token(woosh.OP, ',', 1087, 35, 1087, 36),
woosh.Token(woosh.NAME, 'optval', 1087, 37, 1087, 43),
woosh.Token(woosh.OP, '=', 1087, 44, 1087, 45),
woosh.Token(woosh.NAME, 'mo', 1087, 46, 1087, 48),
woosh.Token(woosh.OP, '.', 1087, 48, 1087, 49),
woosh.Token(woosh.NAME, 'group', 1087, 49, 1087, 54),
woosh.Token(woosh.OP, '(', 1087, 54, 1087, 55),
woosh.Token(woosh.STRING, "'option'", 1087, 55, 1087, 63),
woosh.Token(woosh.OP, ',', 1087, 63, 1087, 64),
woosh.Token(woosh.STRING, "'vi'", 1087, 65, 1087, 69),
woosh.Token(woosh.OP, ',', 1087, 69, 1087, 70),
woosh.Token(woosh.STRING, "'value'", 1087, 71, 1087, 78),
woosh.Token(woosh.OP, ')', 1087, 78, 1087, 79),
woosh.Token(woosh.NEWLINE, '\r\n', 1087, 79, 1088, 0),
woosh.Token(woosh.NAME, 'if', 1088, 24, 1088, 26),
woosh.Token(woosh.NAME, 'not', 1088, 27, 1088, 30),
woosh.Token(woosh.NAME, 'optname', 1088, 31, 1088, 38),
woosh.Token(woosh.OP, ':', 1088, 38, 1088, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1088, 39, 1089, 0),
woosh.Token(woosh.INDENT, ' ', 1089, 0, 1089, 28),
woosh.Token(woosh.NAME, 'e', 1089, 28, 1089, 29),
woosh.Token(woosh.OP, '=', 1089, 30, 1089, 31),
woosh.Token(woosh.NAME, 'self', 1089, 32, 1089, 36),
woosh.Token(woosh.OP, '.', 1089, 36, 1089, 37),
woosh.Token(woosh.NAME, '_handle_error', 1089, 37, 1089, 50),
woosh.Token(woosh.OP, '(', 1089, 50, 1089, 51),
woosh.Token(woosh.NAME, 'e', 1089, 51, 1089, 52),
woosh.Token(woosh.OP, ',', 1089, 52, 1089, 53),
woosh.Token(woosh.NAME, 'fpname', 1089, 54, 1089, 60),
woosh.Token(woosh.OP, ',', 1089, 60, 1089, 61),
woosh.Token(woosh.NAME, 'lineno', 1089, 62, 1089, 68),
woosh.Token(woosh.OP, ',', 1089, 68, 1089, 69),
woosh.Token(woosh.NAME, 'line', 1089, 70, 1089, 74),
woosh.Token(woosh.OP, ')', 1089, 74, 1089, 75),
woosh.Token(woosh.NEWLINE, '\r\n', 1089, 75, 1090, 0),
woosh.Token(woosh.DEDENT, ' ', 1090, 0, 1090, 24),
woosh.Token(woosh.NAME, 'optname', 1090, 24, 1090, 31),
woosh.Token(woosh.OP, '=', 1090, 32, 1090, 33),
woosh.Token(woosh.NAME, 'self', 1090, 34, 1090, 38),
woosh.Token(woosh.OP, '.', 1090, 38, 1090, 39),
woosh.Token(woosh.NAME, 'optionxform', 1090, 39, 1090, 50),
woosh.Token(woosh.OP, '(', 1090, 50, 1090, 51),
woosh.Token(woosh.NAME, 'optname', 1090, 51, 1090, 58),
woosh.Token(woosh.OP, '.', 1090, 58, 1090, 59),
woosh.Token(woosh.NAME, 'rstrip', 1090, 59, 1090, 65),
woosh.Token(woosh.OP, '(', 1090, 65, 1090, 66),
woosh.Token(woosh.OP, ')', 1090, 66, 1090, 67),
woosh.Token(woosh.OP, ')', 1090, 67, 1090, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 1090, 68, 1091, 0),
woosh.Token(woosh.NAME, 'if', 1091, 24, 1091, 26),
woosh.Token(woosh.OP, '(', 1091, 27, 1091, 28),
woosh.Token(woosh.NAME, 'self', 1091, 28, 1091, 32),
woosh.Token(woosh.OP, '.', 1091, 32, 1091, 33),
woosh.Token(woosh.NAME, '_strict', 1091, 33, 1091, 40),
woosh.Token(woosh.NAME, 'and', 1091, 41, 1091, 44),
woosh.Token(woosh.OP, '(', 1092, 28, 1092, 29),
woosh.Token(woosh.NAME, 'sectname', 1092, 29, 1092, 37),
woosh.Token(woosh.OP, ',', 1092, 37, 1092, 38),
woosh.Token(woosh.NAME, 'optname', 1092, 39, 1092, 46),
woosh.Token(woosh.OP, ')', 1092, 46, 1092, 47),
woosh.Token(woosh.NAME, 'in', 1092, 48, 1092, 50),
woosh.Token(woosh.NAME, 'elements_added', 1092, 51, 1092, 65),
woosh.Token(woosh.OP, ')', 1092, 65, 1092, 66),
woosh.Token(woosh.OP, ':', 1092, 66, 1092, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 1092, 67, 1093, 0),
woosh.Token(woosh.INDENT, ' ', 1093, 0, 1093, 28),
woosh.Token(woosh.NAME, 'raise', 1093, 28, 1093, 33),
woosh.Token(woosh.NAME, 'DuplicateOptionError', 1093, 34, 1093, 54),
woosh.Token(woosh.OP, '(', 1093, 54, 1093, 55),
woosh.Token(woosh.NAME, 'sectname', 1093, 55, 1093, 63),
woosh.Token(woosh.OP, ',', 1093, 63, 1093, 64),
woosh.Token(woosh.NAME, 'optname', 1093, 65, 1093, 72),
woosh.Token(woosh.OP, ',', 1093, 72, 1093, 73),
woosh.Token(woosh.NAME, 'fpname', 1094, 55, 1094, 61),
woosh.Token(woosh.OP, ',', 1094, 61, 1094, 62),
woosh.Token(woosh.NAME, 'lineno', 1094, 63, 1094, 69),
woosh.Token(woosh.OP, ')', 1094, 69, 1094, 70),
woosh.Token(woosh.NEWLINE, '\r\n', 1094, 70, 1095, 0),
woosh.Token(woosh.DEDENT, ' ', 1095, 0, 1095, 24),
woosh.Token(woosh.NAME, 'elements_added', 1095, 24, 1095, 38),
woosh.Token(woosh.OP, '.', 1095, 38, 1095, 39),
woosh.Token(woosh.NAME, 'add', 1095, 39, 1095, 42),
woosh.Token(woosh.OP, '(', 1095, 42, 1095, 43),
woosh.Token(woosh.OP, '(', 1095, 43, 1095, 44),
woosh.Token(woosh.NAME, 'sectname', 1095, 44, 1095, 52),
woosh.Token(woosh.OP, ',', 1095, 52, 1095, 53),
woosh.Token(woosh.NAME, 'optname', 1095, 54, 1095, 61),
woosh.Token(woosh.OP, ')', 1095, 61, 1095, 62),
woosh.Token(woosh.OP, ')', 1095, 62, 1095, 63),
woosh.Token(woosh.NEWLINE, '\r\n', 1095, 63, 1096, 0),
woosh.Token(woosh.COMMENT, '# This check is fine because the OPTCRE cannot', 1096, 24, 1096, 70),
woosh.Token(woosh.COMMENT, '# match if it would set optval to None', 1097, 24, 1097, 62),
woosh.Token(woosh.NAME, 'if', 1098, 24, 1098, 26),
woosh.Token(woosh.NAME, 'optval', 1098, 27, 1098, 33),
woosh.Token(woosh.NAME, 'is', 1098, 34, 1098, 36),
woosh.Token(woosh.NAME, 'not', 1098, 37, 1098, 40),
woosh.Token(woosh.NAME, 'None', 1098, 41, 1098, 45),
woosh.Token(woosh.OP, ':', 1098, 45, 1098, 46),
woosh.Token(woosh.NEWLINE, '\r\n', 1098, 46, 1099, 0),
woosh.Token(woosh.INDENT, ' ', 1099, 0, 1099, 28),
woosh.Token(woosh.NAME, 'optval', 1099, 28, 1099, 34),
woosh.Token(woosh.OP, '=', 1099, 35, 1099, 36),
woosh.Token(woosh.NAME, 'optval', 1099, 37, 1099, 43),
woosh.Token(woosh.OP, '.', 1099, 43, 1099, 44),
woosh.Token(woosh.NAME, 'strip', 1099, 44, 1099, 49),
woosh.Token(woosh.OP, '(', 1099, 49, 1099, 50),
woosh.Token(woosh.OP, ')', 1099, 50, 1099, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1099, 51, 1100, 0),
woosh.Token(woosh.NAME, 'cursect', 1100, 28, 1100, 35),
woosh.Token(woosh.OP, '[', 1100, 35, 1100, 36),
woosh.Token(woosh.NAME, 'optname', 1100, 36, 1100, 43),
woosh.Token(woosh.OP, ']', 1100, 43, 1100, 44),
woosh.Token(woosh.OP, '=', 1100, 45, 1100, 46),
woosh.Token(woosh.OP, '[', 1100, 47, 1100, 48),
woosh.Token(woosh.NAME, 'optval', 1100, 48, 1100, 54),
woosh.Token(woosh.OP, ']', 1100, 54, 1100, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1100, 55, 1101, 0),
woosh.Token(woosh.DEDENT, ' ', 1101, 0, 1101, 24),
woosh.Token(woosh.NAME, 'else', 1101, 24, 1101, 28),
woosh.Token(woosh.OP, ':', 1101, 28, 1101, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1101, 29, 1102, 0),
woosh.Token(woosh.COMMENT, '# valueless option handling', 1102, 28, 1102, 55),
woosh.Token(woosh.INDENT, ' ', 1103, 0, 1103, 28),
woosh.Token(woosh.NAME, 'cursect', 1103, 28, 1103, 35),
woosh.Token(woosh.OP, '[', 1103, 35, 1103, 36),
woosh.Token(woosh.NAME, 'optname', 1103, 36, 1103, 43),
woosh.Token(woosh.OP, ']', 1103, 43, 1103, 44),
woosh.Token(woosh.OP, '=', 1103, 45, 1103, 46),
woosh.Token(woosh.NAME, 'None', 1103, 47, 1103, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1103, 51, 1104, 0),
woosh.Token(woosh.DEDENT, ' ', 1104, 0, 1104, 20),
woosh.Token(woosh.DEDENT, '', 1104, 20, 1104, 20),
woosh.Token(woosh.NAME, 'else', 1104, 20, 1104, 24),
woosh.Token(woosh.OP, ':', 1104, 24, 1104, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1104, 25, 1105, 0),
woosh.Token(woosh.COMMENT, '# a non-fatal parsing error occurred. set up the', 1105, 24, 1105, 72),
woosh.Token(woosh.COMMENT, '# exception but keep going. the exception will be', 1106, 24, 1106, 73),
woosh.Token(woosh.COMMENT, '# raised at the end of the file and will contain a', 1107, 24, 1107, 74),
woosh.Token(woosh.COMMENT, '# list of all bogus lines', 1108, 24, 1108, 49),
woosh.Token(woosh.INDENT, ' ', 1109, 0, 1109, 24),
woosh.Token(woosh.NAME, 'e', 1109, 24, 1109, 25),
woosh.Token(woosh.OP, '=', 1109, 26, 1109, 27),
woosh.Token(woosh.NAME, 'self', 1109, 28, 1109, 32),
woosh.Token(woosh.OP, '.', 1109, 32, 1109, 33),
woosh.Token(woosh.NAME, '_handle_error', 1109, 33, 1109, 46),
woosh.Token(woosh.OP, '(', 1109, 46, 1109, 47),
woosh.Token(woosh.NAME, 'e', 1109, 47, 1109, 48),
woosh.Token(woosh.OP, ',', 1109, 48, 1109, 49),
woosh.Token(woosh.NAME, 'fpname', 1109, 50, 1109, 56),
woosh.Token(woosh.OP, ',', 1109, 56, 1109, 57),
woosh.Token(woosh.NAME, 'lineno', 1109, 58, 1109, 64),
woosh.Token(woosh.OP, ',', 1109, 64, 1109, 65),
woosh.Token(woosh.NAME, 'line', 1109, 66, 1109, 70),
woosh.Token(woosh.OP, ')', 1109, 70, 1109, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1109, 71, 1110, 0),
woosh.Token(woosh.DEDENT, ' ', 1110, 0, 1110, 8),
woosh.Token(woosh.DEDENT, '', 1110, 8, 1110, 8),
woosh.Token(woosh.DEDENT, '', 1110, 8, 1110, 8),
woosh.Token(woosh.DEDENT, '', 1110, 8, 1110, 8),
woosh.Token(woosh.NAME, 'self', 1110, 8, 1110, 12),
woosh.Token(woosh.OP, '.', 1110, 12, 1110, 13),
woosh.Token(woosh.NAME, '_join_multiline_values', 1110, 13, 1110, 35),
woosh.Token(woosh.OP, '(', 1110, 35, 1110, 36),
woosh.Token(woosh.OP, ')', 1110, 36, 1110, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1110, 37, 1111, 0),
woosh.Token(woosh.COMMENT, '# if any parsing errors occurred, raise an exception', 1111, 8, 1111, 60),
woosh.Token(woosh.NAME, 'if', 1112, 8, 1112, 10),
woosh.Token(woosh.NAME, 'e', 1112, 11, 1112, 12),
woosh.Token(woosh.OP, ':', 1112, 12, 1112, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1112, 13, 1113, 0),
woosh.Token(woosh.INDENT, ' ', 1113, 0, 1113, 12),
woosh.Token(woosh.NAME, 'raise', 1113, 12, 1113, 17),
woosh.Token(woosh.NAME, 'e', 1113, 18, 1113, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1113, 19, 1114, 0),
woosh.Token(woosh.DEDENT, ' ', 1115, 0, 1115, 4),
woosh.Token(woosh.DEDENT, '', 1115, 4, 1115, 4),
woosh.Token(woosh.NAME, 'def', 1115, 4, 1115, 7),
woosh.Token(woosh.NAME, '_join_multiline_values', 1115, 8, 1115, 30),
woosh.Token(woosh.OP, '(', 1115, 30, 1115, 31),
woosh.Token(woosh.NAME, 'self', 1115, 31, 1115, 35),
woosh.Token(woosh.OP, ')', 1115, 35, 1115, 36),
woosh.Token(woosh.OP, ':', 1115, 36, 1115, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1115, 37, 1116, 0),
woosh.Token(woosh.INDENT, ' ', 1116, 0, 1116, 8),
woosh.Token(woosh.NAME, 'defaults', 1116, 8, 1116, 16),
woosh.Token(woosh.OP, '=', 1116, 17, 1116, 18),
woosh.Token(woosh.NAME, 'self', 1116, 19, 1116, 23),
woosh.Token(woosh.OP, '.', 1116, 23, 1116, 24),
woosh.Token(woosh.NAME, 'default_section', 1116, 24, 1116, 39),
woosh.Token(woosh.OP, ',', 1116, 39, 1116, 40),
woosh.Token(woosh.NAME, 'self', 1116, 41, 1116, 45),
woosh.Token(woosh.OP, '.', 1116, 45, 1116, 46),
woosh.Token(woosh.NAME, '_defaults', 1116, 46, 1116, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1116, 55, 1117, 0),
woosh.Token(woosh.NAME, 'all_sections', 1117, 8, 1117, 20),
woosh.Token(woosh.OP, '=', 1117, 21, 1117, 22),
woosh.Token(woosh.NAME, 'itertools', 1117, 23, 1117, 32),
woosh.Token(woosh.OP, '.', 1117, 32, 1117, 33),
woosh.Token(woosh.NAME, 'chain', 1117, 33, 1117, 38),
woosh.Token(woosh.OP, '(', 1117, 38, 1117, 39),
woosh.Token(woosh.OP, '(', 1117, 39, 1117, 40),
woosh.Token(woosh.NAME, 'defaults', 1117, 40, 1117, 48),
woosh.Token(woosh.OP, ',', 1117, 48, 1117, 49),
woosh.Token(woosh.OP, ')', 1117, 49, 1117, 50),
woosh.Token(woosh.OP, ',', 1117, 50, 1117, 51),
woosh.Token(woosh.NAME, 'self', 1118, 39, 1118, 43),
woosh.Token(woosh.OP, '.', 1118, 43, 1118, 44),
woosh.Token(woosh.NAME, '_sections', 1118, 44, 1118, 53),
woosh.Token(woosh.OP, '.', 1118, 53, 1118, 54),
woosh.Token(woosh.NAME, 'items', 1118, 54, 1118, 59),
woosh.Token(woosh.OP, '(', 1118, 59, 1118, 60),
woosh.Token(woosh.OP, ')', 1118, 60, 1118, 61),
woosh.Token(woosh.OP, ')', 1118, 61, 1118, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1118, 62, 1119, 0),
woosh.Token(woosh.NAME, 'for', 1119, 8, 1119, 11),
woosh.Token(woosh.NAME, 'section', 1119, 12, 1119, 19),
woosh.Token(woosh.OP, ',', 1119, 19, 1119, 20),
woosh.Token(woosh.NAME, 'options', 1119, 21, 1119, 28),
woosh.Token(woosh.NAME, 'in', 1119, 29, 1119, 31),
woosh.Token(woosh.NAME, 'all_sections', 1119, 32, 1119, 44),
woosh.Token(woosh.OP, ':', 1119, 44, 1119, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1119, 45, 1120, 0),
woosh.Token(woosh.INDENT, ' ', 1120, 0, 1120, 12),
woosh.Token(woosh.NAME, 'for', 1120, 12, 1120, 15),
woosh.Token(woosh.NAME, 'name', 1120, 16, 1120, 20),
woosh.Token(woosh.OP, ',', 1120, 20, 1120, 21),
woosh.Token(woosh.NAME, 'val', 1120, 22, 1120, 25),
woosh.Token(woosh.NAME, 'in', 1120, 26, 1120, 28),
woosh.Token(woosh.NAME, 'options', 1120, 29, 1120, 36),
woosh.Token(woosh.OP, '.', 1120, 36, 1120, 37),
woosh.Token(woosh.NAME, 'items', 1120, 37, 1120, 42),
woosh.Token(woosh.OP, '(', 1120, 42, 1120, 43),
woosh.Token(woosh.OP, ')', 1120, 43, 1120, 44),
woosh.Token(woosh.OP, ':', 1120, 44, 1120, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1120, 45, 1121, 0),
woosh.Token(woosh.INDENT, ' ', 1121, 0, 1121, 16),
woosh.Token(woosh.NAME, 'if', 1121, 16, 1121, 18),
woosh.Token(woosh.NAME, 'isinstance', 1121, 19, 1121, 29),
woosh.Token(woosh.OP, '(', 1121, 29, 1121, 30),
woosh.Token(woosh.NAME, 'val', 1121, 30, 1121, 33),
woosh.Token(woosh.OP, ',', 1121, 33, 1121, 34),
woosh.Token(woosh.NAME, 'list', 1121, 35, 1121, 39),
woosh.Token(woosh.OP, ')', 1121, 39, 1121, 40),
woosh.Token(woosh.OP, ':', 1121, 40, 1121, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1121, 41, 1122, 0),
woosh.Token(woosh.INDENT, ' ', 1122, 0, 1122, 20),
woosh.Token(woosh.NAME, 'val', 1122, 20, 1122, 23),
woosh.Token(woosh.OP, '=', 1122, 24, 1122, 25),
woosh.Token(woosh.STRING, "'\\n'", 1122, 26, 1122, 30),
woosh.Token(woosh.OP, '.', 1122, 30, 1122, 31),
woosh.Token(woosh.NAME, 'join', 1122, 31, 1122, 35),
woosh.Token(woosh.OP, '(', 1122, 35, 1122, 36),
woosh.Token(woosh.NAME, 'val', 1122, 36, 1122, 39),
woosh.Token(woosh.OP, ')', 1122, 39, 1122, 40),
woosh.Token(woosh.OP, '.', 1122, 40, 1122, 41),
woosh.Token(woosh.NAME, 'rstrip', 1122, 41, 1122, 47),
woosh.Token(woosh.OP, '(', 1122, 47, 1122, 48),
woosh.Token(woosh.OP, ')', 1122, 48, 1122, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1122, 49, 1123, 0),
woosh.Token(woosh.DEDENT, ' ', 1123, 0, 1123, 16),
woosh.Token(woosh.NAME, 'options', 1123, 16, 1123, 23),
woosh.Token(woosh.OP, '[', 1123, 23, 1123, 24),
woosh.Token(woosh.NAME, 'name', 1123, 24, 1123, 28),
woosh.Token(woosh.OP, ']', 1123, 28, 1123, 29),
woosh.Token(woosh.OP, '=', 1123, 30, 1123, 31),
woosh.Token(woosh.NAME, 'self', 1123, 32, 1123, 36),
woosh.Token(woosh.OP, '.', 1123, 36, 1123, 37),
woosh.Token(woosh.NAME, '_interpolation', 1123, 37, 1123, 51),
woosh.Token(woosh.OP, '.', 1123, 51, 1123, 52),
woosh.Token(woosh.NAME, 'before_read', 1123, 52, 1123, 63),
woosh.Token(woosh.OP, '(', 1123, 63, 1123, 64),
woosh.Token(woosh.NAME, 'self', 1123, 64, 1123, 68),
woosh.Token(woosh.OP, ',', 1123, 68, 1123, 69),
woosh.Token(woosh.NAME, 'section', 1124, 64, 1124, 71),
woosh.Token(woosh.OP, ',', 1124, 71, 1124, 72),
woosh.Token(woosh.NAME, 'name', 1125, 64, 1125, 68),
woosh.Token(woosh.OP, ',', 1125, 68, 1125, 69),
woosh.Token(woosh.NAME, 'val', 1125, 70, 1125, 73),
woosh.Token(woosh.OP, ')', 1125, 73, 1125, 74),
woosh.Token(woosh.NEWLINE, '\r\n', 1125, 74, 1126, 0),
woosh.Token(woosh.DEDENT, ' ', 1127, 0, 1127, 4),
woosh.Token(woosh.DEDENT, '', 1127, 4, 1127, 4),
woosh.Token(woosh.DEDENT, '', 1127, 4, 1127, 4),
woosh.Token(woosh.NAME, 'def', 1127, 4, 1127, 7),
woosh.Token(woosh.NAME, '_read_defaults', 1127, 8, 1127, 22),
woosh.Token(woosh.OP, '(', 1127, 22, 1127, 23),
woosh.Token(woosh.NAME, 'self', 1127, 23, 1127, 27),
woosh.Token(woosh.OP, ',', 1127, 27, 1127, 28),
woosh.Token(woosh.NAME, 'defaults', 1127, 29, 1127, 37),
woosh.Token(woosh.OP, ')', 1127, 37, 1127, 38),
woosh.Token(woosh.OP, ':', 1127, 38, 1127, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1127, 39, 1128, 0),
woosh.Token(woosh.INDENT, ' ', 1128, 0, 1128, 8),
woosh.Token(woosh.STRING, '"""Read the defaults passed in the initializer.\r\n Note: values can be non-string."""', 1128, 8, 1129, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1129, 42, 1130, 0),
woosh.Token(woosh.NAME, 'for', 1130, 8, 1130, 11),
woosh.Token(woosh.NAME, 'key', 1130, 12, 1130, 15),
woosh.Token(woosh.OP, ',', 1130, 15, 1130, 16),
woosh.Token(woosh.NAME, 'value', 1130, 17, 1130, 22),
woosh.Token(woosh.NAME, 'in', 1130, 23, 1130, 25),
woosh.Token(woosh.NAME, 'defaults', 1130, 26, 1130, 34),
woosh.Token(woosh.OP, '.', 1130, 34, 1130, 35),
woosh.Token(woosh.NAME, 'items', 1130, 35, 1130, 40),
woosh.Token(woosh.OP, '(', 1130, 40, 1130, 41),
woosh.Token(woosh.OP, ')', 1130, 41, 1130, 42),
woosh.Token(woosh.OP, ':', 1130, 42, 1130, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1130, 43, 1131, 0),
woosh.Token(woosh.INDENT, ' ', 1131, 0, 1131, 12),
woosh.Token(woosh.NAME, 'self', 1131, 12, 1131, 16),
woosh.Token(woosh.OP, '.', 1131, 16, 1131, 17),
woosh.Token(woosh.NAME, '_defaults', 1131, 17, 1131, 26),
woosh.Token(woosh.OP, '[', 1131, 26, 1131, 27),
woosh.Token(woosh.NAME, 'self', 1131, 27, 1131, 31),
woosh.Token(woosh.OP, '.', 1131, 31, 1131, 32),
woosh.Token(woosh.NAME, 'optionxform', 1131, 32, 1131, 43),
woosh.Token(woosh.OP, '(', 1131, 43, 1131, 44),
woosh.Token(woosh.NAME, 'key', 1131, 44, 1131, 47),
woosh.Token(woosh.OP, ')', 1131, 47, 1131, 48),
woosh.Token(woosh.OP, ']', 1131, 48, 1131, 49),
woosh.Token(woosh.OP, '=', 1131, 50, 1131, 51),
woosh.Token(woosh.NAME, 'value', 1131, 52, 1131, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1131, 57, 1132, 0),
woosh.Token(woosh.DEDENT, ' ', 1133, 0, 1133, 4),
woosh.Token(woosh.DEDENT, '', 1133, 4, 1133, 4),
woosh.Token(woosh.NAME, 'def', 1133, 4, 1133, 7),
woosh.Token(woosh.NAME, '_handle_error', 1133, 8, 1133, 21),
woosh.Token(woosh.OP, '(', 1133, 21, 1133, 22),
woosh.Token(woosh.NAME, 'self', 1133, 22, 1133, 26),
woosh.Token(woosh.OP, ',', 1133, 26, 1133, 27),
woosh.Token(woosh.NAME, 'exc', 1133, 28, 1133, 31),
woosh.Token(woosh.OP, ',', 1133, 31, 1133, 32),
woosh.Token(woosh.NAME, 'fpname', 1133, 33, 1133, 39),
woosh.Token(woosh.OP, ',', 1133, 39, 1133, 40),
woosh.Token(woosh.NAME, 'lineno', 1133, 41, 1133, 47),
woosh.Token(woosh.OP, ',', 1133, 47, 1133, 48),
woosh.Token(woosh.NAME, 'line', 1133, 49, 1133, 53),
woosh.Token(woosh.OP, ')', 1133, 53, 1133, 54),
woosh.Token(woosh.OP, ':', 1133, 54, 1133, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1133, 55, 1134, 0),
woosh.Token(woosh.INDENT, ' ', 1134, 0, 1134, 8),
woosh.Token(woosh.NAME, 'if', 1134, 8, 1134, 10),
woosh.Token(woosh.NAME, 'not', 1134, 11, 1134, 14),
woosh.Token(woosh.NAME, 'exc', 1134, 15, 1134, 18),
woosh.Token(woosh.OP, ':', 1134, 18, 1134, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1134, 19, 1135, 0),
woosh.Token(woosh.INDENT, ' ', 1135, 0, 1135, 12),
woosh.Token(woosh.NAME, 'exc', 1135, 12, 1135, 15),
woosh.Token(woosh.OP, '=', 1135, 16, 1135, 17),
woosh.Token(woosh.NAME, 'ParsingError', 1135, 18, 1135, 30),
woosh.Token(woosh.OP, '(', 1135, 30, 1135, 31),
woosh.Token(woosh.NAME, 'fpname', 1135, 31, 1135, 37),
woosh.Token(woosh.OP, ')', 1135, 37, 1135, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1135, 38, 1136, 0),
woosh.Token(woosh.DEDENT, ' ', 1136, 0, 1136, 8),
woosh.Token(woosh.NAME, 'exc', 1136, 8, 1136, 11),
woosh.Token(woosh.OP, '.', 1136, 11, 1136, 12),
woosh.Token(woosh.NAME, 'append', 1136, 12, 1136, 18),
woosh.Token(woosh.OP, '(', 1136, 18, 1136, 19),
woosh.Token(woosh.NAME, 'lineno', 1136, 19, 1136, 25),
woosh.Token(woosh.OP, ',', 1136, 25, 1136, 26),
woosh.Token(woosh.NAME, 'repr', 1136, 27, 1136, 31),
woosh.Token(woosh.OP, '(', 1136, 31, 1136, 32),
woosh.Token(woosh.NAME, 'line', 1136, 32, 1136, 36),
woosh.Token(woosh.OP, ')', 1136, 36, 1136, 37),
woosh.Token(woosh.OP, ')', 1136, 37, 1136, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1136, 38, 1137, 0),
woosh.Token(woosh.NAME, 'return', 1137, 8, 1137, 14),
woosh.Token(woosh.NAME, 'exc', 1137, 15, 1137, 18),
woosh.Token(woosh.NEWLINE, '\r\n', 1137, 18, 1138, 0),
woosh.Token(woosh.DEDENT, ' ', 1139, 0, 1139, 4),
woosh.Token(woosh.NAME, 'def', 1139, 4, 1139, 7),
woosh.Token(woosh.NAME, '_unify_values', 1139, 8, 1139, 21),
woosh.Token(woosh.OP, '(', 1139, 21, 1139, 22),
woosh.Token(woosh.NAME, 'self', 1139, 22, 1139, 26),
woosh.Token(woosh.OP, ',', 1139, 26, 1139, 27),
woosh.Token(woosh.NAME, 'section', 1139, 28, 1139, 35),
woosh.Token(woosh.OP, ',', 1139, 35, 1139, 36),
woosh.Token(woosh.NAME, 'vars', 1139, 37, 1139, 41),
woosh.Token(woosh.OP, ')', 1139, 41, 1139, 42),
woosh.Token(woosh.OP, ':', 1139, 42, 1139, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1139, 43, 1140, 0),
woosh.Token(woosh.INDENT, ' ', 1140, 0, 1140, 8),
woosh.Token(woosh.STRING, '"""Create a sequence of lookups with \'vars\' taking priority over\r\n the \'section\' which takes priority over the DEFAULTSECT.\r\n\r\n """', 1140, 8, 1143, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 1143, 11, 1144, 0),
woosh.Token(woosh.NAME, 'sectiondict', 1144, 8, 1144, 19),
woosh.Token(woosh.OP, '=', 1144, 20, 1144, 21),
woosh.Token(woosh.OP, '{', 1144, 22, 1144, 23),
woosh.Token(woosh.OP, '}', 1144, 23, 1144, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1144, 24, 1145, 0),
woosh.Token(woosh.NAME, 'try', 1145, 8, 1145, 11),
woosh.Token(woosh.OP, ':', 1145, 11, 1145, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1145, 12, 1146, 0),
woosh.Token(woosh.INDENT, ' ', 1146, 0, 1146, 12),
woosh.Token(woosh.NAME, 'sectiondict', 1146, 12, 1146, 23),
woosh.Token(woosh.OP, '=', 1146, 24, 1146, 25),
woosh.Token(woosh.NAME, 'self', 1146, 26, 1146, 30),
woosh.Token(woosh.OP, '.', 1146, 30, 1146, 31),
woosh.Token(woosh.NAME, '_sections', 1146, 31, 1146, 40),
woosh.Token(woosh.OP, '[', 1146, 40, 1146, 41),
woosh.Token(woosh.NAME, 'section', 1146, 41, 1146, 48),
woosh.Token(woosh.OP, ']', 1146, 48, 1146, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1146, 49, 1147, 0),
woosh.Token(woosh.DEDENT, ' ', 1147, 0, 1147, 8),
woosh.Token(woosh.NAME, 'except', 1147, 8, 1147, 14),
woosh.Token(woosh.NAME, 'KeyError', 1147, 15, 1147, 23),
woosh.Token(woosh.OP, ':', 1147, 23, 1147, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1147, 24, 1148, 0),
woosh.Token(woosh.INDENT, ' ', 1148, 0, 1148, 12),
woosh.Token(woosh.NAME, 'if', 1148, 12, 1148, 14),
woosh.Token(woosh.NAME, 'section', 1148, 15, 1148, 22),
woosh.Token(woosh.OP, '!=', 1148, 23, 1148, 25),
woosh.Token(woosh.NAME, 'self', 1148, 26, 1148, 30),
woosh.Token(woosh.OP, '.', 1148, 30, 1148, 31),
woosh.Token(woosh.NAME, 'default_section', 1148, 31, 1148, 46),
woosh.Token(woosh.OP, ':', 1148, 46, 1148, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1148, 47, 1149, 0),
woosh.Token(woosh.INDENT, ' ', 1149, 0, 1149, 16),
woosh.Token(woosh.NAME, 'raise', 1149, 16, 1149, 21),
woosh.Token(woosh.NAME, 'NoSectionError', 1149, 22, 1149, 36),
woosh.Token(woosh.OP, '(', 1149, 36, 1149, 37),
woosh.Token(woosh.NAME, 'section', 1149, 37, 1149, 44),
woosh.Token(woosh.OP, ')', 1149, 44, 1149, 45),
woosh.Token(woosh.NAME, 'from', 1149, 46, 1149, 50),
woosh.Token(woosh.NAME, 'None', 1149, 51, 1149, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1149, 55, 1150, 0),
woosh.Token(woosh.COMMENT, '# Update with the entry specific variables', 1150, 8, 1150, 50),
woosh.Token(woosh.DEDENT, ' ', 1151, 0, 1151, 8),
woosh.Token(woosh.DEDENT, '', 1151, 8, 1151, 8),
woosh.Token(woosh.NAME, 'vardict', 1151, 8, 1151, 15),
woosh.Token(woosh.OP, '=', 1151, 16, 1151, 17),
woosh.Token(woosh.OP, '{', 1151, 18, 1151, 19),
woosh.Token(woosh.OP, '}', 1151, 19, 1151, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1151, 20, 1152, 0),
woosh.Token(woosh.NAME, 'if', 1152, 8, 1152, 10),
woosh.Token(woosh.NAME, 'vars', 1152, 11, 1152, 15),
woosh.Token(woosh.OP, ':', 1152, 15, 1152, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1152, 16, 1153, 0),
woosh.Token(woosh.INDENT, ' ', 1153, 0, 1153, 12),
woosh.Token(woosh.NAME, 'for', 1153, 12, 1153, 15),
woosh.Token(woosh.NAME, 'key', 1153, 16, 1153, 19),
woosh.Token(woosh.OP, ',', 1153, 19, 1153, 20),
woosh.Token(woosh.NAME, 'value', 1153, 21, 1153, 26),
woosh.Token(woosh.NAME, 'in', 1153, 27, 1153, 29),
woosh.Token(woosh.NAME, 'vars', 1153, 30, 1153, 34),
woosh.Token(woosh.OP, '.', 1153, 34, 1153, 35),
woosh.Token(woosh.NAME, 'items', 1153, 35, 1153, 40),
woosh.Token(woosh.OP, '(', 1153, 40, 1153, 41),
woosh.Token(woosh.OP, ')', 1153, 41, 1153, 42),
woosh.Token(woosh.OP, ':', 1153, 42, 1153, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1153, 43, 1154, 0),
woosh.Token(woosh.INDENT, ' ', 1154, 0, 1154, 16),
woosh.Token(woosh.NAME, 'if', 1154, 16, 1154, 18),
woosh.Token(woosh.NAME, 'value', 1154, 19, 1154, 24),
woosh.Token(woosh.NAME, 'is', 1154, 25, 1154, 27),
woosh.Token(woosh.NAME, 'not', 1154, 28, 1154, 31),
woosh.Token(woosh.NAME, 'None', 1154, 32, 1154, 36),
woosh.Token(woosh.OP, ':', 1154, 36, 1154, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1154, 37, 1155, 0),
woosh.Token(woosh.INDENT, ' ', 1155, 0, 1155, 20),
woosh.Token(woosh.NAME, 'value', 1155, 20, 1155, 25),
woosh.Token(woosh.OP, '=', 1155, 26, 1155, 27),
woosh.Token(woosh.NAME, 'str', 1155, 28, 1155, 31),
woosh.Token(woosh.OP, '(', 1155, 31, 1155, 32),
woosh.Token(woosh.NAME, 'value', 1155, 32, 1155, 37),
woosh.Token(woosh.OP, ')', 1155, 37, 1155, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1155, 38, 1156, 0),
woosh.Token(woosh.DEDENT, ' ', 1156, 0, 1156, 16),
woosh.Token(woosh.NAME, 'vardict', 1156, 16, 1156, 23),
woosh.Token(woosh.OP, '[', 1156, 23, 1156, 24),
woosh.Token(woosh.NAME, 'self', 1156, 24, 1156, 28),
woosh.Token(woosh.OP, '.', 1156, 28, 1156, 29),
woosh.Token(woosh.NAME, 'optionxform', 1156, 29, 1156, 40),
woosh.Token(woosh.OP, '(', 1156, 40, 1156, 41),
woosh.Token(woosh.NAME, 'key', 1156, 41, 1156, 44),
woosh.Token(woosh.OP, ')', 1156, 44, 1156, 45),
woosh.Token(woosh.OP, ']', 1156, 45, 1156, 46),
woosh.Token(woosh.OP, '=', 1156, 47, 1156, 48),
woosh.Token(woosh.NAME, 'value', 1156, 49, 1156, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1156, 54, 1157, 0),
woosh.Token(woosh.DEDENT, ' ', 1157, 0, 1157, 8),
woosh.Token(woosh.DEDENT, '', 1157, 8, 1157, 8),
woosh.Token(woosh.NAME, 'return', 1157, 8, 1157, 14),
woosh.Token(woosh.NAME, '_ChainMap', 1157, 15, 1157, 24),
woosh.Token(woosh.OP, '(', 1157, 24, 1157, 25),
woosh.Token(woosh.NAME, 'vardict', 1157, 25, 1157, 32),
woosh.Token(woosh.OP, ',', 1157, 32, 1157, 33),
woosh.Token(woosh.NAME, 'sectiondict', 1157, 34, 1157, 45),
woosh.Token(woosh.OP, ',', 1157, 45, 1157, 46),
woosh.Token(woosh.NAME, 'self', 1157, 47, 1157, 51),
woosh.Token(woosh.OP, '.', 1157, 51, 1157, 52),
woosh.Token(woosh.NAME, '_defaults', 1157, 52, 1157, 61),
woosh.Token(woosh.OP, ')', 1157, 61, 1157, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1157, 62, 1158, 0),
woosh.Token(woosh.DEDENT, ' ', 1159, 0, 1159, 4),
woosh.Token(woosh.NAME, 'def', 1159, 4, 1159, 7),
woosh.Token(woosh.NAME, '_convert_to_boolean', 1159, 8, 1159, 27),
woosh.Token(woosh.OP, '(', 1159, 27, 1159, 28),
woosh.Token(woosh.NAME, 'self', 1159, 28, 1159, 32),
woosh.Token(woosh.OP, ',', 1159, 32, 1159, 33),
woosh.Token(woosh.NAME, 'value', 1159, 34, 1159, 39),
woosh.Token(woosh.OP, ')', 1159, 39, 1159, 40),
woosh.Token(woosh.OP, ':', 1159, 40, 1159, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1159, 41, 1160, 0),
woosh.Token(woosh.INDENT, ' ', 1160, 0, 1160, 8),
woosh.Token(woosh.STRING, '"""Return a boolean value translating from other types if necessary.\r\n """', 1160, 8, 1161, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 1161, 11, 1162, 0),
woosh.Token(woosh.NAME, 'if', 1162, 8, 1162, 10),
woosh.Token(woosh.NAME, 'value', 1162, 11, 1162, 16),
woosh.Token(woosh.OP, '.', 1162, 16, 1162, 17),
woosh.Token(woosh.NAME, 'lower', 1162, 17, 1162, 22),
woosh.Token(woosh.OP, '(', 1162, 22, 1162, 23),
woosh.Token(woosh.OP, ')', 1162, 23, 1162, 24),
woosh.Token(woosh.NAME, 'not', 1162, 25, 1162, 28),
woosh.Token(woosh.NAME, 'in', 1162, 29, 1162, 31),
woosh.Token(woosh.NAME, 'self', 1162, 32, 1162, 36),
woosh.Token(woosh.OP, '.', 1162, 36, 1162, 37),
woosh.Token(woosh.NAME, 'BOOLEAN_STATES', 1162, 37, 1162, 51),
woosh.Token(woosh.OP, ':', 1162, 51, 1162, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1162, 52, 1163, 0),
woosh.Token(woosh.INDENT, ' ', 1163, 0, 1163, 12),
woosh.Token(woosh.NAME, 'raise', 1163, 12, 1163, 17),
woosh.Token(woosh.NAME, 'ValueError', 1163, 18, 1163, 28),
woosh.Token(woosh.OP, '(', 1163, 28, 1163, 29),
woosh.Token(woosh.STRING, "'Not a boolean: %s'", 1163, 29, 1163, 48),
woosh.Token(woosh.OP, '%', 1163, 49, 1163, 50),
woosh.Token(woosh.NAME, 'value', 1163, 51, 1163, 56),
woosh.Token(woosh.OP, ')', 1163, 56, 1163, 57),
woosh.Token(woosh.NEWLINE, '\r\n', 1163, 57, 1164, 0),
woosh.Token(woosh.DEDENT, ' ', 1164, 0, 1164, 8),
woosh.Token(woosh.NAME, 'return', 1164, 8, 1164, 14),
woosh.Token(woosh.NAME, 'self', 1164, 15, 1164, 19),
woosh.Token(woosh.OP, '.', 1164, 19, 1164, 20),
woosh.Token(woosh.NAME, 'BOOLEAN_STATES', 1164, 20, 1164, 34),
woosh.Token(woosh.OP, '[', 1164, 34, 1164, 35),
woosh.Token(woosh.NAME, 'value', 1164, 35, 1164, 40),
woosh.Token(woosh.OP, '.', 1164, 40, 1164, 41),
woosh.Token(woosh.NAME, 'lower', 1164, 41, 1164, 46),
woosh.Token(woosh.OP, '(', 1164, 46, 1164, 47),
woosh.Token(woosh.OP, ')', 1164, 47, 1164, 48),
woosh.Token(woosh.OP, ']', 1164, 48, 1164, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1164, 49, 1165, 0),
woosh.Token(woosh.DEDENT, ' ', 1166, 0, 1166, 4),
woosh.Token(woosh.NAME, 'def', 1166, 4, 1166, 7),
woosh.Token(woosh.NAME, '_validate_value_types', 1166, 8, 1166, 29),
woosh.Token(woosh.OP, '(', 1166, 29, 1166, 30),
woosh.Token(woosh.NAME, 'self', 1166, 30, 1166, 34),
woosh.Token(woosh.OP, ',', 1166, 34, 1166, 35),
woosh.Token(woosh.OP, '*', 1166, 36, 1166, 37),
woosh.Token(woosh.OP, ',', 1166, 37, 1166, 38),
woosh.Token(woosh.NAME, 'section', 1166, 39, 1166, 46),
woosh.Token(woosh.OP, '=', 1166, 46, 1166, 47),
woosh.Token(woosh.STRING, '""', 1166, 47, 1166, 49),
woosh.Token(woosh.OP, ',', 1166, 49, 1166, 50),
woosh.Token(woosh.NAME, 'option', 1166, 51, 1166, 57),
woosh.Token(woosh.OP, '=', 1166, 57, 1166, 58),
woosh.Token(woosh.STRING, '""', 1166, 58, 1166, 60),
woosh.Token(woosh.OP, ',', 1166, 60, 1166, 61),
woosh.Token(woosh.NAME, 'value', 1166, 62, 1166, 67),
woosh.Token(woosh.OP, '=', 1166, 67, 1166, 68),
woosh.Token(woosh.STRING, '""', 1166, 68, 1166, 70),
woosh.Token(woosh.OP, ')', 1166, 70, 1166, 71),
woosh.Token(woosh.OP, ':', 1166, 71, 1166, 72),
woosh.Token(woosh.NEWLINE, '\r\n', 1166, 72, 1167, 0),
woosh.Token(woosh.INDENT, ' ', 1167, 0, 1167, 8),
woosh.Token(woosh.STRING, '"""Raises a TypeError for non-string values.\r\n\r\n The only legal non-string value if we allow valueless\r\n options is None, so we need to check if the value is a\r\n string if:\r\n - we do not allow valueless options, or\r\n - we allow valueless options but the value is not None\r\n\r\n For compatibility reasons this method is not used in classic set()\r\n for RawConfigParsers. It is invoked in every case for mapping protocol\r\n access and in ConfigParser.set().\r\n """', 1167, 8, 1178, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 1178, 11, 1179, 0),
woosh.Token(woosh.NAME, 'if', 1179, 8, 1179, 10),
woosh.Token(woosh.NAME, 'not', 1179, 11, 1179, 14),
woosh.Token(woosh.NAME, 'isinstance', 1179, 15, 1179, 25),
woosh.Token(woosh.OP, '(', 1179, 25, 1179, 26),
woosh.Token(woosh.NAME, 'section', 1179, 26, 1179, 33),
woosh.Token(woosh.OP, ',', 1179, 33, 1179, 34),
woosh.Token(woosh.NAME, 'str', 1179, 35, 1179, 38),
woosh.Token(woosh.OP, ')', 1179, 38, 1179, 39),
woosh.Token(woosh.OP, ':', 1179, 39, 1179, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1179, 40, 1180, 0),
woosh.Token(woosh.INDENT, ' ', 1180, 0, 1180, 12),
woosh.Token(woosh.NAME, 'raise', 1180, 12, 1180, 17),
woosh.Token(woosh.NAME, 'TypeError', 1180, 18, 1180, 27),
woosh.Token(woosh.OP, '(', 1180, 27, 1180, 28),
woosh.Token(woosh.STRING, '"section names must be strings"', 1180, 28, 1180, 59),
woosh.Token(woosh.OP, ')', 1180, 59, 1180, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1180, 60, 1181, 0),
woosh.Token(woosh.DEDENT, ' ', 1181, 0, 1181, 8),
woosh.Token(woosh.NAME, 'if', 1181, 8, 1181, 10),
woosh.Token(woosh.NAME, 'not', 1181, 11, 1181, 14),
woosh.Token(woosh.NAME, 'isinstance', 1181, 15, 1181, 25),
woosh.Token(woosh.OP, '(', 1181, 25, 1181, 26),
woosh.Token(woosh.NAME, 'option', 1181, 26, 1181, 32),
woosh.Token(woosh.OP, ',', 1181, 32, 1181, 33),
woosh.Token(woosh.NAME, 'str', 1181, 34, 1181, 37),
woosh.Token(woosh.OP, ')', 1181, 37, 1181, 38),
woosh.Token(woosh.OP, ':', 1181, 38, 1181, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1181, 39, 1182, 0),
woosh.Token(woosh.INDENT, ' ', 1182, 0, 1182, 12),
woosh.Token(woosh.NAME, 'raise', 1182, 12, 1182, 17),
woosh.Token(woosh.NAME, 'TypeError', 1182, 18, 1182, 27),
woosh.Token(woosh.OP, '(', 1182, 27, 1182, 28),
woosh.Token(woosh.STRING, '"option keys must be strings"', 1182, 28, 1182, 57),
woosh.Token(woosh.OP, ')', 1182, 57, 1182, 58),
woosh.Token(woosh.NEWLINE, '\r\n', 1182, 58, 1183, 0),
woosh.Token(woosh.DEDENT, ' ', 1183, 0, 1183, 8),
woosh.Token(woosh.NAME, 'if', 1183, 8, 1183, 10),
woosh.Token(woosh.NAME, 'not', 1183, 11, 1183, 14),
woosh.Token(woosh.NAME, 'self', 1183, 15, 1183, 19),
woosh.Token(woosh.OP, '.', 1183, 19, 1183, 20),
woosh.Token(woosh.NAME, '_allow_no_value', 1183, 20, 1183, 35),
woosh.Token(woosh.NAME, 'or', 1183, 36, 1183, 38),
woosh.Token(woosh.NAME, 'value', 1183, 39, 1183, 44),
woosh.Token(woosh.OP, ':', 1183, 44, 1183, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1183, 45, 1184, 0),
woosh.Token(woosh.INDENT, ' ', 1184, 0, 1184, 12),
woosh.Token(woosh.NAME, 'if', 1184, 12, 1184, 14),
woosh.Token(woosh.NAME, 'not', 1184, 15, 1184, 18),
woosh.Token(woosh.NAME, 'isinstance', 1184, 19, 1184, 29),
woosh.Token(woosh.OP, '(', 1184, 29, 1184, 30),
woosh.Token(woosh.NAME, 'value', 1184, 30, 1184, 35),
woosh.Token(woosh.OP, ',', 1184, 35, 1184, 36),
woosh.Token(woosh.NAME, 'str', 1184, 37, 1184, 40),
woosh.Token(woosh.OP, ')', 1184, 40, 1184, 41),
woosh.Token(woosh.OP, ':', 1184, 41, 1184, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1184, 42, 1185, 0),
woosh.Token(woosh.INDENT, ' ', 1185, 0, 1185, 16),
woosh.Token(woosh.NAME, 'raise', 1185, 16, 1185, 21),
woosh.Token(woosh.NAME, 'TypeError', 1185, 22, 1185, 31),
woosh.Token(woosh.OP, '(', 1185, 31, 1185, 32),
woosh.Token(woosh.STRING, '"option values must be strings"', 1185, 32, 1185, 63),
woosh.Token(woosh.OP, ')', 1185, 63, 1185, 64),
woosh.Token(woosh.NEWLINE, '\r\n', 1185, 64, 1186, 0),
woosh.Token(woosh.DEDENT, ' ', 1187, 0, 1187, 4),
woosh.Token(woosh.DEDENT, '', 1187, 4, 1187, 4),
woosh.Token(woosh.DEDENT, '', 1187, 4, 1187, 4),
woosh.Token(woosh.OP, '@', 1187, 4, 1187, 5),
woosh.Token(woosh.NAME, 'property', 1187, 5, 1187, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1187, 13, 1188, 0),
woosh.Token(woosh.NAME, 'def', 1188, 4, 1188, 7),
woosh.Token(woosh.NAME, 'converters', 1188, 8, 1188, 18),
woosh.Token(woosh.OP, '(', 1188, 18, 1188, 19),
woosh.Token(woosh.NAME, 'self', 1188, 19, 1188, 23),
woosh.Token(woosh.OP, ')', 1188, 23, 1188, 24),
woosh.Token(woosh.OP, ':', 1188, 24, 1188, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1188, 25, 1189, 0),
woosh.Token(woosh.INDENT, ' ', 1189, 0, 1189, 8),
woosh.Token(woosh.NAME, 'return', 1189, 8, 1189, 14),
woosh.Token(woosh.NAME, 'self', 1189, 15, 1189, 19),
woosh.Token(woosh.OP, '.', 1189, 19, 1189, 20),
woosh.Token(woosh.NAME, '_converters', 1189, 20, 1189, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1189, 31, 1190, 0),
woosh.Token(woosh.DEDENT, '', 1192, 0, 1192, 0),
woosh.Token(woosh.DEDENT, '', 1192, 0, 1192, 0),
woosh.Token(woosh.NAME, 'class', 1192, 0, 1192, 5),
woosh.Token(woosh.NAME, 'ConfigParser', 1192, 6, 1192, 18),
woosh.Token(woosh.OP, '(', 1192, 18, 1192, 19),
woosh.Token(woosh.NAME, 'RawConfigParser', 1192, 19, 1192, 34),
woosh.Token(woosh.OP, ')', 1192, 34, 1192, 35),
woosh.Token(woosh.OP, ':', 1192, 35, 1192, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1192, 36, 1193, 0),
woosh.Token(woosh.INDENT, ' ', 1193, 0, 1193, 4),
woosh.Token(woosh.STRING, '"""ConfigParser implementing interpolation."""', 1193, 4, 1193, 50),
woosh.Token(woosh.NEWLINE, '\r\n', 1193, 50, 1194, 0),
woosh.Token(woosh.NAME, '_DEFAULT_INTERPOLATION', 1195, 4, 1195, 26),
woosh.Token(woosh.OP, '=', 1195, 27, 1195, 28),
woosh.Token(woosh.NAME, 'BasicInterpolation', 1195, 29, 1195, 47),
woosh.Token(woosh.OP, '(', 1195, 47, 1195, 48),
woosh.Token(woosh.OP, ')', 1195, 48, 1195, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1195, 49, 1196, 0),
woosh.Token(woosh.NAME, 'def', 1197, 4, 1197, 7),
woosh.Token(woosh.NAME, 'set', 1197, 8, 1197, 11),
woosh.Token(woosh.OP, '(', 1197, 11, 1197, 12),
woosh.Token(woosh.NAME, 'self', 1197, 12, 1197, 16),
woosh.Token(woosh.OP, ',', 1197, 16, 1197, 17),
woosh.Token(woosh.NAME, 'section', 1197, 18, 1197, 25),
woosh.Token(woosh.OP, ',', 1197, 25, 1197, 26),
woosh.Token(woosh.NAME, 'option', 1197, 27, 1197, 33),
woosh.Token(woosh.OP, ',', 1197, 33, 1197, 34),
woosh.Token(woosh.NAME, 'value', 1197, 35, 1197, 40),
woosh.Token(woosh.OP, '=', 1197, 40, 1197, 41),
woosh.Token(woosh.NAME, 'None', 1197, 41, 1197, 45),
woosh.Token(woosh.OP, ')', 1197, 45, 1197, 46),
woosh.Token(woosh.OP, ':', 1197, 46, 1197, 47),
woosh.Token(woosh.NEWLINE, '\r\n', 1197, 47, 1198, 0),
woosh.Token(woosh.INDENT, ' ', 1198, 0, 1198, 8),
woosh.Token(woosh.STRING, '"""Set an option. Extends RawConfigParser.set by validating type and\r\n interpolation syntax on the value."""', 1198, 8, 1199, 45),
woosh.Token(woosh.NEWLINE, '\r\n', 1199, 45, 1200, 0),
woosh.Token(woosh.NAME, 'self', 1200, 8, 1200, 12),
woosh.Token(woosh.OP, '.', 1200, 12, 1200, 13),
woosh.Token(woosh.NAME, '_validate_value_types', 1200, 13, 1200, 34),
woosh.Token(woosh.OP, '(', 1200, 34, 1200, 35),
woosh.Token(woosh.NAME, 'option', 1200, 35, 1200, 41),
woosh.Token(woosh.OP, '=', 1200, 41, 1200, 42),
woosh.Token(woosh.NAME, 'option', 1200, 42, 1200, 48),
woosh.Token(woosh.OP, ',', 1200, 48, 1200, 49),
woosh.Token(woosh.NAME, 'value', 1200, 50, 1200, 55),
woosh.Token(woosh.OP, '=', 1200, 55, 1200, 56),
woosh.Token(woosh.NAME, 'value', 1200, 56, 1200, 61),
woosh.Token(woosh.OP, ')', 1200, 61, 1200, 62),
woosh.Token(woosh.NEWLINE, '\r\n', 1200, 62, 1201, 0),
woosh.Token(woosh.NAME, 'super', 1201, 8, 1201, 13),
woosh.Token(woosh.OP, '(', 1201, 13, 1201, 14),
woosh.Token(woosh.OP, ')', 1201, 14, 1201, 15),
woosh.Token(woosh.OP, '.', 1201, 15, 1201, 16),
woosh.Token(woosh.NAME, 'set', 1201, 16, 1201, 19),
woosh.Token(woosh.OP, '(', 1201, 19, 1201, 20),
woosh.Token(woosh.NAME, 'section', 1201, 20, 1201, 27),
woosh.Token(woosh.OP, ',', 1201, 27, 1201, 28),
woosh.Token(woosh.NAME, 'option', 1201, 29, 1201, 35),
woosh.Token(woosh.OP, ',', 1201, 35, 1201, 36),
woosh.Token(woosh.NAME, 'value', 1201, 37, 1201, 42),
woosh.Token(woosh.OP, ')', 1201, 42, 1201, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1201, 43, 1202, 0),
woosh.Token(woosh.DEDENT, ' ', 1203, 0, 1203, 4),
woosh.Token(woosh.NAME, 'def', 1203, 4, 1203, 7),
woosh.Token(woosh.NAME, 'add_section', 1203, 8, 1203, 19),
woosh.Token(woosh.OP, '(', 1203, 19, 1203, 20),
woosh.Token(woosh.NAME, 'self', 1203, 20, 1203, 24),
woosh.Token(woosh.OP, ',', 1203, 24, 1203, 25),
woosh.Token(woosh.NAME, 'section', 1203, 26, 1203, 33),
woosh.Token(woosh.OP, ')', 1203, 33, 1203, 34),
woosh.Token(woosh.OP, ':', 1203, 34, 1203, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1203, 35, 1204, 0),
woosh.Token(woosh.INDENT, ' ', 1204, 0, 1204, 8),
woosh.Token(woosh.STRING, '"""Create a new section in the configuration. Extends\r\n RawConfigParser.add_section by validating if the section name is\r\n a string."""', 1204, 8, 1206, 20),
woosh.Token(woosh.NEWLINE, '\r\n', 1206, 20, 1207, 0),
woosh.Token(woosh.NAME, 'self', 1207, 8, 1207, 12),
woosh.Token(woosh.OP, '.', 1207, 12, 1207, 13),
woosh.Token(woosh.NAME, '_validate_value_types', 1207, 13, 1207, 34),
woosh.Token(woosh.OP, '(', 1207, 34, 1207, 35),
woosh.Token(woosh.NAME, 'section', 1207, 35, 1207, 42),
woosh.Token(woosh.OP, '=', 1207, 42, 1207, 43),
woosh.Token(woosh.NAME, 'section', 1207, 43, 1207, 50),
woosh.Token(woosh.OP, ')', 1207, 50, 1207, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1207, 51, 1208, 0),
woosh.Token(woosh.NAME, 'super', 1208, 8, 1208, 13),
woosh.Token(woosh.OP, '(', 1208, 13, 1208, 14),
woosh.Token(woosh.OP, ')', 1208, 14, 1208, 15),
woosh.Token(woosh.OP, '.', 1208, 15, 1208, 16),
woosh.Token(woosh.NAME, 'add_section', 1208, 16, 1208, 27),
woosh.Token(woosh.OP, '(', 1208, 27, 1208, 28),
woosh.Token(woosh.NAME, 'section', 1208, 28, 1208, 35),
woosh.Token(woosh.OP, ')', 1208, 35, 1208, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1208, 36, 1209, 0),
woosh.Token(woosh.DEDENT, ' ', 1210, 0, 1210, 4),
woosh.Token(woosh.NAME, 'def', 1210, 4, 1210, 7),
woosh.Token(woosh.NAME, '_read_defaults', 1210, 8, 1210, 22),
woosh.Token(woosh.OP, '(', 1210, 22, 1210, 23),
woosh.Token(woosh.NAME, 'self', 1210, 23, 1210, 27),
woosh.Token(woosh.OP, ',', 1210, 27, 1210, 28),
woosh.Token(woosh.NAME, 'defaults', 1210, 29, 1210, 37),
woosh.Token(woosh.OP, ')', 1210, 37, 1210, 38),
woosh.Token(woosh.OP, ':', 1210, 38, 1210, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1210, 39, 1211, 0),
woosh.Token(woosh.INDENT, ' ', 1211, 0, 1211, 8),
woosh.Token(woosh.STRING, '"""Reads the defaults passed in the initializer, implicitly converting\r\n values to strings like the rest of the API.\r\n\r\n Does not perform interpolation for backwards compatibility.\r\n """', 1211, 8, 1215, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 1215, 11, 1216, 0),
woosh.Token(woosh.NAME, 'try', 1216, 8, 1216, 11),
woosh.Token(woosh.OP, ':', 1216, 11, 1216, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1216, 12, 1217, 0),
woosh.Token(woosh.INDENT, ' ', 1217, 0, 1217, 12),
woosh.Token(woosh.NAME, 'hold_interpolation', 1217, 12, 1217, 30),
woosh.Token(woosh.OP, '=', 1217, 31, 1217, 32),
woosh.Token(woosh.NAME, 'self', 1217, 33, 1217, 37),
woosh.Token(woosh.OP, '.', 1217, 37, 1217, 38),
woosh.Token(woosh.NAME, '_interpolation', 1217, 38, 1217, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1217, 52, 1218, 0),
woosh.Token(woosh.NAME, 'self', 1218, 12, 1218, 16),
woosh.Token(woosh.OP, '.', 1218, 16, 1218, 17),
woosh.Token(woosh.NAME, '_interpolation', 1218, 17, 1218, 31),
woosh.Token(woosh.OP, '=', 1218, 32, 1218, 33),
woosh.Token(woosh.NAME, 'Interpolation', 1218, 34, 1218, 47),
woosh.Token(woosh.OP, '(', 1218, 47, 1218, 48),
woosh.Token(woosh.OP, ')', 1218, 48, 1218, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1218, 49, 1219, 0),
woosh.Token(woosh.NAME, 'self', 1219, 12, 1219, 16),
woosh.Token(woosh.OP, '.', 1219, 16, 1219, 17),
woosh.Token(woosh.NAME, 'read_dict', 1219, 17, 1219, 26),
woosh.Token(woosh.OP, '(', 1219, 26, 1219, 27),
woosh.Token(woosh.OP, '{', 1219, 27, 1219, 28),
woosh.Token(woosh.NAME, 'self', 1219, 28, 1219, 32),
woosh.Token(woosh.OP, '.', 1219, 32, 1219, 33),
woosh.Token(woosh.NAME, 'default_section', 1219, 33, 1219, 48),
woosh.Token(woosh.OP, ':', 1219, 48, 1219, 49),
woosh.Token(woosh.NAME, 'defaults', 1219, 50, 1219, 58),
woosh.Token(woosh.OP, '}', 1219, 58, 1219, 59),
woosh.Token(woosh.OP, ')', 1219, 59, 1219, 60),
woosh.Token(woosh.NEWLINE, '\r\n', 1219, 60, 1220, 0),
woosh.Token(woosh.DEDENT, ' ', 1220, 0, 1220, 8),
woosh.Token(woosh.NAME, 'finally', 1220, 8, 1220, 15),
woosh.Token(woosh.OP, ':', 1220, 15, 1220, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1220, 16, 1221, 0),
woosh.Token(woosh.INDENT, ' ', 1221, 0, 1221, 12),
woosh.Token(woosh.NAME, 'self', 1221, 12, 1221, 16),
woosh.Token(woosh.OP, '.', 1221, 16, 1221, 17),
woosh.Token(woosh.NAME, '_interpolation', 1221, 17, 1221, 31),
woosh.Token(woosh.OP, '=', 1221, 32, 1221, 33),
woosh.Token(woosh.NAME, 'hold_interpolation', 1221, 34, 1221, 52),
woosh.Token(woosh.NEWLINE, '\r\n', 1221, 52, 1222, 0),
woosh.Token(woosh.DEDENT, '', 1224, 0, 1224, 0),
woosh.Token(woosh.DEDENT, '', 1224, 0, 1224, 0),
woosh.Token(woosh.DEDENT, '', 1224, 0, 1224, 0),
woosh.Token(woosh.NAME, 'class', 1224, 0, 1224, 5),
woosh.Token(woosh.NAME, 'SafeConfigParser', 1224, 6, 1224, 22),
woosh.Token(woosh.OP, '(', 1224, 22, 1224, 23),
woosh.Token(woosh.NAME, 'ConfigParser', 1224, 23, 1224, 35),
woosh.Token(woosh.OP, ')', 1224, 35, 1224, 36),
woosh.Token(woosh.OP, ':', 1224, 36, 1224, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1224, 37, 1225, 0),
woosh.Token(woosh.INDENT, ' ', 1225, 0, 1225, 4),
woosh.Token(woosh.STRING, '"""ConfigParser alias for backwards compatibility purposes."""', 1225, 4, 1225, 66),
woosh.Token(woosh.NEWLINE, '\r\n', 1225, 66, 1226, 0),
woosh.Token(woosh.NAME, 'def', 1227, 4, 1227, 7),
woosh.Token(woosh.NAME, '__init__', 1227, 8, 1227, 16),
woosh.Token(woosh.OP, '(', 1227, 16, 1227, 17),
woosh.Token(woosh.NAME, 'self', 1227, 17, 1227, 21),
woosh.Token(woosh.OP, ',', 1227, 21, 1227, 22),
woosh.Token(woosh.OP, '*', 1227, 23, 1227, 24),
woosh.Token(woosh.NAME, 'args', 1227, 24, 1227, 28),
woosh.Token(woosh.OP, ',', 1227, 28, 1227, 29),
woosh.Token(woosh.OP, '**', 1227, 30, 1227, 32),
woosh.Token(woosh.NAME, 'kwargs', 1227, 32, 1227, 38),
woosh.Token(woosh.OP, ')', 1227, 38, 1227, 39),
woosh.Token(woosh.OP, ':', 1227, 39, 1227, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1227, 40, 1228, 0),
woosh.Token(woosh.INDENT, ' ', 1228, 0, 1228, 8),
woosh.Token(woosh.NAME, 'super', 1228, 8, 1228, 13),
woosh.Token(woosh.OP, '(', 1228, 13, 1228, 14),
woosh.Token(woosh.OP, ')', 1228, 14, 1228, 15),
woosh.Token(woosh.OP, '.', 1228, 15, 1228, 16),
woosh.Token(woosh.NAME, '__init__', 1228, 16, 1228, 24),
woosh.Token(woosh.OP, '(', 1228, 24, 1228, 25),
woosh.Token(woosh.OP, '*', 1228, 25, 1228, 26),
woosh.Token(woosh.NAME, 'args', 1228, 26, 1228, 30),
woosh.Token(woosh.OP, ',', 1228, 30, 1228, 31),
woosh.Token(woosh.OP, '**', 1228, 32, 1228, 34),
woosh.Token(woosh.NAME, 'kwargs', 1228, 34, 1228, 40),
woosh.Token(woosh.OP, ')', 1228, 40, 1228, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1228, 41, 1229, 0),
woosh.Token(woosh.NAME, 'warnings', 1229, 8, 1229, 16),
woosh.Token(woosh.OP, '.', 1229, 16, 1229, 17),
woosh.Token(woosh.NAME, 'warn', 1229, 17, 1229, 21),
woosh.Token(woosh.OP, '(', 1229, 21, 1229, 22),
woosh.Token(woosh.STRING, '"The SafeConfigParser class has been renamed to ConfigParser "', 1230, 12, 1230, 74),
woosh.Token(woosh.STRING, '"in Python 3.2. This alias will be removed in future versions."', 1231, 12, 1231, 75),
woosh.Token(woosh.STRING, '" Use ConfigParser directly instead."', 1232, 12, 1232, 49),
woosh.Token(woosh.OP, ',', 1232, 49, 1232, 50),
woosh.Token(woosh.NAME, 'DeprecationWarning', 1233, 12, 1233, 30),
woosh.Token(woosh.OP, ',', 1233, 30, 1233, 31),
woosh.Token(woosh.NAME, 'stacklevel', 1233, 32, 1233, 42),
woosh.Token(woosh.OP, '=', 1233, 42, 1233, 43),
woosh.Token(woosh.NUMBER, '2', 1233, 43, 1233, 44),
woosh.Token(woosh.OP, ')', 1234, 8, 1234, 9),
woosh.Token(woosh.NEWLINE, '\r\n', 1234, 9, 1235, 0),
woosh.Token(woosh.DEDENT, '', 1237, 0, 1237, 0),
woosh.Token(woosh.DEDENT, '', 1237, 0, 1237, 0),
woosh.Token(woosh.NAME, 'class', 1237, 0, 1237, 5),
woosh.Token(woosh.NAME, 'SectionProxy', 1237, 6, 1237, 18),
woosh.Token(woosh.OP, '(', 1237, 18, 1237, 19),
woosh.Token(woosh.NAME, 'MutableMapping', 1237, 19, 1237, 33),
woosh.Token(woosh.OP, ')', 1237, 33, 1237, 34),
woosh.Token(woosh.OP, ':', 1237, 34, 1237, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1237, 35, 1238, 0),
woosh.Token(woosh.INDENT, ' ', 1238, 0, 1238, 4),
woosh.Token(woosh.STRING, '"""A proxy for a single section from a parser."""', 1238, 4, 1238, 53),
woosh.Token(woosh.NEWLINE, '\r\n', 1238, 53, 1239, 0),
woosh.Token(woosh.NAME, 'def', 1240, 4, 1240, 7),
woosh.Token(woosh.NAME, '__init__', 1240, 8, 1240, 16),
woosh.Token(woosh.OP, '(', 1240, 16, 1240, 17),
woosh.Token(woosh.NAME, 'self', 1240, 17, 1240, 21),
woosh.Token(woosh.OP, ',', 1240, 21, 1240, 22),
woosh.Token(woosh.NAME, 'parser', 1240, 23, 1240, 29),
woosh.Token(woosh.OP, ',', 1240, 29, 1240, 30),
woosh.Token(woosh.NAME, 'name', 1240, 31, 1240, 35),
woosh.Token(woosh.OP, ')', 1240, 35, 1240, 36),
woosh.Token(woosh.OP, ':', 1240, 36, 1240, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1240, 37, 1241, 0),
woosh.Token(woosh.INDENT, ' ', 1241, 0, 1241, 8),
woosh.Token(woosh.STRING, '"""Creates a view on a section of the specified `name` in `parser`."""', 1241, 8, 1241, 78),
woosh.Token(woosh.NEWLINE, '\r\n', 1241, 78, 1242, 0),
woosh.Token(woosh.NAME, 'self', 1242, 8, 1242, 12),
woosh.Token(woosh.OP, '.', 1242, 12, 1242, 13),
woosh.Token(woosh.NAME, '_parser', 1242, 13, 1242, 20),
woosh.Token(woosh.OP, '=', 1242, 21, 1242, 22),
woosh.Token(woosh.NAME, 'parser', 1242, 23, 1242, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1242, 29, 1243, 0),
woosh.Token(woosh.NAME, 'self', 1243, 8, 1243, 12),
woosh.Token(woosh.OP, '.', 1243, 12, 1243, 13),
woosh.Token(woosh.NAME, '_name', 1243, 13, 1243, 18),
woosh.Token(woosh.OP, '=', 1243, 19, 1243, 20),
woosh.Token(woosh.NAME, 'name', 1243, 21, 1243, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1243, 25, 1244, 0),
woosh.Token(woosh.NAME, 'for', 1244, 8, 1244, 11),
woosh.Token(woosh.NAME, 'conv', 1244, 12, 1244, 16),
woosh.Token(woosh.NAME, 'in', 1244, 17, 1244, 19),
woosh.Token(woosh.NAME, 'parser', 1244, 20, 1244, 26),
woosh.Token(woosh.OP, '.', 1244, 26, 1244, 27),
woosh.Token(woosh.NAME, 'converters', 1244, 27, 1244, 37),
woosh.Token(woosh.OP, ':', 1244, 37, 1244, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1244, 38, 1245, 0),
woosh.Token(woosh.INDENT, ' ', 1245, 0, 1245, 12),
woosh.Token(woosh.NAME, 'key', 1245, 12, 1245, 15),
woosh.Token(woosh.OP, '=', 1245, 16, 1245, 17),
woosh.Token(woosh.STRING, "'get'", 1245, 18, 1245, 23),
woosh.Token(woosh.OP, '+', 1245, 24, 1245, 25),
woosh.Token(woosh.NAME, 'conv', 1245, 26, 1245, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1245, 30, 1246, 0),
woosh.Token(woosh.NAME, 'getter', 1246, 12, 1246, 18),
woosh.Token(woosh.OP, '=', 1246, 19, 1246, 20),
woosh.Token(woosh.NAME, 'functools', 1246, 21, 1246, 30),
woosh.Token(woosh.OP, '.', 1246, 30, 1246, 31),
woosh.Token(woosh.NAME, 'partial', 1246, 31, 1246, 38),
woosh.Token(woosh.OP, '(', 1246, 38, 1246, 39),
woosh.Token(woosh.NAME, 'self', 1246, 39, 1246, 43),
woosh.Token(woosh.OP, '.', 1246, 43, 1246, 44),
woosh.Token(woosh.NAME, 'get', 1246, 44, 1246, 47),
woosh.Token(woosh.OP, ',', 1246, 47, 1246, 48),
woosh.Token(woosh.NAME, '_impl', 1246, 49, 1246, 54),
woosh.Token(woosh.OP, '=', 1246, 54, 1246, 55),
woosh.Token(woosh.NAME, 'getattr', 1246, 55, 1246, 62),
woosh.Token(woosh.OP, '(', 1246, 62, 1246, 63),
woosh.Token(woosh.NAME, 'parser', 1246, 63, 1246, 69),
woosh.Token(woosh.OP, ',', 1246, 69, 1246, 70),
woosh.Token(woosh.NAME, 'key', 1246, 71, 1246, 74),
woosh.Token(woosh.OP, ')', 1246, 74, 1246, 75),
woosh.Token(woosh.OP, ')', 1246, 75, 1246, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1246, 76, 1247, 0),
woosh.Token(woosh.NAME, 'setattr', 1247, 12, 1247, 19),
woosh.Token(woosh.OP, '(', 1247, 19, 1247, 20),
woosh.Token(woosh.NAME, 'self', 1247, 20, 1247, 24),
woosh.Token(woosh.OP, ',', 1247, 24, 1247, 25),
woosh.Token(woosh.NAME, 'key', 1247, 26, 1247, 29),
woosh.Token(woosh.OP, ',', 1247, 29, 1247, 30),
woosh.Token(woosh.NAME, 'getter', 1247, 31, 1247, 37),
woosh.Token(woosh.OP, ')', 1247, 37, 1247, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1247, 38, 1248, 0),
woosh.Token(woosh.DEDENT, ' ', 1249, 0, 1249, 4),
woosh.Token(woosh.DEDENT, '', 1249, 4, 1249, 4),
woosh.Token(woosh.NAME, 'def', 1249, 4, 1249, 7),
woosh.Token(woosh.NAME, '__repr__', 1249, 8, 1249, 16),
woosh.Token(woosh.OP, '(', 1249, 16, 1249, 17),
woosh.Token(woosh.NAME, 'self', 1249, 17, 1249, 21),
woosh.Token(woosh.OP, ')', 1249, 21, 1249, 22),
woosh.Token(woosh.OP, ':', 1249, 22, 1249, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1249, 23, 1250, 0),
woosh.Token(woosh.INDENT, ' ', 1250, 0, 1250, 8),
woosh.Token(woosh.NAME, 'return', 1250, 8, 1250, 14),
woosh.Token(woosh.STRING, "'<Section: {}>'", 1250, 15, 1250, 30),
woosh.Token(woosh.OP, '.', 1250, 30, 1250, 31),
woosh.Token(woosh.NAME, 'format', 1250, 31, 1250, 37),
woosh.Token(woosh.OP, '(', 1250, 37, 1250, 38),
woosh.Token(woosh.NAME, 'self', 1250, 38, 1250, 42),
woosh.Token(woosh.OP, '.', 1250, 42, 1250, 43),
woosh.Token(woosh.NAME, '_name', 1250, 43, 1250, 48),
woosh.Token(woosh.OP, ')', 1250, 48, 1250, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1250, 49, 1251, 0),
woosh.Token(woosh.DEDENT, ' ', 1252, 0, 1252, 4),
woosh.Token(woosh.NAME, 'def', 1252, 4, 1252, 7),
woosh.Token(woosh.NAME, '__getitem__', 1252, 8, 1252, 19),
woosh.Token(woosh.OP, '(', 1252, 19, 1252, 20),
woosh.Token(woosh.NAME, 'self', 1252, 20, 1252, 24),
woosh.Token(woosh.OP, ',', 1252, 24, 1252, 25),
woosh.Token(woosh.NAME, 'key', 1252, 26, 1252, 29),
woosh.Token(woosh.OP, ')', 1252, 29, 1252, 30),
woosh.Token(woosh.OP, ':', 1252, 30, 1252, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1252, 31, 1253, 0),
woosh.Token(woosh.INDENT, ' ', 1253, 0, 1253, 8),
woosh.Token(woosh.NAME, 'if', 1253, 8, 1253, 10),
woosh.Token(woosh.NAME, 'not', 1253, 11, 1253, 14),
woosh.Token(woosh.NAME, 'self', 1253, 15, 1253, 19),
woosh.Token(woosh.OP, '.', 1253, 19, 1253, 20),
woosh.Token(woosh.NAME, '_parser', 1253, 20, 1253, 27),
woosh.Token(woosh.OP, '.', 1253, 27, 1253, 28),
woosh.Token(woosh.NAME, 'has_option', 1253, 28, 1253, 38),
woosh.Token(woosh.OP, '(', 1253, 38, 1253, 39),
woosh.Token(woosh.NAME, 'self', 1253, 39, 1253, 43),
woosh.Token(woosh.OP, '.', 1253, 43, 1253, 44),
woosh.Token(woosh.NAME, '_name', 1253, 44, 1253, 49),
woosh.Token(woosh.OP, ',', 1253, 49, 1253, 50),
woosh.Token(woosh.NAME, 'key', 1253, 51, 1253, 54),
woosh.Token(woosh.OP, ')', 1253, 54, 1253, 55),
woosh.Token(woosh.OP, ':', 1253, 55, 1253, 56),
woosh.Token(woosh.NEWLINE, '\r\n', 1253, 56, 1254, 0),
woosh.Token(woosh.INDENT, ' ', 1254, 0, 1254, 12),
woosh.Token(woosh.NAME, 'raise', 1254, 12, 1254, 17),
woosh.Token(woosh.NAME, 'KeyError', 1254, 18, 1254, 26),
woosh.Token(woosh.OP, '(', 1254, 26, 1254, 27),
woosh.Token(woosh.NAME, 'key', 1254, 27, 1254, 30),
woosh.Token(woosh.OP, ')', 1254, 30, 1254, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1254, 31, 1255, 0),
woosh.Token(woosh.DEDENT, ' ', 1255, 0, 1255, 8),
woosh.Token(woosh.NAME, 'return', 1255, 8, 1255, 14),
woosh.Token(woosh.NAME, 'self', 1255, 15, 1255, 19),
woosh.Token(woosh.OP, '.', 1255, 19, 1255, 20),
woosh.Token(woosh.NAME, '_parser', 1255, 20, 1255, 27),
woosh.Token(woosh.OP, '.', 1255, 27, 1255, 28),
woosh.Token(woosh.NAME, 'get', 1255, 28, 1255, 31),
woosh.Token(woosh.OP, '(', 1255, 31, 1255, 32),
woosh.Token(woosh.NAME, 'self', 1255, 32, 1255, 36),
woosh.Token(woosh.OP, '.', 1255, 36, 1255, 37),
woosh.Token(woosh.NAME, '_name', 1255, 37, 1255, 42),
woosh.Token(woosh.OP, ',', 1255, 42, 1255, 43),
woosh.Token(woosh.NAME, 'key', 1255, 44, 1255, 47),
woosh.Token(woosh.OP, ')', 1255, 47, 1255, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1255, 48, 1256, 0),
woosh.Token(woosh.DEDENT, ' ', 1257, 0, 1257, 4),
woosh.Token(woosh.NAME, 'def', 1257, 4, 1257, 7),
woosh.Token(woosh.NAME, '__setitem__', 1257, 8, 1257, 19),
woosh.Token(woosh.OP, '(', 1257, 19, 1257, 20),
woosh.Token(woosh.NAME, 'self', 1257, 20, 1257, 24),
woosh.Token(woosh.OP, ',', 1257, 24, 1257, 25),
woosh.Token(woosh.NAME, 'key', 1257, 26, 1257, 29),
woosh.Token(woosh.OP, ',', 1257, 29, 1257, 30),
woosh.Token(woosh.NAME, 'value', 1257, 31, 1257, 36),
woosh.Token(woosh.OP, ')', 1257, 36, 1257, 37),
woosh.Token(woosh.OP, ':', 1257, 37, 1257, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1257, 38, 1258, 0),
woosh.Token(woosh.INDENT, ' ', 1258, 0, 1258, 8),
woosh.Token(woosh.NAME, 'self', 1258, 8, 1258, 12),
woosh.Token(woosh.OP, '.', 1258, 12, 1258, 13),
woosh.Token(woosh.NAME, '_parser', 1258, 13, 1258, 20),
woosh.Token(woosh.OP, '.', 1258, 20, 1258, 21),
woosh.Token(woosh.NAME, '_validate_value_types', 1258, 21, 1258, 42),
woosh.Token(woosh.OP, '(', 1258, 42, 1258, 43),
woosh.Token(woosh.NAME, 'option', 1258, 43, 1258, 49),
woosh.Token(woosh.OP, '=', 1258, 49, 1258, 50),
woosh.Token(woosh.NAME, 'key', 1258, 50, 1258, 53),
woosh.Token(woosh.OP, ',', 1258, 53, 1258, 54),
woosh.Token(woosh.NAME, 'value', 1258, 55, 1258, 60),
woosh.Token(woosh.OP, '=', 1258, 60, 1258, 61),
woosh.Token(woosh.NAME, 'value', 1258, 61, 1258, 66),
woosh.Token(woosh.OP, ')', 1258, 66, 1258, 67),
woosh.Token(woosh.NEWLINE, '\r\n', 1258, 67, 1259, 0),
woosh.Token(woosh.NAME, 'return', 1259, 8, 1259, 14),
woosh.Token(woosh.NAME, 'self', 1259, 15, 1259, 19),
woosh.Token(woosh.OP, '.', 1259, 19, 1259, 20),
woosh.Token(woosh.NAME, '_parser', 1259, 20, 1259, 27),
woosh.Token(woosh.OP, '.', 1259, 27, 1259, 28),
woosh.Token(woosh.NAME, 'set', 1259, 28, 1259, 31),
woosh.Token(woosh.OP, '(', 1259, 31, 1259, 32),
woosh.Token(woosh.NAME, 'self', 1259, 32, 1259, 36),
woosh.Token(woosh.OP, '.', 1259, 36, 1259, 37),
woosh.Token(woosh.NAME, '_name', 1259, 37, 1259, 42),
woosh.Token(woosh.OP, ',', 1259, 42, 1259, 43),
woosh.Token(woosh.NAME, 'key', 1259, 44, 1259, 47),
woosh.Token(woosh.OP, ',', 1259, 47, 1259, 48),
woosh.Token(woosh.NAME, 'value', 1259, 49, 1259, 54),
woosh.Token(woosh.OP, ')', 1259, 54, 1259, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1259, 55, 1260, 0),
woosh.Token(woosh.DEDENT, ' ', 1261, 0, 1261, 4),
woosh.Token(woosh.NAME, 'def', 1261, 4, 1261, 7),
woosh.Token(woosh.NAME, '__delitem__', 1261, 8, 1261, 19),
woosh.Token(woosh.OP, '(', 1261, 19, 1261, 20),
woosh.Token(woosh.NAME, 'self', 1261, 20, 1261, 24),
woosh.Token(woosh.OP, ',', 1261, 24, 1261, 25),
woosh.Token(woosh.NAME, 'key', 1261, 26, 1261, 29),
woosh.Token(woosh.OP, ')', 1261, 29, 1261, 30),
woosh.Token(woosh.OP, ':', 1261, 30, 1261, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1261, 31, 1262, 0),
woosh.Token(woosh.INDENT, ' ', 1262, 0, 1262, 8),
woosh.Token(woosh.NAME, 'if', 1262, 8, 1262, 10),
woosh.Token(woosh.NAME, 'not', 1262, 11, 1262, 14),
woosh.Token(woosh.OP, '(', 1262, 15, 1262, 16),
woosh.Token(woosh.NAME, 'self', 1262, 16, 1262, 20),
woosh.Token(woosh.OP, '.', 1262, 20, 1262, 21),
woosh.Token(woosh.NAME, '_parser', 1262, 21, 1262, 28),
woosh.Token(woosh.OP, '.', 1262, 28, 1262, 29),
woosh.Token(woosh.NAME, 'has_option', 1262, 29, 1262, 39),
woosh.Token(woosh.OP, '(', 1262, 39, 1262, 40),
woosh.Token(woosh.NAME, 'self', 1262, 40, 1262, 44),
woosh.Token(woosh.OP, '.', 1262, 44, 1262, 45),
woosh.Token(woosh.NAME, '_name', 1262, 45, 1262, 50),
woosh.Token(woosh.OP, ',', 1262, 50, 1262, 51),
woosh.Token(woosh.NAME, 'key', 1262, 52, 1262, 55),
woosh.Token(woosh.OP, ')', 1262, 55, 1262, 56),
woosh.Token(woosh.NAME, 'and', 1262, 57, 1262, 60),
woosh.Token(woosh.NAME, 'self', 1263, 16, 1263, 20),
woosh.Token(woosh.OP, '.', 1263, 20, 1263, 21),
woosh.Token(woosh.NAME, '_parser', 1263, 21, 1263, 28),
woosh.Token(woosh.OP, '.', 1263, 28, 1263, 29),
woosh.Token(woosh.NAME, 'remove_option', 1263, 29, 1263, 42),
woosh.Token(woosh.OP, '(', 1263, 42, 1263, 43),
woosh.Token(woosh.NAME, 'self', 1263, 43, 1263, 47),
woosh.Token(woosh.OP, '.', 1263, 47, 1263, 48),
woosh.Token(woosh.NAME, '_name', 1263, 48, 1263, 53),
woosh.Token(woosh.OP, ',', 1263, 53, 1263, 54),
woosh.Token(woosh.NAME, 'key', 1263, 55, 1263, 58),
woosh.Token(woosh.OP, ')', 1263, 58, 1263, 59),
woosh.Token(woosh.OP, ')', 1263, 59, 1263, 60),
woosh.Token(woosh.OP, ':', 1263, 60, 1263, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1263, 61, 1264, 0),
woosh.Token(woosh.INDENT, ' ', 1264, 0, 1264, 12),
woosh.Token(woosh.NAME, 'raise', 1264, 12, 1264, 17),
woosh.Token(woosh.NAME, 'KeyError', 1264, 18, 1264, 26),
woosh.Token(woosh.OP, '(', 1264, 26, 1264, 27),
woosh.Token(woosh.NAME, 'key', 1264, 27, 1264, 30),
woosh.Token(woosh.OP, ')', 1264, 30, 1264, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1264, 31, 1265, 0),
woosh.Token(woosh.DEDENT, ' ', 1266, 0, 1266, 4),
woosh.Token(woosh.DEDENT, '', 1266, 4, 1266, 4),
woosh.Token(woosh.NAME, 'def', 1266, 4, 1266, 7),
woosh.Token(woosh.NAME, '__contains__', 1266, 8, 1266, 20),
woosh.Token(woosh.OP, '(', 1266, 20, 1266, 21),
woosh.Token(woosh.NAME, 'self', 1266, 21, 1266, 25),
woosh.Token(woosh.OP, ',', 1266, 25, 1266, 26),
woosh.Token(woosh.NAME, 'key', 1266, 27, 1266, 30),
woosh.Token(woosh.OP, ')', 1266, 30, 1266, 31),
woosh.Token(woosh.OP, ':', 1266, 31, 1266, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1266, 32, 1267, 0),
woosh.Token(woosh.INDENT, ' ', 1267, 0, 1267, 8),
woosh.Token(woosh.NAME, 'return', 1267, 8, 1267, 14),
woosh.Token(woosh.NAME, 'self', 1267, 15, 1267, 19),
woosh.Token(woosh.OP, '.', 1267, 19, 1267, 20),
woosh.Token(woosh.NAME, '_parser', 1267, 20, 1267, 27),
woosh.Token(woosh.OP, '.', 1267, 27, 1267, 28),
woosh.Token(woosh.NAME, 'has_option', 1267, 28, 1267, 38),
woosh.Token(woosh.OP, '(', 1267, 38, 1267, 39),
woosh.Token(woosh.NAME, 'self', 1267, 39, 1267, 43),
woosh.Token(woosh.OP, '.', 1267, 43, 1267, 44),
woosh.Token(woosh.NAME, '_name', 1267, 44, 1267, 49),
woosh.Token(woosh.OP, ',', 1267, 49, 1267, 50),
woosh.Token(woosh.NAME, 'key', 1267, 51, 1267, 54),
woosh.Token(woosh.OP, ')', 1267, 54, 1267, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1267, 55, 1268, 0),
woosh.Token(woosh.DEDENT, ' ', 1269, 0, 1269, 4),
woosh.Token(woosh.NAME, 'def', 1269, 4, 1269, 7),
woosh.Token(woosh.NAME, '__len__', 1269, 8, 1269, 15),
woosh.Token(woosh.OP, '(', 1269, 15, 1269, 16),
woosh.Token(woosh.NAME, 'self', 1269, 16, 1269, 20),
woosh.Token(woosh.OP, ')', 1269, 20, 1269, 21),
woosh.Token(woosh.OP, ':', 1269, 21, 1269, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1269, 22, 1270, 0),
woosh.Token(woosh.INDENT, ' ', 1270, 0, 1270, 8),
woosh.Token(woosh.NAME, 'return', 1270, 8, 1270, 14),
woosh.Token(woosh.NAME, 'len', 1270, 15, 1270, 18),
woosh.Token(woosh.OP, '(', 1270, 18, 1270, 19),
woosh.Token(woosh.NAME, 'self', 1270, 19, 1270, 23),
woosh.Token(woosh.OP, '.', 1270, 23, 1270, 24),
woosh.Token(woosh.NAME, '_options', 1270, 24, 1270, 32),
woosh.Token(woosh.OP, '(', 1270, 32, 1270, 33),
woosh.Token(woosh.OP, ')', 1270, 33, 1270, 34),
woosh.Token(woosh.OP, ')', 1270, 34, 1270, 35),
woosh.Token(woosh.NEWLINE, '\r\n', 1270, 35, 1271, 0),
woosh.Token(woosh.DEDENT, ' ', 1272, 0, 1272, 4),
woosh.Token(woosh.NAME, 'def', 1272, 4, 1272, 7),
woosh.Token(woosh.NAME, '__iter__', 1272, 8, 1272, 16),
woosh.Token(woosh.OP, '(', 1272, 16, 1272, 17),
woosh.Token(woosh.NAME, 'self', 1272, 17, 1272, 21),
woosh.Token(woosh.OP, ')', 1272, 21, 1272, 22),
woosh.Token(woosh.OP, ':', 1272, 22, 1272, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1272, 23, 1273, 0),
woosh.Token(woosh.INDENT, ' ', 1273, 0, 1273, 8),
woosh.Token(woosh.NAME, 'return', 1273, 8, 1273, 14),
woosh.Token(woosh.NAME, 'self', 1273, 15, 1273, 19),
woosh.Token(woosh.OP, '.', 1273, 19, 1273, 20),
woosh.Token(woosh.NAME, '_options', 1273, 20, 1273, 28),
woosh.Token(woosh.OP, '(', 1273, 28, 1273, 29),
woosh.Token(woosh.OP, ')', 1273, 29, 1273, 30),
woosh.Token(woosh.OP, '.', 1273, 30, 1273, 31),
woosh.Token(woosh.NAME, '__iter__', 1273, 31, 1273, 39),
woosh.Token(woosh.OP, '(', 1273, 39, 1273, 40),
woosh.Token(woosh.OP, ')', 1273, 40, 1273, 41),
woosh.Token(woosh.NEWLINE, '\r\n', 1273, 41, 1274, 0),
woosh.Token(woosh.DEDENT, ' ', 1275, 0, 1275, 4),
woosh.Token(woosh.NAME, 'def', 1275, 4, 1275, 7),
woosh.Token(woosh.NAME, '_options', 1275, 8, 1275, 16),
woosh.Token(woosh.OP, '(', 1275, 16, 1275, 17),
woosh.Token(woosh.NAME, 'self', 1275, 17, 1275, 21),
woosh.Token(woosh.OP, ')', 1275, 21, 1275, 22),
woosh.Token(woosh.OP, ':', 1275, 22, 1275, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1275, 23, 1276, 0),
woosh.Token(woosh.INDENT, ' ', 1276, 0, 1276, 8),
woosh.Token(woosh.NAME, 'if', 1276, 8, 1276, 10),
woosh.Token(woosh.NAME, 'self', 1276, 11, 1276, 15),
woosh.Token(woosh.OP, '.', 1276, 15, 1276, 16),
woosh.Token(woosh.NAME, '_name', 1276, 16, 1276, 21),
woosh.Token(woosh.OP, '!=', 1276, 22, 1276, 24),
woosh.Token(woosh.NAME, 'self', 1276, 25, 1276, 29),
woosh.Token(woosh.OP, '.', 1276, 29, 1276, 30),
woosh.Token(woosh.NAME, '_parser', 1276, 30, 1276, 37),
woosh.Token(woosh.OP, '.', 1276, 37, 1276, 38),
woosh.Token(woosh.NAME, 'default_section', 1276, 38, 1276, 53),
woosh.Token(woosh.OP, ':', 1276, 53, 1276, 54),
woosh.Token(woosh.NEWLINE, '\r\n', 1276, 54, 1277, 0),
woosh.Token(woosh.INDENT, ' ', 1277, 0, 1277, 12),
woosh.Token(woosh.NAME, 'return', 1277, 12, 1277, 18),
woosh.Token(woosh.NAME, 'self', 1277, 19, 1277, 23),
woosh.Token(woosh.OP, '.', 1277, 23, 1277, 24),
woosh.Token(woosh.NAME, '_parser', 1277, 24, 1277, 31),
woosh.Token(woosh.OP, '.', 1277, 31, 1277, 32),
woosh.Token(woosh.NAME, 'options', 1277, 32, 1277, 39),
woosh.Token(woosh.OP, '(', 1277, 39, 1277, 40),
woosh.Token(woosh.NAME, 'self', 1277, 40, 1277, 44),
woosh.Token(woosh.OP, '.', 1277, 44, 1277, 45),
woosh.Token(woosh.NAME, '_name', 1277, 45, 1277, 50),
woosh.Token(woosh.OP, ')', 1277, 50, 1277, 51),
woosh.Token(woosh.NEWLINE, '\r\n', 1277, 51, 1278, 0),
woosh.Token(woosh.DEDENT, ' ', 1278, 0, 1278, 8),
woosh.Token(woosh.NAME, 'else', 1278, 8, 1278, 12),
woosh.Token(woosh.OP, ':', 1278, 12, 1278, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1278, 13, 1279, 0),
woosh.Token(woosh.INDENT, ' ', 1279, 0, 1279, 12),
woosh.Token(woosh.NAME, 'return', 1279, 12, 1279, 18),
woosh.Token(woosh.NAME, 'self', 1279, 19, 1279, 23),
woosh.Token(woosh.OP, '.', 1279, 23, 1279, 24),
woosh.Token(woosh.NAME, '_parser', 1279, 24, 1279, 31),
woosh.Token(woosh.OP, '.', 1279, 31, 1279, 32),
woosh.Token(woosh.NAME, 'defaults', 1279, 32, 1279, 40),
woosh.Token(woosh.OP, '(', 1279, 40, 1279, 41),
woosh.Token(woosh.OP, ')', 1279, 41, 1279, 42),
woosh.Token(woosh.NEWLINE, '\r\n', 1279, 42, 1280, 0),
woosh.Token(woosh.DEDENT, ' ', 1281, 0, 1281, 4),
woosh.Token(woosh.DEDENT, '', 1281, 4, 1281, 4),
woosh.Token(woosh.OP, '@', 1281, 4, 1281, 5),
woosh.Token(woosh.NAME, 'property', 1281, 5, 1281, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1281, 13, 1282, 0),
woosh.Token(woosh.NAME, 'def', 1282, 4, 1282, 7),
woosh.Token(woosh.NAME, 'parser', 1282, 8, 1282, 14),
woosh.Token(woosh.OP, '(', 1282, 14, 1282, 15),
woosh.Token(woosh.NAME, 'self', 1282, 15, 1282, 19),
woosh.Token(woosh.OP, ')', 1282, 19, 1282, 20),
woosh.Token(woosh.OP, ':', 1282, 20, 1282, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1282, 21, 1283, 0),
woosh.Token(woosh.COMMENT, '# The parser object of the proxy is read-only.', 1283, 8, 1283, 54),
woosh.Token(woosh.INDENT, ' ', 1284, 0, 1284, 8),
woosh.Token(woosh.NAME, 'return', 1284, 8, 1284, 14),
woosh.Token(woosh.NAME, 'self', 1284, 15, 1284, 19),
woosh.Token(woosh.OP, '.', 1284, 19, 1284, 20),
woosh.Token(woosh.NAME, '_parser', 1284, 20, 1284, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1284, 27, 1285, 0),
woosh.Token(woosh.DEDENT, ' ', 1286, 0, 1286, 4),
woosh.Token(woosh.OP, '@', 1286, 4, 1286, 5),
woosh.Token(woosh.NAME, 'property', 1286, 5, 1286, 13),
woosh.Token(woosh.NEWLINE, '\r\n', 1286, 13, 1287, 0),
woosh.Token(woosh.NAME, 'def', 1287, 4, 1287, 7),
woosh.Token(woosh.NAME, 'name', 1287, 8, 1287, 12),
woosh.Token(woosh.OP, '(', 1287, 12, 1287, 13),
woosh.Token(woosh.NAME, 'self', 1287, 13, 1287, 17),
woosh.Token(woosh.OP, ')', 1287, 17, 1287, 18),
woosh.Token(woosh.OP, ':', 1287, 18, 1287, 19),
woosh.Token(woosh.NEWLINE, '\r\n', 1287, 19, 1288, 0),
woosh.Token(woosh.COMMENT, '# The name of the section on a proxy is read-only.', 1288, 8, 1288, 58),
woosh.Token(woosh.INDENT, ' ', 1289, 0, 1289, 8),
woosh.Token(woosh.NAME, 'return', 1289, 8, 1289, 14),
woosh.Token(woosh.NAME, 'self', 1289, 15, 1289, 19),
woosh.Token(woosh.OP, '.', 1289, 19, 1289, 20),
woosh.Token(woosh.NAME, '_name', 1289, 20, 1289, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1289, 25, 1290, 0),
woosh.Token(woosh.DEDENT, ' ', 1291, 0, 1291, 4),
woosh.Token(woosh.NAME, 'def', 1291, 4, 1291, 7),
woosh.Token(woosh.NAME, 'get', 1291, 8, 1291, 11),
woosh.Token(woosh.OP, '(', 1291, 11, 1291, 12),
woosh.Token(woosh.NAME, 'self', 1291, 12, 1291, 16),
woosh.Token(woosh.OP, ',', 1291, 16, 1291, 17),
woosh.Token(woosh.NAME, 'option', 1291, 18, 1291, 24),
woosh.Token(woosh.OP, ',', 1291, 24, 1291, 25),
woosh.Token(woosh.NAME, 'fallback', 1291, 26, 1291, 34),
woosh.Token(woosh.OP, '=', 1291, 34, 1291, 35),
woosh.Token(woosh.NAME, 'None', 1291, 35, 1291, 39),
woosh.Token(woosh.OP, ',', 1291, 39, 1291, 40),
woosh.Token(woosh.OP, '*', 1291, 41, 1291, 42),
woosh.Token(woosh.OP, ',', 1291, 42, 1291, 43),
woosh.Token(woosh.NAME, 'raw', 1291, 44, 1291, 47),
woosh.Token(woosh.OP, '=', 1291, 47, 1291, 48),
woosh.Token(woosh.NAME, 'False', 1291, 48, 1291, 53),
woosh.Token(woosh.OP, ',', 1291, 53, 1291, 54),
woosh.Token(woosh.NAME, 'vars', 1291, 55, 1291, 59),
woosh.Token(woosh.OP, '=', 1291, 59, 1291, 60),
woosh.Token(woosh.NAME, 'None', 1291, 60, 1291, 64),
woosh.Token(woosh.OP, ',', 1291, 64, 1291, 65),
woosh.Token(woosh.NAME, '_impl', 1292, 12, 1292, 17),
woosh.Token(woosh.OP, '=', 1292, 17, 1292, 18),
woosh.Token(woosh.NAME, 'None', 1292, 18, 1292, 22),
woosh.Token(woosh.OP, ',', 1292, 22, 1292, 23),
woosh.Token(woosh.OP, '**', 1292, 24, 1292, 26),
woosh.Token(woosh.NAME, 'kwargs', 1292, 26, 1292, 32),
woosh.Token(woosh.OP, ')', 1292, 32, 1292, 33),
woosh.Token(woosh.OP, ':', 1292, 33, 1292, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1292, 34, 1293, 0),
woosh.Token(woosh.INDENT, ' ', 1293, 0, 1293, 8),
woosh.Token(woosh.STRING, '"""Get an option value.\r\n\r\n Unless `fallback` is provided, `None` will be returned if the option\r\n is not found.\r\n\r\n """', 1293, 8, 1298, 11),
woosh.Token(woosh.NEWLINE, '\r\n', 1298, 11, 1299, 0),
woosh.Token(woosh.COMMENT, '# If `_impl` is provided, it should be a getter method on the parser', 1299, 8, 1299, 76),
woosh.Token(woosh.COMMENT, '# object that provides the desired type conversion.', 1300, 8, 1300, 59),
woosh.Token(woosh.NAME, 'if', 1301, 8, 1301, 10),
woosh.Token(woosh.NAME, 'not', 1301, 11, 1301, 14),
woosh.Token(woosh.NAME, '_impl', 1301, 15, 1301, 20),
woosh.Token(woosh.OP, ':', 1301, 20, 1301, 21),
woosh.Token(woosh.NEWLINE, '\r\n', 1301, 21, 1302, 0),
woosh.Token(woosh.INDENT, ' ', 1302, 0, 1302, 12),
woosh.Token(woosh.NAME, '_impl', 1302, 12, 1302, 17),
woosh.Token(woosh.OP, '=', 1302, 18, 1302, 19),
woosh.Token(woosh.NAME, 'self', 1302, 20, 1302, 24),
woosh.Token(woosh.OP, '.', 1302, 24, 1302, 25),
woosh.Token(woosh.NAME, '_parser', 1302, 25, 1302, 32),
woosh.Token(woosh.OP, '.', 1302, 32, 1302, 33),
woosh.Token(woosh.NAME, 'get', 1302, 33, 1302, 36),
woosh.Token(woosh.NEWLINE, '\r\n', 1302, 36, 1303, 0),
woosh.Token(woosh.DEDENT, ' ', 1303, 0, 1303, 8),
woosh.Token(woosh.NAME, 'return', 1303, 8, 1303, 14),
woosh.Token(woosh.NAME, '_impl', 1303, 15, 1303, 20),
woosh.Token(woosh.OP, '(', 1303, 20, 1303, 21),
woosh.Token(woosh.NAME, 'self', 1303, 21, 1303, 25),
woosh.Token(woosh.OP, '.', 1303, 25, 1303, 26),
woosh.Token(woosh.NAME, '_name', 1303, 26, 1303, 31),
woosh.Token(woosh.OP, ',', 1303, 31, 1303, 32),
woosh.Token(woosh.NAME, 'option', 1303, 33, 1303, 39),
woosh.Token(woosh.OP, ',', 1303, 39, 1303, 40),
woosh.Token(woosh.NAME, 'raw', 1303, 41, 1303, 44),
woosh.Token(woosh.OP, '=', 1303, 44, 1303, 45),
woosh.Token(woosh.NAME, 'raw', 1303, 45, 1303, 48),
woosh.Token(woosh.OP, ',', 1303, 48, 1303, 49),
woosh.Token(woosh.NAME, 'vars', 1303, 50, 1303, 54),
woosh.Token(woosh.OP, '=', 1303, 54, 1303, 55),
woosh.Token(woosh.NAME, 'vars', 1303, 55, 1303, 59),
woosh.Token(woosh.OP, ',', 1303, 59, 1303, 60),
woosh.Token(woosh.NAME, 'fallback', 1304, 21, 1304, 29),
woosh.Token(woosh.OP, '=', 1304, 29, 1304, 30),
woosh.Token(woosh.NAME, 'fallback', 1304, 30, 1304, 38),
woosh.Token(woosh.OP, ',', 1304, 38, 1304, 39),
woosh.Token(woosh.OP, '**', 1304, 40, 1304, 42),
woosh.Token(woosh.NAME, 'kwargs', 1304, 42, 1304, 48),
woosh.Token(woosh.OP, ')', 1304, 48, 1304, 49),
woosh.Token(woosh.NEWLINE, '\r\n', 1304, 49, 1305, 0),
woosh.Token(woosh.DEDENT, '', 1307, 0, 1307, 0),
woosh.Token(woosh.DEDENT, '', 1307, 0, 1307, 0),
woosh.Token(woosh.NAME, 'class', 1307, 0, 1307, 5),
woosh.Token(woosh.NAME, 'ConverterMapping', 1307, 6, 1307, 22),
woosh.Token(woosh.OP, '(', 1307, 22, 1307, 23),
woosh.Token(woosh.NAME, 'MutableMapping', 1307, 23, 1307, 37),
woosh.Token(woosh.OP, ')', 1307, 37, 1307, 38),
woosh.Token(woosh.OP, ':', 1307, 38, 1307, 39),
woosh.Token(woosh.NEWLINE, '\r\n', 1307, 39, 1308, 0),
woosh.Token(woosh.INDENT, ' ', 1308, 0, 1308, 4),
woosh.Token(woosh.STRING, '"""Enables reuse of get*() methods between the parser and section proxies.\r\n\r\n If a parser class implements a getter directly, the value for the given\r\n key will be ``None``. The presence of the converter name here enables\r\n section proxies to find and use the implementation on the parser class.\r\n """', 1308, 4, 1313, 7),
woosh.Token(woosh.NEWLINE, '\r\n', 1313, 7, 1314, 0),
woosh.Token(woosh.NAME, 'GETTERCRE', 1315, 4, 1315, 13),
woosh.Token(woosh.OP, '=', 1315, 14, 1315, 15),
woosh.Token(woosh.NAME, 're', 1315, 16, 1315, 18),
woosh.Token(woosh.OP, '.', 1315, 18, 1315, 19),
woosh.Token(woosh.NAME, 'compile', 1315, 19, 1315, 26),
woosh.Token(woosh.OP, '(', 1315, 26, 1315, 27),
woosh.Token(woosh.STRING, 'r"^get(?P<name>.+)$"', 1315, 27, 1315, 47),
woosh.Token(woosh.OP, ')', 1315, 47, 1315, 48),
woosh.Token(woosh.NEWLINE, '\r\n', 1315, 48, 1316, 0),
woosh.Token(woosh.NAME, 'def', 1317, 4, 1317, 7),
woosh.Token(woosh.NAME, '__init__', 1317, 8, 1317, 16),
woosh.Token(woosh.OP, '(', 1317, 16, 1317, 17),
woosh.Token(woosh.NAME, 'self', 1317, 17, 1317, 21),
woosh.Token(woosh.OP, ',', 1317, 21, 1317, 22),
woosh.Token(woosh.NAME, 'parser', 1317, 23, 1317, 29),
woosh.Token(woosh.OP, ')', 1317, 29, 1317, 30),
woosh.Token(woosh.OP, ':', 1317, 30, 1317, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1317, 31, 1318, 0),
woosh.Token(woosh.INDENT, ' ', 1318, 0, 1318, 8),
woosh.Token(woosh.NAME, 'self', 1318, 8, 1318, 12),
woosh.Token(woosh.OP, '.', 1318, 12, 1318, 13),
woosh.Token(woosh.NAME, '_parser', 1318, 13, 1318, 20),
woosh.Token(woosh.OP, '=', 1318, 21, 1318, 22),
woosh.Token(woosh.NAME, 'parser', 1318, 23, 1318, 29),
woosh.Token(woosh.NEWLINE, '\r\n', 1318, 29, 1319, 0),
woosh.Token(woosh.NAME, 'self', 1319, 8, 1319, 12),
woosh.Token(woosh.OP, '.', 1319, 12, 1319, 13),
woosh.Token(woosh.NAME, '_data', 1319, 13, 1319, 18),
woosh.Token(woosh.OP, '=', 1319, 19, 1319, 20),
woosh.Token(woosh.OP, '{', 1319, 21, 1319, 22),
woosh.Token(woosh.OP, '}', 1319, 22, 1319, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1319, 23, 1320, 0),
woosh.Token(woosh.NAME, 'for', 1320, 8, 1320, 11),
woosh.Token(woosh.NAME, 'getter', 1320, 12, 1320, 18),
woosh.Token(woosh.NAME, 'in', 1320, 19, 1320, 21),
woosh.Token(woosh.NAME, 'dir', 1320, 22, 1320, 25),
woosh.Token(woosh.OP, '(', 1320, 25, 1320, 26),
woosh.Token(woosh.NAME, 'self', 1320, 26, 1320, 30),
woosh.Token(woosh.OP, '.', 1320, 30, 1320, 31),
woosh.Token(woosh.NAME, '_parser', 1320, 31, 1320, 38),
woosh.Token(woosh.OP, ')', 1320, 38, 1320, 39),
woosh.Token(woosh.OP, ':', 1320, 39, 1320, 40),
woosh.Token(woosh.NEWLINE, '\r\n', 1320, 40, 1321, 0),
woosh.Token(woosh.INDENT, ' ', 1321, 0, 1321, 12),
woosh.Token(woosh.NAME, 'm', 1321, 12, 1321, 13),
woosh.Token(woosh.OP, '=', 1321, 14, 1321, 15),
woosh.Token(woosh.NAME, 'self', 1321, 16, 1321, 20),
woosh.Token(woosh.OP, '.', 1321, 20, 1321, 21),
woosh.Token(woosh.NAME, 'GETTERCRE', 1321, 21, 1321, 30),
woosh.Token(woosh.OP, '.', 1321, 30, 1321, 31),
woosh.Token(woosh.NAME, 'match', 1321, 31, 1321, 36),
woosh.Token(woosh.OP, '(', 1321, 36, 1321, 37),
woosh.Token(woosh.NAME, 'getter', 1321, 37, 1321, 43),
woosh.Token(woosh.OP, ')', 1321, 43, 1321, 44),
woosh.Token(woosh.NEWLINE, '\r\n', 1321, 44, 1322, 0),
woosh.Token(woosh.NAME, 'if', 1322, 12, 1322, 14),
woosh.Token(woosh.NAME, 'not', 1322, 15, 1322, 18),
woosh.Token(woosh.NAME, 'm', 1322, 19, 1322, 20),
woosh.Token(woosh.NAME, 'or', 1322, 21, 1322, 23),
woosh.Token(woosh.NAME, 'not', 1322, 24, 1322, 27),
woosh.Token(woosh.NAME, 'callable', 1322, 28, 1322, 36),
woosh.Token(woosh.OP, '(', 1322, 36, 1322, 37),
woosh.Token(woosh.NAME, 'getattr', 1322, 37, 1322, 44),
woosh.Token(woosh.OP, '(', 1322, 44, 1322, 45),
woosh.Token(woosh.NAME, 'self', 1322, 45, 1322, 49),
woosh.Token(woosh.OP, '.', 1322, 49, 1322, 50),
woosh.Token(woosh.NAME, '_parser', 1322, 50, 1322, 57),
woosh.Token(woosh.OP, ',', 1322, 57, 1322, 58),
woosh.Token(woosh.NAME, 'getter', 1322, 59, 1322, 65),
woosh.Token(woosh.OP, ')', 1322, 65, 1322, 66),
woosh.Token(woosh.OP, ')', 1322, 66, 1322, 67),
woosh.Token(woosh.OP, ':', 1322, 67, 1322, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 1322, 68, 1323, 0),
woosh.Token(woosh.INDENT, ' ', 1323, 0, 1323, 16),
woosh.Token(woosh.NAME, 'continue', 1323, 16, 1323, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1323, 24, 1324, 0),
woosh.Token(woosh.DEDENT, ' ', 1324, 0, 1324, 12),
woosh.Token(woosh.NAME, 'self', 1324, 12, 1324, 16),
woosh.Token(woosh.OP, '.', 1324, 16, 1324, 17),
woosh.Token(woosh.NAME, '_data', 1324, 17, 1324, 22),
woosh.Token(woosh.OP, '[', 1324, 22, 1324, 23),
woosh.Token(woosh.NAME, 'm', 1324, 23, 1324, 24),
woosh.Token(woosh.OP, '.', 1324, 24, 1324, 25),
woosh.Token(woosh.NAME, 'group', 1324, 25, 1324, 30),
woosh.Token(woosh.OP, '(', 1324, 30, 1324, 31),
woosh.Token(woosh.STRING, "'name'", 1324, 31, 1324, 37),
woosh.Token(woosh.OP, ')', 1324, 37, 1324, 38),
woosh.Token(woosh.OP, ']', 1324, 38, 1324, 39),
woosh.Token(woosh.OP, '=', 1324, 40, 1324, 41),
woosh.Token(woosh.NAME, 'None', 1324, 42, 1324, 46),
woosh.Token(woosh.COMMENT, '# See class docstring.', 1324, 49, 1324, 71),
woosh.Token(woosh.NEWLINE, '\r\n', 1324, 71, 1325, 0),
woosh.Token(woosh.DEDENT, ' ', 1326, 0, 1326, 4),
woosh.Token(woosh.DEDENT, '', 1326, 4, 1326, 4),
woosh.Token(woosh.NAME, 'def', 1326, 4, 1326, 7),
woosh.Token(woosh.NAME, '__getitem__', 1326, 8, 1326, 19),
woosh.Token(woosh.OP, '(', 1326, 19, 1326, 20),
woosh.Token(woosh.NAME, 'self', 1326, 20, 1326, 24),
woosh.Token(woosh.OP, ',', 1326, 24, 1326, 25),
woosh.Token(woosh.NAME, 'key', 1326, 26, 1326, 29),
woosh.Token(woosh.OP, ')', 1326, 29, 1326, 30),
woosh.Token(woosh.OP, ':', 1326, 30, 1326, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1326, 31, 1327, 0),
woosh.Token(woosh.INDENT, ' ', 1327, 0, 1327, 8),
woosh.Token(woosh.NAME, 'return', 1327, 8, 1327, 14),
woosh.Token(woosh.NAME, 'self', 1327, 15, 1327, 19),
woosh.Token(woosh.OP, '.', 1327, 19, 1327, 20),
woosh.Token(woosh.NAME, '_data', 1327, 20, 1327, 25),
woosh.Token(woosh.OP, '[', 1327, 25, 1327, 26),
woosh.Token(woosh.NAME, 'key', 1327, 26, 1327, 29),
woosh.Token(woosh.OP, ']', 1327, 29, 1327, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1327, 30, 1328, 0),
woosh.Token(woosh.DEDENT, ' ', 1329, 0, 1329, 4),
woosh.Token(woosh.NAME, 'def', 1329, 4, 1329, 7),
woosh.Token(woosh.NAME, '__setitem__', 1329, 8, 1329, 19),
woosh.Token(woosh.OP, '(', 1329, 19, 1329, 20),
woosh.Token(woosh.NAME, 'self', 1329, 20, 1329, 24),
woosh.Token(woosh.OP, ',', 1329, 24, 1329, 25),
woosh.Token(woosh.NAME, 'key', 1329, 26, 1329, 29),
woosh.Token(woosh.OP, ',', 1329, 29, 1329, 30),
woosh.Token(woosh.NAME, 'value', 1329, 31, 1329, 36),
woosh.Token(woosh.OP, ')', 1329, 36, 1329, 37),
woosh.Token(woosh.OP, ':', 1329, 37, 1329, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1329, 38, 1330, 0),
woosh.Token(woosh.INDENT, ' ', 1330, 0, 1330, 8),
woosh.Token(woosh.NAME, 'try', 1330, 8, 1330, 11),
woosh.Token(woosh.OP, ':', 1330, 11, 1330, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1330, 12, 1331, 0),
woosh.Token(woosh.INDENT, ' ', 1331, 0, 1331, 12),
woosh.Token(woosh.NAME, 'k', 1331, 12, 1331, 13),
woosh.Token(woosh.OP, '=', 1331, 14, 1331, 15),
woosh.Token(woosh.STRING, "'get'", 1331, 16, 1331, 21),
woosh.Token(woosh.OP, '+', 1331, 22, 1331, 23),
woosh.Token(woosh.NAME, 'key', 1331, 24, 1331, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1331, 27, 1332, 0),
woosh.Token(woosh.DEDENT, ' ', 1332, 0, 1332, 8),
woosh.Token(woosh.NAME, 'except', 1332, 8, 1332, 14),
woosh.Token(woosh.NAME, 'TypeError', 1332, 15, 1332, 24),
woosh.Token(woosh.OP, ':', 1332, 24, 1332, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1332, 25, 1333, 0),
woosh.Token(woosh.INDENT, ' ', 1333, 0, 1333, 12),
woosh.Token(woosh.NAME, 'raise', 1333, 12, 1333, 17),
woosh.Token(woosh.NAME, 'ValueError', 1333, 18, 1333, 28),
woosh.Token(woosh.OP, '(', 1333, 28, 1333, 29),
woosh.Token(woosh.STRING, "'Incompatible key: {} (type: {})'", 1333, 29, 1333, 62),
woosh.Token(woosh.STRING, "''", 1334, 29, 1334, 31),
woosh.Token(woosh.OP, '.', 1334, 31, 1334, 32),
woosh.Token(woosh.NAME, 'format', 1334, 32, 1334, 38),
woosh.Token(woosh.OP, '(', 1334, 38, 1334, 39),
woosh.Token(woosh.NAME, 'key', 1334, 39, 1334, 42),
woosh.Token(woosh.OP, ',', 1334, 42, 1334, 43),
woosh.Token(woosh.NAME, 'type', 1334, 44, 1334, 48),
woosh.Token(woosh.OP, '(', 1334, 48, 1334, 49),
woosh.Token(woosh.NAME, 'key', 1334, 49, 1334, 52),
woosh.Token(woosh.OP, ')', 1334, 52, 1334, 53),
woosh.Token(woosh.OP, ')', 1334, 53, 1334, 54),
woosh.Token(woosh.OP, ')', 1334, 54, 1334, 55),
woosh.Token(woosh.NEWLINE, '\r\n', 1334, 55, 1335, 0),
woosh.Token(woosh.DEDENT, ' ', 1335, 0, 1335, 8),
woosh.Token(woosh.NAME, 'if', 1335, 8, 1335, 10),
woosh.Token(woosh.NAME, 'k', 1335, 11, 1335, 12),
woosh.Token(woosh.OP, '==', 1335, 13, 1335, 15),
woosh.Token(woosh.STRING, "'get'", 1335, 16, 1335, 21),
woosh.Token(woosh.OP, ':', 1335, 21, 1335, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1335, 22, 1336, 0),
woosh.Token(woosh.INDENT, ' ', 1336, 0, 1336, 12),
woosh.Token(woosh.NAME, 'raise', 1336, 12, 1336, 17),
woosh.Token(woosh.NAME, 'ValueError', 1336, 18, 1336, 28),
woosh.Token(woosh.OP, '(', 1336, 28, 1336, 29),
woosh.Token(woosh.STRING, '\'Incompatible key: cannot use "" as a name\'', 1336, 29, 1336, 72),
woosh.Token(woosh.OP, ')', 1336, 72, 1336, 73),
woosh.Token(woosh.NEWLINE, '\r\n', 1336, 73, 1337, 0),
woosh.Token(woosh.DEDENT, ' ', 1337, 0, 1337, 8),
woosh.Token(woosh.NAME, 'self', 1337, 8, 1337, 12),
woosh.Token(woosh.OP, '.', 1337, 12, 1337, 13),
woosh.Token(woosh.NAME, '_data', 1337, 13, 1337, 18),
woosh.Token(woosh.OP, '[', 1337, 18, 1337, 19),
woosh.Token(woosh.NAME, 'key', 1337, 19, 1337, 22),
woosh.Token(woosh.OP, ']', 1337, 22, 1337, 23),
woosh.Token(woosh.OP, '=', 1337, 24, 1337, 25),
woosh.Token(woosh.NAME, 'value', 1337, 26, 1337, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1337, 31, 1338, 0),
woosh.Token(woosh.NAME, 'func', 1338, 8, 1338, 12),
woosh.Token(woosh.OP, '=', 1338, 13, 1338, 14),
woosh.Token(woosh.NAME, 'functools', 1338, 15, 1338, 24),
woosh.Token(woosh.OP, '.', 1338, 24, 1338, 25),
woosh.Token(woosh.NAME, 'partial', 1338, 25, 1338, 32),
woosh.Token(woosh.OP, '(', 1338, 32, 1338, 33),
woosh.Token(woosh.NAME, 'self', 1338, 33, 1338, 37),
woosh.Token(woosh.OP, '.', 1338, 37, 1338, 38),
woosh.Token(woosh.NAME, '_parser', 1338, 38, 1338, 45),
woosh.Token(woosh.OP, '.', 1338, 45, 1338, 46),
woosh.Token(woosh.NAME, '_get_conv', 1338, 46, 1338, 55),
woosh.Token(woosh.OP, ',', 1338, 55, 1338, 56),
woosh.Token(woosh.NAME, 'conv', 1338, 57, 1338, 61),
woosh.Token(woosh.OP, '=', 1338, 61, 1338, 62),
woosh.Token(woosh.NAME, 'value', 1338, 62, 1338, 67),
woosh.Token(woosh.OP, ')', 1338, 67, 1338, 68),
woosh.Token(woosh.NEWLINE, '\r\n', 1338, 68, 1339, 0),
woosh.Token(woosh.NAME, 'func', 1339, 8, 1339, 12),
woosh.Token(woosh.OP, '.', 1339, 12, 1339, 13),
woosh.Token(woosh.NAME, 'converter', 1339, 13, 1339, 22),
woosh.Token(woosh.OP, '=', 1339, 23, 1339, 24),
woosh.Token(woosh.NAME, 'value', 1339, 25, 1339, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1339, 30, 1340, 0),
woosh.Token(woosh.NAME, 'setattr', 1340, 8, 1340, 15),
woosh.Token(woosh.OP, '(', 1340, 15, 1340, 16),
woosh.Token(woosh.NAME, 'self', 1340, 16, 1340, 20),
woosh.Token(woosh.OP, '.', 1340, 20, 1340, 21),
woosh.Token(woosh.NAME, '_parser', 1340, 21, 1340, 28),
woosh.Token(woosh.OP, ',', 1340, 28, 1340, 29),
woosh.Token(woosh.NAME, 'k', 1340, 30, 1340, 31),
woosh.Token(woosh.OP, ',', 1340, 31, 1340, 32),
woosh.Token(woosh.NAME, 'func', 1340, 33, 1340, 37),
woosh.Token(woosh.OP, ')', 1340, 37, 1340, 38),
woosh.Token(woosh.NEWLINE, '\r\n', 1340, 38, 1341, 0),
woosh.Token(woosh.NAME, 'for', 1341, 8, 1341, 11),
woosh.Token(woosh.NAME, 'proxy', 1341, 12, 1341, 17),
woosh.Token(woosh.NAME, 'in', 1341, 18, 1341, 20),
woosh.Token(woosh.NAME, 'self', 1341, 21, 1341, 25),
woosh.Token(woosh.OP, '.', 1341, 25, 1341, 26),
woosh.Token(woosh.NAME, '_parser', 1341, 26, 1341, 33),
woosh.Token(woosh.OP, '.', 1341, 33, 1341, 34),
woosh.Token(woosh.NAME, 'values', 1341, 34, 1341, 40),
woosh.Token(woosh.OP, '(', 1341, 40, 1341, 41),
woosh.Token(woosh.OP, ')', 1341, 41, 1341, 42),
woosh.Token(woosh.OP, ':', 1341, 42, 1341, 43),
woosh.Token(woosh.NEWLINE, '\r\n', 1341, 43, 1342, 0),
woosh.Token(woosh.INDENT, ' ', 1342, 0, 1342, 12),
woosh.Token(woosh.NAME, 'getter', 1342, 12, 1342, 18),
woosh.Token(woosh.OP, '=', 1342, 19, 1342, 20),
woosh.Token(woosh.NAME, 'functools', 1342, 21, 1342, 30),
woosh.Token(woosh.OP, '.', 1342, 30, 1342, 31),
woosh.Token(woosh.NAME, 'partial', 1342, 31, 1342, 38),
woosh.Token(woosh.OP, '(', 1342, 38, 1342, 39),
woosh.Token(woosh.NAME, 'proxy', 1342, 39, 1342, 44),
woosh.Token(woosh.OP, '.', 1342, 44, 1342, 45),
woosh.Token(woosh.NAME, 'get', 1342, 45, 1342, 48),
woosh.Token(woosh.OP, ',', 1342, 48, 1342, 49),
woosh.Token(woosh.NAME, '_impl', 1342, 50, 1342, 55),
woosh.Token(woosh.OP, '=', 1342, 55, 1342, 56),
woosh.Token(woosh.NAME, 'func', 1342, 56, 1342, 60),
woosh.Token(woosh.OP, ')', 1342, 60, 1342, 61),
woosh.Token(woosh.NEWLINE, '\r\n', 1342, 61, 1343, 0),
woosh.Token(woosh.NAME, 'setattr', 1343, 12, 1343, 19),
woosh.Token(woosh.OP, '(', 1343, 19, 1343, 20),
woosh.Token(woosh.NAME, 'proxy', 1343, 20, 1343, 25),
woosh.Token(woosh.OP, ',', 1343, 25, 1343, 26),
woosh.Token(woosh.NAME, 'k', 1343, 27, 1343, 28),
woosh.Token(woosh.OP, ',', 1343, 28, 1343, 29),
woosh.Token(woosh.NAME, 'getter', 1343, 30, 1343, 36),
woosh.Token(woosh.OP, ')', 1343, 36, 1343, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1343, 37, 1344, 0),
woosh.Token(woosh.DEDENT, ' ', 1345, 0, 1345, 4),
woosh.Token(woosh.DEDENT, '', 1345, 4, 1345, 4),
woosh.Token(woosh.NAME, 'def', 1345, 4, 1345, 7),
woosh.Token(woosh.NAME, '__delitem__', 1345, 8, 1345, 19),
woosh.Token(woosh.OP, '(', 1345, 19, 1345, 20),
woosh.Token(woosh.NAME, 'self', 1345, 20, 1345, 24),
woosh.Token(woosh.OP, ',', 1345, 24, 1345, 25),
woosh.Token(woosh.NAME, 'key', 1345, 26, 1345, 29),
woosh.Token(woosh.OP, ')', 1345, 29, 1345, 30),
woosh.Token(woosh.OP, ':', 1345, 30, 1345, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1345, 31, 1346, 0),
woosh.Token(woosh.INDENT, ' ', 1346, 0, 1346, 8),
woosh.Token(woosh.NAME, 'try', 1346, 8, 1346, 11),
woosh.Token(woosh.OP, ':', 1346, 11, 1346, 12),
woosh.Token(woosh.NEWLINE, '\r\n', 1346, 12, 1347, 0),
woosh.Token(woosh.INDENT, ' ', 1347, 0, 1347, 12),
woosh.Token(woosh.NAME, 'k', 1347, 12, 1347, 13),
woosh.Token(woosh.OP, '=', 1347, 14, 1347, 15),
woosh.Token(woosh.STRING, "'get'", 1347, 16, 1347, 21),
woosh.Token(woosh.OP, '+', 1347, 22, 1347, 23),
woosh.Token(woosh.OP, '(', 1347, 24, 1347, 25),
woosh.Token(woosh.NAME, 'key', 1347, 25, 1347, 28),
woosh.Token(woosh.NAME, 'or', 1347, 29, 1347, 31),
woosh.Token(woosh.NAME, 'None', 1347, 32, 1347, 36),
woosh.Token(woosh.OP, ')', 1347, 36, 1347, 37),
woosh.Token(woosh.NEWLINE, '\r\n', 1347, 37, 1348, 0),
woosh.Token(woosh.DEDENT, ' ', 1348, 0, 1348, 8),
woosh.Token(woosh.NAME, 'except', 1348, 8, 1348, 14),
woosh.Token(woosh.NAME, 'TypeError', 1348, 15, 1348, 24),
woosh.Token(woosh.OP, ':', 1348, 24, 1348, 25),
woosh.Token(woosh.NEWLINE, '\r\n', 1348, 25, 1349, 0),
woosh.Token(woosh.INDENT, ' ', 1349, 0, 1349, 12),
woosh.Token(woosh.NAME, 'raise', 1349, 12, 1349, 17),
woosh.Token(woosh.NAME, 'KeyError', 1349, 18, 1349, 26),
woosh.Token(woosh.OP, '(', 1349, 26, 1349, 27),
woosh.Token(woosh.NAME, 'key', 1349, 27, 1349, 30),
woosh.Token(woosh.OP, ')', 1349, 30, 1349, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1349, 31, 1350, 0),
woosh.Token(woosh.DEDENT, ' ', 1350, 0, 1350, 8),
woosh.Token(woosh.NAME, 'del', 1350, 8, 1350, 11),
woosh.Token(woosh.NAME, 'self', 1350, 12, 1350, 16),
woosh.Token(woosh.OP, '.', 1350, 16, 1350, 17),
woosh.Token(woosh.NAME, '_data', 1350, 17, 1350, 22),
woosh.Token(woosh.OP, '[', 1350, 22, 1350, 23),
woosh.Token(woosh.NAME, 'key', 1350, 23, 1350, 26),
woosh.Token(woosh.OP, ']', 1350, 26, 1350, 27),
woosh.Token(woosh.NEWLINE, '\r\n', 1350, 27, 1351, 0),
woosh.Token(woosh.NAME, 'for', 1351, 8, 1351, 11),
woosh.Token(woosh.NAME, 'inst', 1351, 12, 1351, 16),
woosh.Token(woosh.NAME, 'in', 1351, 17, 1351, 19),
woosh.Token(woosh.NAME, 'itertools', 1351, 20, 1351, 29),
woosh.Token(woosh.OP, '.', 1351, 29, 1351, 30),
woosh.Token(woosh.NAME, 'chain', 1351, 30, 1351, 35),
woosh.Token(woosh.OP, '(', 1351, 35, 1351, 36),
woosh.Token(woosh.OP, '(', 1351, 36, 1351, 37),
woosh.Token(woosh.NAME, 'self', 1351, 37, 1351, 41),
woosh.Token(woosh.OP, '.', 1351, 41, 1351, 42),
woosh.Token(woosh.NAME, '_parser', 1351, 42, 1351, 49),
woosh.Token(woosh.OP, ',', 1351, 49, 1351, 50),
woosh.Token(woosh.OP, ')', 1351, 50, 1351, 51),
woosh.Token(woosh.OP, ',', 1351, 51, 1351, 52),
woosh.Token(woosh.NAME, 'self', 1351, 53, 1351, 57),
woosh.Token(woosh.OP, '.', 1351, 57, 1351, 58),
woosh.Token(woosh.NAME, '_parser', 1351, 58, 1351, 65),
woosh.Token(woosh.OP, '.', 1351, 65, 1351, 66),
woosh.Token(woosh.NAME, 'values', 1351, 66, 1351, 72),
woosh.Token(woosh.OP, '(', 1351, 72, 1351, 73),
woosh.Token(woosh.OP, ')', 1351, 73, 1351, 74),
woosh.Token(woosh.OP, ')', 1351, 74, 1351, 75),
woosh.Token(woosh.OP, ':', 1351, 75, 1351, 76),
woosh.Token(woosh.NEWLINE, '\r\n', 1351, 76, 1352, 0),
woosh.Token(woosh.INDENT, ' ', 1352, 0, 1352, 12),
woosh.Token(woosh.NAME, 'try', 1352, 12, 1352, 15),
woosh.Token(woosh.OP, ':', 1352, 15, 1352, 16),
woosh.Token(woosh.NEWLINE, '\r\n', 1352, 16, 1353, 0),
woosh.Token(woosh.INDENT, ' ', 1353, 0, 1353, 16),
woosh.Token(woosh.NAME, 'delattr', 1353, 16, 1353, 23),
woosh.Token(woosh.OP, '(', 1353, 23, 1353, 24),
woosh.Token(woosh.NAME, 'inst', 1353, 24, 1353, 28),
woosh.Token(woosh.OP, ',', 1353, 28, 1353, 29),
woosh.Token(woosh.NAME, 'k', 1353, 30, 1353, 31),
woosh.Token(woosh.OP, ')', 1353, 31, 1353, 32),
woosh.Token(woosh.NEWLINE, '\r\n', 1353, 32, 1354, 0),
woosh.Token(woosh.DEDENT, ' ', 1354, 0, 1354, 12),
woosh.Token(woosh.NAME, 'except', 1354, 12, 1354, 18),
woosh.Token(woosh.NAME, 'AttributeError', 1354, 19, 1354, 33),
woosh.Token(woosh.OP, ':', 1354, 33, 1354, 34),
woosh.Token(woosh.NEWLINE, '\r\n', 1354, 34, 1355, 0),
woosh.Token(woosh.COMMENT, "# don't raise since the entry was present in _data, silently", 1355, 16, 1355, 76),
woosh.Token(woosh.COMMENT, '# clean up', 1356, 16, 1356, 26),
woosh.Token(woosh.INDENT, ' ', 1357, 0, 1357, 16),
woosh.Token(woosh.NAME, 'continue', 1357, 16, 1357, 24),
woosh.Token(woosh.NEWLINE, '\r\n', 1357, 24, 1358, 0),
woosh.Token(woosh.DEDENT, ' ', 1359, 0, 1359, 4),
woosh.Token(woosh.DEDENT, '', 1359, 4, 1359, 4),
woosh.Token(woosh.DEDENT, '', 1359, 4, 1359, 4),
woosh.Token(woosh.NAME, 'def', 1359, 4, 1359, 7),
woosh.Token(woosh.NAME, '__iter__', 1359, 8, 1359, 16),
woosh.Token(woosh.OP, '(', 1359, 16, 1359, 17),
woosh.Token(woosh.NAME, 'self', 1359, 17, 1359, 21),
woosh.Token(woosh.OP, ')', 1359, 21, 1359, 22),
woosh.Token(woosh.OP, ':', 1359, 22, 1359, 23),
woosh.Token(woosh.NEWLINE, '\r\n', 1359, 23, 1360, 0),
woosh.Token(woosh.INDENT, ' ', 1360, 0, 1360, 8),
woosh.Token(woosh.NAME, 'return', 1360, 8, 1360, 14),
woosh.Token(woosh.NAME, 'iter', 1360, 15, 1360, 19),
woosh.Token(woosh.OP, '(', 1360, 19, 1360, 20),
woosh.Token(woosh.NAME, 'self', 1360, 20, 1360, 24),
woosh.Token(woosh.OP, '.', 1360, 24, 1360, 25),
woosh.Token(woosh.NAME, '_data', 1360, 25, 1360, 30),
woosh.Token(woosh.OP, ')', 1360, 30, 1360, 31),
woosh.Token(woosh.NEWLINE, '\r\n', 1360, 31, 1361, 0),
woosh.Token(woosh.DEDENT, ' ', 1362, 0, 1362, 4),
woosh.Token(woosh.NAME, 'def', 1362, 4, 1362, 7),
woosh.Token(woosh.NAME, '__len__', 1362, 8, 1362, 15),
woosh.Token(woosh.OP, '(', 1362, 15, 1362, 16),
woosh.Token(woosh.NAME, 'self', 1362, 16, 1362, 20),
woosh.Token(woosh.OP, ')', 1362, 20, 1362, 21),
woosh.Token(woosh.OP, ':', 1362, 21, 1362, 22),
woosh.Token(woosh.NEWLINE, '\r\n', 1362, 22, 1363, 0),
woosh.Token(woosh.INDENT, ' ', 1363, 0, 1363, 8),
woosh.Token(woosh.NAME, 'return', 1363, 8, 1363, 14),
woosh.Token(woosh.NAME, 'len', 1363, 15, 1363, 18),
woosh.Token(woosh.OP, '(', 1363, 18, 1363, 19),
woosh.Token(woosh.NAME, 'self', 1363, 19, 1363, 23),
woosh.Token(woosh.OP, '.', 1363, 23, 1363, 24),
woosh.Token(woosh.NAME, '_data', 1363, 24, 1363, 29),
woosh.Token(woosh.OP, ')', 1363, 29, 1363, 30),
woosh.Token(woosh.NEWLINE, '\r\n', 1363, 30, 1364, 0),
woosh.Token(woosh.DEDENT, '', 1364, 0, 1364, 0),
woosh.Token(woosh.DEDENT, '', 1364, 0, 1364, 0),
woosh.Token(woosh.EOF, '', 1364, 0, 1364, 0),
]
| 53.834885
| 6,769
| 0.624377
| 67,382
| 399,078
| 3.68732
| 0.027708
| 0.297192
| 0.445788
| 0.223526
| 0.741965
| 0.344379
| 0.118136
| 0.023123
| 0.018055
| 0.016313
| 0
| 0.220295
| 0.136166
| 399,078
| 7,412
| 6,770
| 53.842148
| 0.500425
| 0.0002
| 0
| 0.007161
| 1
| 0.105256
| 0.813868
| 0.351473
| 0
| 0
| 0
| 0
| 0.000135
| 1
| 0.000405
| false
| 0.000676
| 0.001757
| 0.00027
| 0.002432
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69c504c45b696555d86da05d74e532df9730d910
| 8,794
|
py
|
Python
|
tests/integration/offer/percentage_benefit_tests.py
|
endgame/django-oscar
|
e5d78436e20b55902537a6cc82edf4e22568f9d6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/integration/offer/percentage_benefit_tests.py
|
endgame/django-oscar
|
e5d78436e20b55902537a6cc82edf4e22568f9d6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/integration/offer/percentage_benefit_tests.py
|
endgame/django-oscar
|
e5d78436e20b55902537a6cc82edf4e22568f9d6
|
[
"BSD-3-Clause"
] | 1
|
2019-07-10T06:32:14.000Z
|
2019-07-10T06:32:14.000Z
|
from decimal import Decimal as D
from django.test import TestCase
from django_dynamic_fixture import G
from oscar.apps.offer import models
from oscar.apps.basket.models import Basket
from oscar_testsupport.factories import create_product
class TestAPercentageDiscountAppliedWithCountCondition(TestCase):
def setUp(self):
range = models.Range.objects.create(
name="All products", includes_all_products=True)
self.condition = models.CountCondition.objects.create(
range=range,
type=models.Condition.COUNT,
value=2)
self.benefit = models.PercentageDiscountBenefit.objects.create(
range=range,
type=models.Benefit.PERCENTAGE,
value=20)
self.basket = G(Basket)
def test_applies_correctly_to_empty_basket(self):
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(D('0.00'), discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_with_no_discountable_products(self):
for product in [create_product(price=D('12.00'))]:
product.is_discountable = False
product.save()
self.basket.add_product(product, 2)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(D('0.00'), discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(2, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
for product in [create_product(price=D('12.00'))]:
self.basket.add_product(product, 2)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(2 * D('12.00') * D('0.2'), discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
for product in [create_product(price=D('12.00'))]:
self.basket.add_product(product, 3)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(3 * D('12.00') * D('0.2'), discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
class TestAPercentageDiscountWithMaxItemsSetAppliedWithCountCondition(TestCase):
def setUp(self):
range = models.Range.objects.create(
name="All products", includes_all_products=True)
self.condition = models.CountCondition.objects.create(
range=range,
type=models.Condition.COUNT,
value=2)
self.benefit = models.PercentageDiscountBenefit.objects.create(
range=range,
type=models.Benefit.PERCENTAGE,
value=20,
max_affected_items=1)
self.basket = G(Basket)
def test_applies_correctly_to_empty_basket(self):
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(D('0.00'), discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
for product in [create_product(price=D('12.00'))]:
self.basket.add_product(product, 2)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(1 * D('12.00') * D('0.2'), discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
for product in [create_product(price=D('12.00')),
create_product(price=D('20.00'))]:
self.basket.add_product(product, 2)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(1 * D('12.00') * D('0.2'), discount)
# Should only consume the condition products
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(2, self.basket.num_items_without_discount)
class TestAPercentageDiscountAppliedWithValueCondition(TestCase):
def setUp(self):
range = models.Range.objects.create(
name="All products", includes_all_products=True)
self.condition = models.ValueCondition.objects.create(
range=range,
type=models.Condition.VALUE,
value=D('10.00'))
self.benefit = models.PercentageDiscountBenefit.objects.create(
range=range,
type=models.Benefit.PERCENTAGE,
value=20)
self.basket = G(Basket)
def test_applies_correctly_to_empty_basket(self):
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(D('0.00'), discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
for product in [create_product(price=D('5.00'))]:
self.basket.add_product(product, 2)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(2 * D('5.00') * D('0.2'), discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition_but_matches_on_boundary(self):
for product in [create_product(price=D('5.00'))]:
self.basket.add_product(product, 3)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(3 * D('5.00') * D('0.2'), discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
for product in [create_product(price=D('4.00'))]:
self.basket.add_product(product, 3)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(3 * D('4.00') * D('0.2'), discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
class TestAPercentageDiscountWithMaxItemsSetAppliedWithValueCondition(TestCase):
def setUp(self):
range = models.Range.objects.create(
name="All products", includes_all_products=True)
self.condition = models.ValueCondition.objects.create(
range=range,
type=models.Condition.VALUE,
value=D('10.00'))
self.benefit = models.PercentageDiscountBenefit.objects.create(
range=range,
type=models.Benefit.PERCENTAGE,
value=20,
max_affected_items=1)
self.basket = G(Basket)
def test_applies_correctly_to_empty_basket(self):
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(D('0.00'), discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
for product in [create_product(price=D('5.00'))]:
self.basket.add_product(product, 2)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(1 * D('5.00') * D('0.2'), discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition_but_matches_on_boundary(self):
for product in [create_product(price=D('5.00'))]:
self.basket.add_product(product, 3)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(1 * D('5.00') * D('0.2'), discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(1, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
for product in [create_product(price=D('4.00'))]:
self.basket.add_product(product, 3)
discount = self.benefit.apply(self.basket, self.condition)
self.assertEqual(1 * D('4.00') * D('0.2'), discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
| 46.041885
| 95
| 0.68922
| 1,103
| 8,794
| 5.281052
| 0.077969
| 0.103004
| 0.066953
| 0.092704
| 0.905579
| 0.905579
| 0.905579
| 0.904721
| 0.904721
| 0.904206
| 0
| 0.02593
| 0.201842
| 8,794
| 190
| 96
| 46.284211
| 0.803961
| 0.004776
| 0
| 0.8625
| 0
| 0
| 0.023543
| 0
| 0
| 0
| 0
| 0
| 0.28125
| 1
| 0.11875
| false
| 0
| 0.0375
| 0
| 0.18125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69d3b3e4ca766145250debf66660939851b31d54
| 2,355
|
py
|
Python
|
workflow/migrations/0011_auto_20180110_0509.py
|
sannleen/TolaActivity
|
b47154339c3a45583063ecad43b0b16ae2f8f36e
|
[
"Apache-2.0"
] | null | null | null |
workflow/migrations/0011_auto_20180110_0509.py
|
sannleen/TolaActivity
|
b47154339c3a45583063ecad43b0b16ae2f8f36e
|
[
"Apache-2.0"
] | null | null | null |
workflow/migrations/0011_auto_20180110_0509.py
|
sannleen/TolaActivity
|
b47154339c3a45583063ecad43b0b16ae2f8f36e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-01-10 13:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0010_auto_20180109_0650'),
]
operations = [
migrations.AlterField(
model_name='widget',
name='h',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='w',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='x',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='xLg',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='xMd',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='xSm',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='xXl',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='y',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='yLg',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='yMd',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='ySm',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='widget',
name='yXl',
field=models.IntegerField(blank=True, default=0, null=True),
),
]
| 30.986842
| 72
| 0.545648
| 229
| 2,355
| 5.524017
| 0.240175
| 0.189723
| 0.237154
| 0.275099
| 0.825296
| 0.825296
| 0.794466
| 0.794466
| 0.794466
| 0.756522
| 0
| 0.028409
| 0.327389
| 2,355
| 75
| 73
| 31.4
| 0.770202
| 0.028875
| 0
| 0.705882
| 1
| 0
| 0.057356
| 0.01007
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
69dcf94dd33b387cbb9b7fb4acac0096936bfcf3
| 18,749
|
py
|
Python
|
src/taxi_in/pipelines/unimpeded/data_science/nodes.py
|
nasa/ML-airport-taxi-in
|
49d9cb3b4f94320102146a2c12ff1c029cb98879
|
[
"NASA-1.3"
] | 1
|
2021-10-11T06:08:59.000Z
|
2021-10-11T06:08:59.000Z
|
src/taxi_in/pipelines/unimpeded/data_science/nodes.py
|
nasa/ML-airport-taxi-in
|
49d9cb3b4f94320102146a2c12ff1c029cb98879
|
[
"NASA-1.3"
] | null | null | null |
src/taxi_in/pipelines/unimpeded/data_science/nodes.py
|
nasa/ML-airport-taxi-in
|
49d9cb3b4f94320102146a2c12ff1c029cb98879
|
[
"NASA-1.3"
] | null | null | null |
"""Data science nodes for unimpeded model development
"""
from typing import Any, Dict, Tuple
import pandas as pd
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
import xgboost as xgb
from sklearn.compose import TransformedTargetRegressor
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import OneHotEncoder
from sklearn.pipeline import Pipeline as sklearn_Pipeline
from data_services.mlflow_utils import add_environment_specs_to_conda_file
import time
import logging
import mlflow
from mlflow import sklearn as mlf_sklearn
from data_services.stand_cluster_encoder import StandEncoder
from ...data_science.error_metrics import METRIC_NAME_TO_FUNCTION_DICT
from ...data_science.evaluation_utils import evaluate_predictions
from data_services.FilterPipeline import FilterPipeline
from data_services.OrderFeatures import OrderFeatures
def train_unimp_ramp_model(
data: pd.DataFrame,
model_params: Dict[str, Any],
global_params: Dict[str, Any],
active_run_id: str,
) -> sklearn_Pipeline:
features_transforms = list()
for feature in model_params['features']:
feature_Pipeline_steps = list()
if feature not in model_params['features_core']:
# Impute to fill in nan
impute_nan_w_None = SimpleImputer(
missing_values=np.nan,
strategy='constant',
fill_value=None,
)
feature_Pipeline_steps.append((
'impute_nan_w_None',
impute_nan_w_None
))
# Impute to fill in empty strings ('')
impute_empty_string_w_None = SimpleImputer(
missing_values='',
strategy='constant',
fill_value=None,
)
feature_Pipeline_steps.append((
'impute_empty_string_w_None',
impute_empty_string_w_None
))
if feature in model_params['OneHotEncoder_features']:
one_hot_enc = OneHotEncoder(
sparse=False,
handle_unknown="ignore",
)
feature_Pipeline_steps.append((
'one_hot_enc',
one_hot_enc
))
if feature == 'arrival_stand_actual':
stand_encoder = StandEncoder()
feature_Pipeline_steps.append((
'stand_encoder',
stand_encoder
))
feature_Pipeline = sklearn_Pipeline(feature_Pipeline_steps)
feature_transforms = (feature, feature_Pipeline, [feature])
features_transforms.append(feature_transforms)
col_transformer = ColumnTransformer(
transformers=features_transforms,
remainder='passthrough',
sparse_threshold=0,
)
# Orders feature columns
order_features = OrderFeatures()
# Make model
if model_params['model'] == 'GradientBoostingRegressor':
model = GradientBoostingRegressor(**model_params['model_params'])
else:
pass
# Add rounding of model result to nearest integer
# by somewhat mis-using a transformed target regressor
# inputs should be integer number of seconds already
model = TransformedTargetRegressor(
regressor=model,
inverse_func=lambda x: np.round(x),
check_inverse=False,
)
# Make pipeline
pipeline = sklearn_Pipeline(
steps=[
('order_features', order_features),
('col_transformer', col_transformer),
('model', model),
]
)
# Add wrapper to skip model and return default for core features, and target values not satisfying the defined rules
# default is set to the quantile seek by the model or the median of the value if no quantile requested
if ((model_params['model'] == 'GradientBoostingRegressor') and
('loss' in model_params['model_params']) and
(model_params['model_params']['loss'] == 'quantile')) :
default_response = np.nanquantile(data.loc[(data.group == 'train'), model_params['target']],
model_params['model_params']['alpha'])
else :
default_response = np.nanmedian(data.loc[(data.group == 'train'), model_params['target']])
filt_pipeline = FilterPipeline(pipeline, default_response, print_debug=True)
missing_values = [np.nan, None, '']
# StandEncoder can handle unseen categories : put 0 in all known terminal columns
# Only one-hot-encoded features can not support unseen cat. for now, add problematic encoders here :
no_unknown_features = model_params['OneHotEncoder_features']
for feature_name in model_params['features_core']:
excluded_values = missing_values
if (feature_name in no_unknown_features) :
feature_values = data.loc[(data.group == 'train') & (data[feature_name].notnull()), feature_name].unique().tolist()
# Rules flagging unknown values (ignoring missing values)
filt_pipeline.add_include_rule(feature_name, feature_values + missing_values, 'Unknown ' + feature_name)
if (feature_name in global_params['category_exclusions']) :
excluded_values = missing_values + global_params['category_exclusions'][feature_name]
# Rules flagging missing values and excluded
filt_pipeline.add_exclude_rule(feature_name, excluded_values, 'Missing/Excluded ' + feature_name)
# Rules flagging invalid predictions/target
filt_pipeline.add_exclude_rule_preds(lambda x: x < 0, 'Negative prediction')
# Train pipeline
tic = time.time()
filt_pipeline.fit(
data.loc[
(data.group == 'train'),
model_params['features']
],
data.loc[
(data.group == 'train'),
model_params['target']
],
)
toc = time.time()
log = logging.getLogger(__name__)
log.info('training unimpeded ramp model took {:.1f} minutes'.format(
(toc - tic) / 60)
)
with mlflow.start_run(run_id=active_run_id):
# Log trained model
mlf_sklearn.log_model(
sk_model=filt_pipeline,
artifact_path='model',
conda_env=add_environment_specs_to_conda_file())
# Set tags
mlflow.set_tag('airport_icao', global_params['airport_icao'])
mlflow.set_tag('Model Version', 0)
# Log model parameters one at a time so that character limit is
# 500 instead of 250
for key, value in model_params.items():
mlflow.log_param(key, value)
mlflow.log_params(global_params)
return filt_pipeline
def train_unimp_ama_model(
data: pd.DataFrame,
model_params: Dict[str, Any],
global_params: Dict[str, Any],
active_run_id: str,
) -> sklearn_Pipeline:
features_transforms = list()
for feature in model_params['features']:
feature_Pipeline_steps = list()
if feature not in model_params['features_core']:
# Impute to fill in nan
impute_nan_w_None = SimpleImputer(
missing_values=np.nan,
strategy='constant',
fill_value=None,
)
feature_Pipeline_steps.append((
'impute_nan_w_None',
impute_nan_w_None
))
# Impute to fill in empty strings ('')
impute_empty_string_w_None = SimpleImputer(
missing_values='',
strategy='constant',
fill_value=None,
)
feature_Pipeline_steps.append((
'impute_empty_string_w_None',
impute_empty_string_w_None
))
if feature in model_params['OneHotEncoder_features']:
one_hot_enc = OneHotEncoder(
sparse=False,
handle_unknown='ignore',
)
feature_Pipeline_steps.append((
'one_hot_enc',
one_hot_enc
))
if feature == 'arrival_stand_actual':
stand_encoder = StandEncoder()
feature_Pipeline_steps.append((
'stand_encoder',
stand_encoder
))
feature_Pipeline = sklearn_Pipeline(feature_Pipeline_steps)
feature_transforms = (feature, feature_Pipeline, [feature])
features_transforms.append(feature_transforms)
col_transformer = ColumnTransformer(
transformers=features_transforms,
remainder='passthrough',
sparse_threshold=0,
)
# Orders feature columns
order_features = OrderFeatures()
if model_params['model'] == 'XGBRegressor':
model = xgb.XGBRegressor(**model_params['model_params'])
else:
pass
# Add rounding of model result to nearest integer
# by somewhat mis-using a transformed target regressor
# inputs should be integer number of seconds already
model = TransformedTargetRegressor(
regressor=model,
inverse_func=lambda x: np.round(x),
check_inverse=False,
)
# Make pipeline
pipeline = sklearn_Pipeline(
steps=[
('order_features', order_features),
('col_transformer', col_transformer),
('model', model),
]
)
# Add wrapper to skip model and return default for core features, and target values not satisfying the defined rules
# default is set to the median of the target value
default_response = np.nanmedian(data.loc[(data.unimpeded_AMA) & (data.group == 'train'), model_params['target']])
filt_pipeline = FilterPipeline(pipeline, default_response, print_debug=True)
missing_values = [np.nan, None,'']
# StandEncoder can handle unseen categories : put 0 in all known terminal columns
# Only one-hot-encoded features can not support unseen cat. for now, add problematic encoders here :
no_unknown_features = model_params['OneHotEncoder_features']
for feature_name in model_params['features_core']:
excluded_values = missing_values
if (feature_name in no_unknown_features) :
feature_values = data.loc[(data.group == 'train') & (data[feature_name].notnull()), feature_name].unique().tolist()
# Rules flagging unknown values (ignoring missing values)
filt_pipeline.add_include_rule(feature_name, feature_values + missing_values, 'Unknown ' + feature_name)
if (feature_name in global_params['category_exclusions']) :
excluded_values = missing_values + global_params['category_exclusions'][feature_name]
# Rules flagging missing values and excluded
filt_pipeline.add_exclude_rule(feature_name, excluded_values, 'Missing/Excluded ' + feature_name)
# Rules flagging invalid predictions/target
filt_pipeline.add_exclude_rule_preds(lambda x: x < 0, 'Negative prediction')
# Train pipeline
tic = time.time()
filt_pipeline.fit(
data.loc[
(data.unimpeded_AMA) & (data.group == 'train'),
model_params['features']
],
data.loc[
(data.unimpeded_AMA) & (data.group == 'train'),
model_params['target']
],
)
toc = time.time()
log = logging.getLogger(__name__)
log.info('training unimpeded AMA model with {} unimpeded trajectories took {:.1f} minutes'.format(
((data.unimpeded_AMA) & (data.group == 'train')).sum(),
(toc - tic) / 60)
)
with mlflow.start_run(run_id=active_run_id):
# Log trained model
mlf_sklearn.log_model(
sk_model=filt_pipeline,
artifact_path='model',
conda_env=add_environment_specs_to_conda_file())
# Set tags
mlflow.set_tag('airport_icao', global_params['airport_icao'])
mlflow.set_tag('Model Version', 0)
# Log model parameters one at a time so that character limit is
# 500 instead of 250
for key, value in model_params.items():
mlflow.log_param(key, value)
mlflow.log_params(global_params)
return filt_pipeline
def report_performance_metrics_ama(
data: pd.DataFrame,
model_params: Dict[str, Any],
active_run_id: str,
) -> bool:
"""Node for reporting performance metrics. Notice that this function has no
outputs.
"""
# Predictive model
report_model_metrics_ama(data,
model_params,
'predicted_{}'.format(model_params['name']),
active_run_id)
# Baseline
if 'predicted_baseline' in data.columns:
report_model_metrics_ama(data,
model_params,
'predicted_baseline',
active_run_id,
'baseline_',
['test'])
# STBO as truth data
if ('label' in model_params) :
if 'undelayed_arrival_{}_transit_time'.format(model_params['label']) in data.columns:
report_model_metrics_ama_STBO(data,
model_params,
'predicted_{}'.format(model_params['name']),
active_run_id,
'STBO_',
['test'])
return True
def report_model_metrics_ama(
data: pd.DataFrame,
model_params: Dict[str, Any],
y_pred: str,
active_run_id: str,
name_prefix: str = '',
group_values: list = ['train', 'test']
) -> None:
"""Node for reporting the performance metrics of the predictions performed
by the previous node. Notice that this function has no outputs, except
logging.
Primarily evaluates with the unimpeded_AMA trajectories, except for
'fraction_less_than_actual', which uses all trajectories
"""
metrics_dict = {
metric_name: METRIC_NAME_TO_FUNCTION_DICT[metric_name]
for metric_name in model_params['metrics']
if metric_name != 'fraction_less_than_actual'
}
evaluation_df = evaluate_predictions(
data[data.unimpeded_AMA & (data.missing_core_features == False) &
(data['predicted_{}'.format(model_params['name'])].isna() == False) &
data.group.isin(group_values)],
y_true=model_params['target'],
y_pred=y_pred,
metrics_dict=metrics_dict,
)
if 'fraction_less_than_actual' in model_params['metrics']:
evaluation_df_lt_actual = evaluate_predictions(
data,
y_true=model_params['target'],
y_pred=y_pred,
metrics_dict={
'fraction_less_than_actual': METRIC_NAME_TO_FUNCTION_DICT['fraction_less_than_actual']
},
)
evaluation_df = pd.concat(
[evaluation_df, evaluation_df_lt_actual],
axis=1,
)
# Log the accuracy of the model
log = logging.getLogger(__name__)
# Set active ML Flow Run
with mlflow.start_run(run_id=active_run_id):
for metric_name in model_params['metrics']:
log.info("metric {}:".format(name_prefix + metric_name))
for group in [v for v in data.group.unique() if v in group_values]:
log.info("{} group: {}".format(
group,
evaluation_df.loc[group, metric_name]
))
mlflow.log_metric(
name_prefix + metric_name + '_' + group,
evaluation_df.loc[group, metric_name]
)
def report_model_metrics_ama_STBO(
    data: pd.DataFrame,
    model_params: Dict[str, Any],
    y_pred: str,
    active_run_id: str,
    name_prefix: str = '',
    group_values: 'Optional[list]' = None,
) -> None:
    """Node for reporting the performance metrics of the predictions performed
    by the previous node. Notice that this function has no outputs, except
    logging.

    Evaluates against the STBO undelayed arrival transit time target.
    Primarily evaluates with the unimpeded_AMA trajectories, except for
    'fraction_less_than_actual', which uses all trajectories.

    :param data: trajectory data; metrics are only computed/logged when the
        STBO target column is present
    :param model_params: model config; reads 'name', 'label', 'metrics'
    :param active_run_id: MLflow run id; when None, nothing is logged
    :param group_values: data groups to evaluate; defaults to
        ['train', 'test'] (None sentinel avoids a mutable default argument)
    """
    if group_values is None:
        group_values = ['train', 'test']
    metrics_dict = {
        metric_name: METRIC_NAME_TO_FUNCTION_DICT[metric_name]
        for metric_name in model_params['metrics']
        if metric_name != 'fraction_less_than_actual'
    }
    # Hoist the repeatedly-formatted STBO target column name.
    stbo_target = 'undelayed_arrival_{}_transit_time'.format(
        model_params['label'])
    if stbo_target in data.columns:
        pred_col = 'predicted_{}'.format(model_params['name'])
        data_filtered = data[data.unimpeded_AMA &
                             (data.missing_core_features == False) &  # noqa: E712
                             data[pred_col].notna() &
                             data['predicted_baseline'].notna() &
                             data[stbo_target].notna() &
                             data.group.isin(group_values)]
        evaluation_df_STBO = evaluate_predictions(
            data_filtered,
            y_true=stbo_target,
            y_pred=y_pred,
            metrics_dict=metrics_dict,
        )
        if 'fraction_less_than_actual' in model_params['metrics']:
            # Evaluated on the full, unfiltered frame on purpose.
            evaluation_df_lt_actual = evaluate_predictions(
                data,
                y_true=stbo_target,
                y_pred=y_pred,
                metrics_dict={
                    'fraction_less_than_actual':
                        METRIC_NAME_TO_FUNCTION_DICT['fraction_less_than_actual']
                },
            )
            evaluation_df_STBO = pd.concat(
                [evaluation_df_STBO, evaluation_df_lt_actual],
                axis=1,
            )
    # Log the accuracy of the model.
    log = logging.getLogger(__name__)
    if active_run_id is not None:
        with mlflow.start_run(run_id=active_run_id):
            # evaluation_df_STBO only exists when the target column does,
            # so re-check before logging.
            if stbo_target in data.columns:
                for metric_name in evaluation_df_STBO.keys():
                    log.info("metric %s:", name_prefix + metric_name)
                    for group in [v for v in data.group.unique()
                                  if v in group_values]:
                        log.info("%s group: %s",
                                 group,
                                 evaluation_df_STBO.loc[group, metric_name])
                        mlflow.log_metric(
                            name_prefix + metric_name + '_' + group,
                            evaluation_df_STBO.loc[group, metric_name],
                        )
| 37.200397
| 127
| 0.620673
| 2,072
| 18,749
| 5.319981
| 0.137066
| 0.057879
| 0.020049
| 0.019958
| 0.849587
| 0.838066
| 0.828903
| 0.808582
| 0.790438
| 0.780278
| 0
| 0.002108
| 0.291429
| 18,749
| 503
| 128
| 37.274354
| 0.827625
| 0.145128
| 0
| 0.73913
| 0
| 0
| 0.111768
| 0.036963
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013587
| false
| 0.01087
| 0.054348
| 0
| 0.076087
| 0.005435
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69ea5eee3ffc4e3cd977207e8fb3539769c8a403
| 115
|
py
|
Python
|
pybryt/complexities.py
|
chrispyles/pybryt
|
23f8bfce3179638ec5b4efe3555d3bb4e7321dc0
|
[
"MIT"
] | 41
|
2021-02-17T20:45:17.000Z
|
2022-03-16T19:39:00.000Z
|
pybryt/complexities.py
|
chrispyles/pybryt
|
23f8bfce3179638ec5b4efe3555d3bb4e7321dc0
|
[
"MIT"
] | 120
|
2021-03-05T01:11:45.000Z
|
2022-03-23T04:08:58.000Z
|
pybryt/complexities.py
|
chrispyles/pybryt
|
23f8bfce3179638ec5b4efe3555d3bb4e7321dc0
|
[
"MIT"
] | 18
|
2021-02-14T04:27:34.000Z
|
2022-03-03T07:29:49.000Z
|
"""Alias for ``pybryt.annotations.complexity.complexities``"""
from .annotations.complexity.complexities import *
| 28.75
| 62
| 0.782609
| 11
| 115
| 8.181818
| 0.727273
| 0.466667
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069565
| 115
| 3
| 63
| 38.333333
| 0.841122
| 0.486957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0e06ddf02146a337293ff84f99d6da9caba973ed
| 274
|
py
|
Python
|
src/android/toga_android/libs/androidx/swiperefreshlayout.py
|
luizoti/toga
|
3c49e685f325f1aba2ce048b253402d7e4519f97
|
[
"BSD-3-Clause"
] | 1,261
|
2019-03-31T16:28:47.000Z
|
2022-03-31T09:01:23.000Z
|
src/android/toga_android/libs/androidx/swiperefreshlayout.py
|
luizoti/toga
|
3c49e685f325f1aba2ce048b253402d7e4519f97
|
[
"BSD-3-Clause"
] | 597
|
2019-04-02T20:02:42.000Z
|
2022-03-30T10:28:47.000Z
|
src/android/toga_android/libs/androidx/swiperefreshlayout.py
|
luizoti/toga
|
3c49e685f325f1aba2ce048b253402d7e4519f97
|
[
"BSD-3-Clause"
] | 318
|
2019-03-31T18:32:00.000Z
|
2022-03-30T18:07:13.000Z
|
from rubicon.java import JavaClass, JavaInterface

# JVM bindings (via rubicon-java) for the AndroidX SwipeRefreshLayout widget
# and its nested OnRefreshListener interface. Note the '$' separator used by
# JVM naming for the inner type.
SwipeRefreshLayout = JavaClass("androidx/swiperefreshlayout/widget/SwipeRefreshLayout")
SwipeRefreshLayout__OnRefreshListener = JavaInterface(
    "androidx/swiperefreshlayout/widget/SwipeRefreshLayout$OnRefreshListener"
)
| 39.142857
| 87
| 0.864964
| 20
| 274
| 11.75
| 0.5
| 0.221277
| 0.27234
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065693
| 274
| 6
| 88
| 45.666667
| 0.917969
| 0
| 0
| 0
| 0
| 0
| 0.452555
| 0.452555
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e123292b4cddb6b0ad7992a6dd0570ad66e713b
| 1,134
|
py
|
Python
|
src/sst/elements/simpleElementExample/tests/subcomponent_tests/legacy/refFiles/test_sc_legacy_2n2n.py
|
Xiaoyang-Lu/sst-elements
|
7946241e9f5a57a0bfdbfbf8452deacb1c3a9051
|
[
"BSD-3-Clause"
] | null | null | null |
src/sst/elements/simpleElementExample/tests/subcomponent_tests/legacy/refFiles/test_sc_legacy_2n2n.py
|
Xiaoyang-Lu/sst-elements
|
7946241e9f5a57a0bfdbfbf8452deacb1c3a9051
|
[
"BSD-3-Clause"
] | null | null | null |
src/sst/elements/simpleElementExample/tests/subcomponent_tests/legacy/refFiles/test_sc_legacy_2n2n.py
|
Xiaoyang-Lu/sst-elements
|
7946241e9f5a57a0bfdbfbf8452deacb1c3a9051
|
[
"BSD-3-Clause"
] | null | null | null |
Loader0:mySubComp[0]:mySubCompSlot[0].numSent : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader0:mySubComp[0]:mySubCompSlot[1].numSent : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader0:mySubComp[1]:mySubCompSlot[0].numSent : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader0:mySubComp[1]:mySubCompSlot[1].numSent : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader1:mySubComp[0]:mySubCompSlot[0].numRecv : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader1:mySubComp[0]:mySubCompSlot[1].numRecv : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader1:mySubComp[1]:mySubCompSlot[0].numRecv : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader1:mySubComp[1]:mySubCompSlot[1].numRecv : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Simulation is complete, simulated time: 10 us
| 113.4
| 135
| 0.674603
| 183
| 1,134
| 4.180328
| 0.131148
| 0.104575
| 0.177778
| 0.198693
| 0.928105
| 0.928105
| 0.928105
| 0.928105
| 0.928105
| 0.928105
| 0
| 0.177268
| 0.154321
| 1,134
| 9
| 136
| 126
| 0.620438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3875e9a5977d7e95f8c77afab19abb9b7deed5e8
| 196,710
|
py
|
Python
|
tests/core/test_eventing.py
|
repsistance/keripy
|
655c5625b5593844c405e7da5d4b089153b64efb
|
[
"Apache-2.0"
] | null | null | null |
tests/core/test_eventing.py
|
repsistance/keripy
|
655c5625b5593844c405e7da5d4b089153b64efb
|
[
"Apache-2.0"
] | null | null | null |
tests/core/test_eventing.py
|
repsistance/keripy
|
655c5625b5593844c405e7da5d4b089153b64efb
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
"""
tests.core.test_eventing module
"""
import os
import pytest
import pysodium
import blake3
from math import ceil
from keri.kering import Version
from keri.kering import (ValidationError, EmptyMaterialError, DerivationError,
ShortageError)
from keri.core.coring import MtrDex, Matter, IdrDex, Indexer, CtrDex, Counter
from keri.core.coring import Seqner, Verfer, Signer, Diger, Nexter, Prefixer
from keri.core.coring import Salter, Serder, Siger, Cigar
from keri.core.coring import Ilks
from keri.core.eventing import (TraitDex, LastEstLoc, Serials, Versify,
simple, ample)
from keri.core.eventing import (deWitnessCouple, deReceiptCouple, deSourceCouple,
deReceiptTriple,
deTransReceiptQuadruple, deTransReceiptQuintuple)
from keri.core.eventing import (SealDigest, SealRoot, SealEvent, SealLocation,
StateEvent, StateEstEvent)
from keri.core.eventing import (incept, rotate, interact, receipt,
delcept, deltate, state, messagize)
from keri.core.eventing import Kever, Kevery, Parser
from keri.db.dbing import dgKey, snKey, openDB, Baser
from keri.base.keeping import openKS, Manager
from keri import help
logger = help.ogler.getLogger()
def test_simple():
    """
    test simple majority function

    Checks the simple-majority threshold over small n, including the
    degenerate n <= 0 inputs which clamp to 0.
    """
    cases = [
        (-2, 0), (-1, 0), (0, 0),
        (1, 1), (2, 2), (3, 2),
        (4, 3), (5, 3), (6, 4),
    ]
    for n, expected in cases:
        assert simple(n) == expected
def test_ample():
    """
    test ample majority function (sufficient immune majority)
    """
    # n == 0 is degenerate: threshold is 0 regardless of weak / f
    assert ample(0) == 0
    assert ample(0, weak=False) == 0
    assert ample(0, f=0) == 0
    assert ample(0, f=0, weak=False) == 0
    assert ample(0, f=1) == 0
    assert ample(0, f=1, weak=False) == 0
    assert ample(1) == 1
    assert ample(1, weak=False) == 1
    # an explicit f inconsistent with n raises ValueError
    with pytest.raises(ValueError):
        assert ample(1, f=1) == 1
    with pytest.raises(ValueError):
        assert ample(1, f=1, weak=False) == 1
    assert ample(2) == 2
    assert ample(2, weak=False) == 2
    with pytest.raises(ValueError):
        assert ample(2, f=1) == 2
    with pytest.raises(ValueError):
        assert ample(2, f=1, weak=False) == 2
    assert ample(3) == 3
    assert ample(3, weak=False) == 3
    with pytest.raises(ValueError):
        assert ample(3, f=1) == 3
    # NOTE(review): exact duplicate of the previous assertion; the pattern
    # used elsewhere suggests ample(3, f=1, weak=False) was intended — confirm.
    with pytest.raises(ValueError):
        assert ample(3, f=1) == 3
    assert ample(4) == 3
    assert ample(4, weak=False) == 3
    assert ample(4, f=1) == 3
    # NOTE(review): duplicate; likely intended ample(4, f=1, weak=False) — confirm.
    assert ample(4, f=1) == 3
    assert ample(5) == 4
    assert ample(5, weak=False) == 4
    assert ample(5, f=1) == 4
    # NOTE(review): duplicate; likely intended ample(5, f=1, weak=False) — confirm.
    assert ample(5, f=1) == 4
    # from n == 6 on, both weak variants are exercised explicitly
    assert ample(6) == 4
    assert ample(6, weak=False) == 5
    assert ample(6, f=1) == 4
    assert ample(6, f=1, weak=False) == 5
    assert ample(7) == 5
    assert ample(7, weak=False) == 5
    assert ample(7, f=2) == 5
    assert ample(7, f=2, weak=False) == 5
    assert ample(8) == 6
    assert ample(8, weak=False) == 6
    assert ample(8, f=2) == 6
    assert ample(8, f=2, weak=False) == 6
    assert ample(9) == 6
    assert ample(9, weak=False) == 7
    assert ample(9, f=2) == 6
    assert ample(9, f=2, weak=False) == 7
    assert ample(10) == 7
    assert ample(10, weak=False) == 7
    assert ample(10, f=3) == 7
    assert ample(10, f=3, weak=False) == 7
    assert ample(11) == 8
    assert ample(11, weak=False) == 8
    assert ample(11, f=3) == 8
    assert ample(11, f=3, weak=False) == 8
    assert ample(12) == 8
    assert ample(12, weak=False) == 9
    assert ample(12, f=3) == 8
    assert ample(12, f=3, weak=False) == 9
    assert ample(13) == 9
    assert ample(13, weak=False) == 9
    assert ample(13, f=4) == 9
    assert ample(13, f=4, weak=False) == 9
def test_dewitnesscouple():
    """
    test deWitnessCouple function

    Splits a concatenated digest + witness-signature couple for each
    supported buffer type (str, bytes, memoryview, bytearray) and checks
    the strip (in-place delete) semantics: only mutable bytearrays are
    consumed.
    """
    dig = 'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    wig = 'AAmdI8OSQkMJ9r-xigjEByEjIua7LHH3AOJ22PQKqljMhuhcgh9nGRcKnsz5KvKd7K_H9-1298F4Id1DxvIoEmCQ'
    digb = b'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    wigb = b'AAmdI8OSQkMJ9r-xigjEByEjIua7LHH3AOJ22PQKqljMhuhcgh9nGRcKnsz5KvKd7K_H9-1298F4Id1DxvIoEmCQ'
    # str
    couple = dig + wig
    assert len(couple) == 132
    diger, wiger = deWitnessCouple(couple)
    assert diger.qb64 == dig
    assert wiger.qb64 == wig
    assert len(couple) == 132  # not strip delete
    # bytes
    couple = digb + wigb
    assert len(couple) == 132
    diger, wiger = deWitnessCouple(couple)
    assert diger.qb64b == digb
    assert wiger.qb64b == wigb
    assert len(couple) == 132  # not strip delete
    # memoryview
    couple = memoryview(couple)
    assert len(couple) == 132
    diger, wiger = deWitnessCouple(couple)
    assert diger.qb64b == digb
    assert wiger.qb64b == wigb
    assert len(couple) == 132  # not strip delete
    # bytearray
    couple = bytearray(couple)
    assert len(couple) == 132
    diger, wiger = deWitnessCouple(couple)
    assert diger.qb64b == digb
    assert wiger.qb64b == wigb
    assert len(couple) == 132  # not strip delete
    # test strip delete
    # str
    couple = dig + wig
    assert len(couple) == 132
    with pytest.raises(TypeError):  # immutable str so no delete
        diger, wiger = deWitnessCouple(couple, strip=True)
    assert len(couple) == 132  # immutable so no delete
    # bytes
    couple = digb + wigb
    with pytest.raises(TypeError):  # immutable bytes so no delete
        diger, wiger = deWitnessCouple(couple, strip=True)
    assert len(couple) == 132  # immutable so no delete
    # memoryview
    couple = memoryview(couple)
    with pytest.raises(TypeError):  # memoryview converted to bytes so no delete
        diger, wiger = deWitnessCouple(couple, strip=True)
    assert len(couple) == 132  # immutable so no delete
    # bytearray
    couple = bytearray(couple)
    diger, wiger = deWitnessCouple(couple, strip=True)
    assert diger.qb64b == digb
    assert wiger.qb64b == wigb
    assert len(couple) == 0  # bytearray mutable so strip delete succeeds
    """end test"""
def test_dereceiptcouple():
    """
    test deReceiptCouple function

    Splits a concatenated prefix + cigar (non-transferable receipt) couple
    for each supported buffer type and checks the strip (in-place delete)
    semantics: only mutable bytearrays are consumed.
    """
    pre = 'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    cig = '0BMszieX0cpTOWZwa2I2LfeFAi9lrDjc1-Ip9ywl1KCNqie4ds_3mrZxHFboMC8Fu_5asnM7m67KlGC9EYaw0KDQ'
    preb = b'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    cigb = b'0BMszieX0cpTOWZwa2I2LfeFAi9lrDjc1-Ip9ywl1KCNqie4ds_3mrZxHFboMC8Fu_5asnM7m67KlGC9EYaw0KDQ'
    # str
    couple = pre + cig
    assert len(couple) == 132
    prefixer, cigar = deReceiptCouple(couple)
    assert prefixer.qb64 == pre
    assert cigar.qb64 == cig
    assert len(couple) == 132  # not strip delete
    # bytes
    couple = preb + cigb
    assert len(couple) == 132
    prefixer, cigar = deReceiptCouple(couple)
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(couple) == 132  # not strip delete
    # memoryview
    couple = memoryview(couple)
    assert len(couple) == 132
    prefixer, cigar = deReceiptCouple(couple)
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(couple) == 132  # not strip delete
    # bytearray
    couple = bytearray(couple)
    assert len(couple) == 132
    prefixer, cigar = deReceiptCouple(couple)
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(couple) == 132  # not strip delete
    # test strip delete
    # str
    couple = pre + cig
    assert len(couple) == 132
    with pytest.raises(TypeError):  # immutable str so no delete
        prefixer, cigar = deReceiptCouple(couple, strip=True)
    assert len(couple) == 132  # immutable so no delete
    # bytes
    couple = preb + cigb
    with pytest.raises(TypeError):  # immutable bytes so no delete
        prefixer, cigar = deReceiptCouple(couple, strip=True)
    assert len(couple) == 132  # immutable so no delete
    # memoryview
    couple = memoryview(couple)
    with pytest.raises(TypeError):  # memoryview converted to bytes so no delete
        prefixer, cigar = deReceiptCouple(couple, strip=True)
    assert len(couple) == 132  # immutable so no delete
    # bytearray
    couple = bytearray(couple)
    prefixer, cigar = deReceiptCouple(couple, strip=True)
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(couple) == 0  # bytearray mutable so strip delete succeeds
    """end test"""
def test_desourcecouple():
    """
    test deSourceCouple function

    Splits a concatenated sequence-number + digest source couple for each
    supported buffer type and checks the strip (in-place delete) semantics:
    only mutable bytearrays are consumed.
    """
    snu = '0AAAAAAAAAAAAAAAAAAAAABQ'
    dig = 'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    snub = b'0AAAAAAAAAAAAAAAAAAAAABQ'
    digb = b'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    # str
    couple = snu + dig
    assert len(couple) == 68
    seqner, diger = deSourceCouple(couple)
    assert seqner.qb64 == snu
    assert diger.qb64 == dig
    assert len(couple) == 68  # not strip delete
    # bytes
    couple = snub + digb
    assert len(couple) == 68
    seqner, diger = deSourceCouple(couple)
    assert seqner.qb64b == snub
    assert diger.qb64b == digb
    assert len(couple) == 68  # not strip delete
    # memoryview
    couple = memoryview(couple)
    assert len(couple) == 68
    seqner, diger = deSourceCouple(couple)
    assert seqner.qb64b == snub
    assert diger.qb64b == digb
    assert len(couple) == 68  # not strip delete
    # bytearray
    couple = bytearray(couple)
    assert len(couple) == 68
    seqner, diger = deSourceCouple(couple)
    assert seqner.qb64b == snub
    assert diger.qb64b == digb
    assert len(couple) == 68  # not strip delete
    # test strip delete
    # str
    couple = snu + dig
    assert len(couple) == 68
    with pytest.raises(TypeError):  # immutable str so no delete
        seqner, diger = deSourceCouple(couple, strip=True)
    assert len(couple) == 68  # immutable so no delete
    # bytes
    couple = snub + digb
    with pytest.raises(TypeError):  # immutable bytes so no delete
        seqner, diger = deSourceCouple(couple, strip=True)
    assert len(couple) == 68  # immutable so no delete
    # memoryview
    couple = memoryview(couple)
    with pytest.raises(TypeError):  # memoryview converted to bytes so no delete
        seqner, diger = deSourceCouple(couple, strip=True)
    assert len(couple) == 68  # immutable so no delete
    # bytearray
    couple = bytearray(couple)
    seqner, diger = deSourceCouple(couple, strip=True)
    assert seqner.qb64b == snub
    assert diger.qb64b == digb
    assert len(couple) == 0  # bytearray mutable so strip delete succeeds
    """end test"""
def test_dereceipttriple():
    """
    test deReceiptTriple function

    Splits a concatenated digest + prefix + cigar receipt triple for each
    supported buffer type and checks the strip (in-place delete) semantics:
    only mutable bytearrays are consumed.
    """
    dig = 'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    pre = 'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    cig = '0BMszieX0cpTOWZwa2I2LfeFAi9lrDjc1-Ip9ywl1KCNqie4ds_3mrZxHFboMC8Fu_5asnM7m67KlGC9EYaw0KDQ'
    digb = b'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    preb = b'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    cigb = b'0BMszieX0cpTOWZwa2I2LfeFAi9lrDjc1-Ip9ywl1KCNqie4ds_3mrZxHFboMC8Fu_5asnM7m67KlGC9EYaw0KDQ'
    # str
    triple = dig + pre + cig
    diger, prefixer, cigar = deReceiptTriple(triple)
    assert diger.qb64 == dig
    assert prefixer.qb64 == pre
    assert cigar.qb64 == cig
    assert len(triple) == 176
    # bytes
    triple = digb + preb + cigb
    diger, prefixer, cigar = deReceiptTriple(triple)
    assert diger.qb64b == digb
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(triple) == 176
    # memoryview
    triple = memoryview(triple)
    diger, prefixer, cigar = deReceiptTriple(triple)
    assert diger.qb64b == digb
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(triple) == 176
    # bytearray
    triple = bytearray(triple)
    diger, prefixer, cigar = deReceiptTriple(triple)
    assert diger.qb64b == digb
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(triple) == 176
    # test strip delete
    # str converts to bytes
    triple = dig + pre + cig
    assert len(triple) == 176
    with pytest.raises(TypeError):
        diger, prefixer, cigar = deReceiptTriple(triple, strip=True)
    assert len(triple) == 176  # immutable so no strip delete
    # bytes
    triple = digb + preb + cigb
    assert len(triple) == 176
    with pytest.raises(TypeError):
        diger, prefixer, cigar = deReceiptTriple(triple, strip=True)
    assert len(triple) == 176  # immutable so no strip delete
    # memoryview converts to bytes
    triple = memoryview(triple)
    assert len(triple) == 176
    with pytest.raises(TypeError):
        diger, prefixer, cigar = deReceiptTriple(triple, strip=True)
    assert len(triple) == 176  # immutable so no strip delete
    # bytearray
    triple = bytearray(triple)
    assert len(triple) == 176
    diger, prefixer, cigar = deReceiptTriple(triple, strip=True)
    assert diger.qb64b == digb
    assert prefixer.qb64b == preb
    assert cigar.qb64b == cigb
    assert len(triple) == 0  # mutable so strip delete
    """end test"""
def test_dequadruple():
    """
    test deTransReceiptQuadruple function

    Splits a concatenated prefix + seq-num + digest + signature quadruple
    for each supported buffer type and checks strip (in-place delete)
    semantics: only mutable bytearrays are consumed.
    """
    spre = 'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    ssnu = '0AAAAAAAAAAAAAAAAAAAAABQ'
    sdig = 'EsLkveIFUPvt38xhtgYYJRCCpAGO7WjjHVR37Pawv67E'
    sig = 'AFmdI8OSQkMJ9r-xigjEByEjIua7LHH3AOJ22PQKqljMhuhcgh9nGRcKnsz5KvKd7K_H9-1298F4Id1DxvIoEmCQ'
    spreb = b'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    ssnub = b'0AAAAAAAAAAAAAAAAAAAAABQ'
    sdigb = b'EsLkveIFUPvt38xhtgYYJRCCpAGO7WjjHVR37Pawv67E'
    sigb = b'AFmdI8OSQkMJ9r-xigjEByEjIua7LHH3AOJ22PQKqljMhuhcgh9nGRcKnsz5KvKd7K_H9-1298F4Id1DxvIoEmCQ'
    # str
    quadruple = spre + ssnu + sdig + sig
    sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple)
    assert sprefixer.qb64 == spre
    assert sseqner.qb64 == ssnu
    assert sdiger.qb64 == sdig
    assert siger.qb64 == sig
    assert len(quadruple) == 200
    # bytes
    # BUG FIX: the unpack target was misspelled 'sigar' in the bytes,
    # memoryview, bytearray and final strip cases, so the 'siger' assertions
    # were silently re-checking the stale value from the str case.
    quadruple = spreb + ssnub + sdigb + sigb
    sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple)
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quadruple) == 200
    # memoryview
    quadruple = memoryview(quadruple)
    sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple)
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quadruple) == 200
    # bytearray
    quadruple = bytearray(quadruple)
    sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple)
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quadruple) == 200
    # test strip delete
    # str converts to bytes
    quadruple = spre + ssnu + sdig + sig
    assert len(quadruple) == 200
    with pytest.raises(TypeError):  # immutable so no strip delete
        sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple, strip=True)
    assert len(quadruple) == 200  # immutable so no strip delete
    # bytes
    quadruple = spreb + ssnub + sdigb + sigb
    assert len(quadruple) == 200
    with pytest.raises(TypeError):  # immutable so no strip delete
        sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple, strip=True)
    assert len(quadruple) == 200  # immutable so no strip delete
    # memoryview converts to bytes
    quadruple = memoryview(quadruple)
    assert len(quadruple) == 200
    with pytest.raises(TypeError):  # immutable so no strip delete
        sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple, strip=True)
    assert len(quadruple) == 200  # immutable so no strip delete
    # bytearray
    quadruple = bytearray(quadruple)
    assert len(quadruple) == 200
    sprefixer, sseqner, sdiger, siger = deTransReceiptQuadruple(quadruple, strip=True)
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quadruple) == 0  # mutable so strip delete
    """end test"""
def test_dequintuple():
    """
    test deTransReceiptQuintuple function

    Splits a concatenated event-digest + prefix + seq-num + digest +
    signature quintuple for each supported buffer type and checks strip
    (in-place delete) semantics: only mutable bytearrays are consumed.
    """
    edig = 'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    spre = 'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    ssnu = '0AAAAAAAAAAAAAAAAAAAAABQ'
    sdig = 'EsLkveIFUPvt38xhtgYYJRCCpAGO7WjjHVR37Pawv67E'
    sig = 'AFmdI8OSQkMJ9r-xigjEByEjIua7LHH3AOJ22PQKqljMhuhcgh9nGRcKnsz5KvKd7K_H9-1298F4Id1DxvIoEmCQ'
    edigb = b'E62X8Lfrl9lZbCGz8cfKIvM_cqLyTYVLSFLhnttezlzQ'
    spreb = b'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
    ssnub = b'0AAAAAAAAAAAAAAAAAAAAABQ'
    sdigb = b'EsLkveIFUPvt38xhtgYYJRCCpAGO7WjjHVR37Pawv67E'
    sigb = b'AFmdI8OSQkMJ9r-xigjEByEjIua7LHH3AOJ22PQKqljMhuhcgh9nGRcKnsz5KvKd7K_H9-1298F4Id1DxvIoEmCQ'
    # str
    sealet = spre + ssnu + sdig
    quintuple = edig + sealet + sig
    ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple)
    assert ediger.qb64 == edig
    assert sprefixer.qb64 == spre
    assert sseqner.qb64 == ssnu
    assert sdiger.qb64 == sdig
    assert siger.qb64 == sig
    assert len(quintuple) == 244
    # bytes
    # BUG FIX: the unpack target was misspelled 'sigar' in the bytes,
    # memoryview, bytearray and final strip cases, so the 'siger' assertions
    # were silently re-checking the stale value from the str case.
    quintuple = edigb + spreb + ssnub + sdigb + sigb
    ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple)
    assert ediger.qb64b == edigb
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quintuple) == 244
    # memoryview
    quintuple = memoryview(quintuple)
    ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple)
    assert ediger.qb64b == edigb
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quintuple) == 244
    # bytearray
    quintuple = bytearray(quintuple)
    ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple)
    assert ediger.qb64b == edigb
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quintuple) == 244
    # test deletive
    # str converts to bytes
    sealet = spre + ssnu + sdig
    quintuple = edig + sealet + sig
    assert len(quintuple) == 244
    with pytest.raises(TypeError):
        ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple, strip=True)
    assert len(quintuple) == 244  # immutable so no strip delete
    # bytes
    quintuple = edigb + spreb + ssnub + sdigb + sigb
    assert len(quintuple) == 244
    with pytest.raises(TypeError):
        ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple, strip=True)
    assert len(quintuple) == 244  # immutable so no strip delete
    # memoryview converts to bytes
    quintuple = memoryview(quintuple)
    assert len(quintuple) == 244
    with pytest.raises(TypeError):
        ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple, strip=True)
    assert len(quintuple) == 244  # immutable so no strip delete
    # bytearray
    quintuple = bytearray(quintuple)
    assert len(quintuple) == 244
    ediger, sprefixer, sseqner, sdiger, siger = deTransReceiptQuintuple(quintuple, strip=True)
    assert ediger.qb64b == edigb
    assert sprefixer.qb64b == spreb
    assert sseqner.qb64b == ssnub
    assert sdiger.qb64b == sdigb
    assert siger.qb64b == sigb
    assert len(quintuple) == 0  # mutable so strip delete
    """end test"""
def test_lastestloc():
    """
    Test LastEstLoc namedtuple

    Verifies construction, field access, and tuple-style membership.
    """
    loc = LastEstLoc(s=1, d='E12345')
    assert isinstance(loc, LastEstLoc)
    # namedtuples support membership tests over their values
    assert 1 in loc
    assert loc.s == 1
    assert 'E12345' in loc
    assert loc.d == 'E12345'
    """End Test """
def test_seals():
    """
    Test seal namedtuples

    Verifies construction, value membership, field access, _asdict
    round-tripping, and _fields ordering for each seal type.
    """
    digest = SealDigest(d='E12345')
    assert isinstance(digest, SealDigest)
    assert 'E12345' in digest
    assert digest.d == 'E12345'
    assert digest._asdict() == {'d': 'E12345'}

    root = SealRoot(rd='EABCDE')
    assert isinstance(root, SealRoot)
    assert 'EABCDE' in root
    assert root.rd == 'EABCDE'
    assert root._asdict() == {'rd': 'EABCDE'}

    event = SealEvent(i='B4321', s='1', d='Eabcd')
    assert isinstance(event, SealEvent)
    for value in ('B4321', '1', 'Eabcd'):
        assert value in event
    assert (event.i, event.s, event.d) == ('B4321', '1', 'Eabcd')
    assert event._asdict() == {'i': 'B4321', 's': '1', 'd': 'Eabcd'}
    assert event._fields == ('i', 's', 'd')

    location = SealLocation(i='B4321', s='1', t='ixn', p='Eabcd')
    assert isinstance(location, SealLocation)
    for value in ('B4321', '1', 'ixn', 'Eabcd'):
        assert value in location
    assert (location.i, location.s, location.t, location.p) == \
        ('B4321', '1', 'ixn', 'Eabcd')
    assert location._asdict() == \
        {'i': 'B4321', 's': '1', 't': 'ixn', 'p': 'Eabcd'}
    assert location._fields == ('i', 's', 't', 'p')
    """End Test """
def test_keyeventfuncs():
    """
    Test the support functionality for key event generation functions:
    incept, rotate, interact, receipt, delcept, deltate, and state (KSN),
    including messagize attachment of endorsing signatures.
    """
    # seed = pysodium.randombytes(pysodium.crypto_sign_SEEDBYTES)
    seed = (b'\x9f{\xa8\xa7\xa8C9\x96&\xfa\xb1\x99\xeb\xaa \xc4\x1bG\x11\xc4\xaeSAR'
            b'\xc9\xbd\x04\x9d\x85)~\x93')

    # Inception: Non-transferable (ephemeral) case
    signer0 = Signer(raw=seed, transferable=False)  # original signing keypair non transferable
    assert signer0.code == MtrDex.Ed25519_Seed
    assert signer0.verfer.code == MtrDex.Ed25519N
    keys0 = [signer0.verfer.qb64]
    serder = incept(keys=keys0)  # default nxt is empty so abandoned
    assert serder.ked["i"] == 'BWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc'
    assert serder.ked["n"] == ""
    assert serder.raw == (b'{"v":"KERI10JSON0000c1_","i":"BWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc",'
                          b'"s":"0","t":"icp","kt":"1","k":["BWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhc'
                          b'c"],"n":"","bt":"0","b":[],"c":[],"a":[]}')

    with pytest.raises(DerivationError):
        # non-empty nxt with non-transferable code must fail derivation
        serder = incept(keys=keys0, code=MtrDex.Ed25519N, nxt="ABCDE")

    # Inception: Transferable Case but abandoned in incept so equivalent
    signer0 = Signer(raw=seed)  # original signing keypair transferable default
    assert signer0.code == MtrDex.Ed25519_Seed
    assert signer0.verfer.code == MtrDex.Ed25519
    keys0 = [signer0.verfer.qb64]
    serder = incept(keys=keys0)  # default nxt is empty so abandoned
    assert serder.ked["i"] == 'DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc'
    assert serder.ked["n"] == ""
    assert serder.raw == (b'{"v":"KERI10JSON0000c1_","i":"DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc",'
                          b'"s":"0","t":"icp","kt":"1","k":["DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhc'
                          b'c"],"n":"","bt":"0","b":[],"c":[],"a":[]}')

    # Inception: Transferable not abandoned i.e. next not empty
    # seed = pysodium.randombytes(pysodium.crypto_sign_SEEDBYTES)
    seed1 = (b'\x83B~\x04\x94\xe3\xceUQy\x11f\x0c\x93]\x1e\xbf\xacQ\xb5\xd6Y^\xa2E\xfa\x015'
             b'\x98Y\xdd\xe8')
    signer1 = Signer(raw=seed1)  # next signing keypair transferable is default
    assert signer1.code == MtrDex.Ed25519_Seed
    assert signer1.verfer.code == MtrDex.Ed25519
    keys1 = [signer1.verfer.qb64]
    # compute nxt digest
    nexter1 = Nexter(keys=keys1)  # default sith is 1
    nxt1 = nexter1.qb64  # transferable so nxt is not empty
    assert nxt1 == 'EcBCalw7Oe2ohLDra2ovwlv72PrlQZdQdaoSZ1Vvk5P4'
    serder0 = incept(keys=keys0, nxt=nxt1)
    pre = serder0.ked["i"]
    assert serder0.ked["i"] == 'DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc'
    assert serder0.ked["s"] == '0'
    assert serder0.ked["t"] == Ilks.icp
    assert serder0.ked["n"] == nxt1
    # Fix: the original asserted a bare non-empty bytes literal, which is
    # always truthy and therefore vacuous; compare against serder0.raw.
    assert serder0.raw == (b'{"v":"KERI10JSON0000ed_","i":"DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc",'
                           b'"s":"0","t":"icp","kt":"1","k":["DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhc'
                           b'c"],"n":"EcBCalw7Oe2ohLDra2ovwlv72PrlQZdQdaoSZ1Vvk5P4","bt":"0","b":[],"c":['
                           b'],"a":[]}')
    assert serder0.dig == 'E9VNRo_h8UsObC0Qn7tvbssuij3d-fSE3hU9_4mg4Czk'

    # Rotation: Transferable not abandoned i.e. next not empty
    # seed = pysodium.randombytes(pysodium.crypto_sign_SEEDBYTES)
    seed2 = (b'\xbe\x96\x02\xa9\x88\xce\xf9O\x1e\x0fo\xc0\xff\x98\xb6\xfa\x1e\xa2y\xf2'
             b'e\xf9AL\x1aeK\xafj\xa1pB')
    signer2 = Signer(raw=seed2)  # next signing keypair transferable is default
    assert signer2.code == MtrDex.Ed25519_Seed
    assert signer2.verfer.code == MtrDex.Ed25519
    keys2 = [signer2.verfer.qb64]
    # compute nxt digest
    nexter2 = Nexter(keys=keys2)
    nxt2 = nexter2.qb64  # transferable so nxt is not empty
    assert nxt2 == 'EAXTvbATMnVRGjyC_VCNuXcPTxxpLanfzj14u3QMsD_U'
    serder1 = rotate(pre=pre, keys=keys1, dig=serder0.dig, nxt=nxt2, sn=1)
    assert serder1.ked["i"] == pre
    assert serder1.ked["s"] == '1'
    assert serder1.ked["t"] == Ilks.rot
    assert serder1.ked["n"] == nxt2
    assert serder1.ked["p"] == serder0.dig
    assert serder1.raw == (b'{"v":"KERI10JSON000122_","i":"DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc",'
                           b'"s":"1","t":"rot","p":"E9VNRo_h8UsObC0Qn7tvbssuij3d-fSE3hU9_4mg4Czk","kt":"1'
                           b'","k":["DHgZa-u7veNZkqk2AxCnxrINGKfQ0bRiaf9FdA_-_49A"],"n":"EAXTvbATMnVRGjyC'
                           b'_VCNuXcPTxxpLanfzj14u3QMsD_U","bt":"0","br":[],"ba":[],"a":[]}')

    # Interaction:
    serder2 = interact(pre=pre, dig=serder1.dig, sn=2)
    assert serder2.ked["i"] == pre
    assert serder2.ked["s"] == '2'
    assert serder2.ked["t"] == Ilks.ixn
    assert serder2.ked["p"] == serder1.dig
    assert serder2.raw == (b'{"v":"KERI10JSON000098_","i":"DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc",'
                           b'"s":"2","t":"ixn","p":"Ep7NXM1Yz6N1zZBAkoBr2BaTxUVWlJD7r9AWfG3c1bXk","a":[]}')

    # Receipt
    serder3 = receipt(pre=pre, sn=0, dig=serder2.dig)
    assert serder3.ked["i"] == pre
    assert serder3.ked["s"] == "0"
    assert serder3.ked["t"] == Ilks.rct
    assert serder3.ked["d"] == serder2.dig
    assert serder3.raw == (b'{"v":"KERI10JSON000091_","i":"DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc",'
                           b'"s":"0","t":"rct","d":"ENjADDuGb8QwXfG2-Q6mqFCZqnze_L9dufxLAg8AGtCE"}')

    # Receipt transferable identifier
    serderA = incept(keys=keys0, nxt=nxt1, code=MtrDex.Blake3_256)
    seal = SealEvent(i=serderA.ked["i"], s=serderA.ked["s"], d=serderA.dig)
    assert seal.i == serderA.ked["i"]
    assert seal.d == serderA.dig
    serder4 = receipt(pre=pre, sn=2, dig=serder2.dig)
    assert serder4.ked["i"] == pre
    assert serder4.ked["s"] == "2"
    assert serder4.ked["t"] == Ilks.rct
    assert serder4.ked["d"] == serder2.dig
    assert serder4.raw == (b'{"v":"KERI10JSON000091_","i":"DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-Wk1x4ejhcc",'
                           b'"s":"2","t":"rct","d":"ENjADDuGb8QwXfG2-Q6mqFCZqnze_L9dufxLAg8AGtCE"}')
    siger = signer0.sign(ser=serderA.raw, index=0)
    msg = messagize(serder=serder4, sigers=[siger], seal=seal)
    assert msg == bytearray(b'{"v":"KERI10JSON000091_","i":"DWzwEHHzq7K0gzQPYGGwTmuupUhPx5_yZ-'
                            b'Wk1x4ejhcc","s":"2","t":"rct","d":"ENjADDuGb8QwXfG2-Q6mqFCZqnze_'
                            b'L9dufxLAg8AGtCE"}-FABEheV9-CZwshHxId9DgDh_r3r00QpK7jzbA4EF2HIZ9o'
                            b'c0AAAAAAAAAAAAAAAAAAAAAAAEEl4SNEyeZhL_TfPclHCaaXNaofpNedM6l21ilJ'
                            b'0I3i4-AABAAUMALurW2PdrOG5l_sfRdiIdKqpDZShNcPNQ-6vJb6dwG9-wahbQrj'
                            b'303CRsAVT0gOqI9Ty4EoEyiv6LAX5w9Cw')

    # Delegated Inception:
    # Transferable not abandoned i.e. next not empty
    # seed = pysodium.randombytes(pysodium.crypto_sign_SEEDBYTES)
    seedD = (b'\x83B~\x04\x94\xe3\xceUQy\x11f\x0c\x93]\x1e\xbf\xacQ\xb5\xd6Y^\xa2E\xfa\x015'
             b'\x98Y\xdd\xe8')
    signerD = Signer(raw=seedD)  # next signing keypair transferable is default
    assert signerD.code == MtrDex.Ed25519_Seed
    assert signerD.verfer.code == MtrDex.Ed25519
    keysD = [signerD.verfer.qb64]
    # compute nxt digest
    nexterD = Nexter(keys=keysD)  # default sith is 1
    nxtD = nexterD.qb64  # transferable so nxt is not empty
    delpre = 'ENdHxtdjCQUM-TVO8CgJAKb8ykXsFe4u9epTUQFCL7Yd'
    serderD = delcept(keys=keysD, delpre=delpre, nxt=nxtD)
    pre = serderD.ked["i"]
    assert serderD.ked["i"] == 'EZUY3a0vbBLqUtC1d9ZrutSeg1nlMPVuDfxUi4LpE03g'
    assert serderD.ked["s"] == '0'
    assert serderD.ked["t"] == Ilks.dip
    assert serderD.ked["n"] == nxtD
    assert serderD.raw == (b'{"v":"KERI10JSON000121_","i":"EZUY3a0vbBLqUtC1d9ZrutSeg1nlMPVuDfxUi4LpE03g",'
                           b'"s":"0","t":"dip","kt":"1","k":["DHgZa-u7veNZkqk2AxCnxrINGKfQ0bRiaf9FdA_-_49'
                           b'A"],"n":"EcBCalw7Oe2ohLDra2ovwlv72PrlQZdQdaoSZ1Vvk5P4","bt":"0","b":[],"c":['
                           b'],"a":[],"di":"ENdHxtdjCQUM-TVO8CgJAKb8ykXsFe4u9epTUQFCL7Yd"}')
    assert serderD.dig == 'EJvaOecTdGRsMG1ZBosajDHAWDL6wmmSrn13q5AhSuGU'

    # Delegated Rotation:
    # Transferable not abandoned i.e. next not empty
    seedR = (b'\xbe\x96\x02\xa9\x88\xce\xf9O\x1e\x0fo\xc0\xff\x98\xb6\xfa\x1e\xa2y\xf2'
             b'e\xf9AL\x1aeK\xafj\xa1pB')
    signerR = Signer(raw=seedR)  # next signing keypair transferable is default
    assert signerR.code == MtrDex.Ed25519_Seed
    assert signerR.verfer.code == MtrDex.Ed25519
    keysR = [signerR.verfer.qb64]
    # compute nxt digest
    nexterR = Nexter(keys=keysR)  # default sith is 1
    nxtR = nexterR.qb64  # transferable so nxt is not empty
    delpre = 'ENdHxtdjCQUM-TVO8CgJAKb8ykXsFe4u9epTUQFCL7Yd'
    serderR = deltate(pre=pre,
                      keys=keysR,
                      dig='EgNkcl_QewzrRSKH2p9zUskHI462CuIMS_HQIO132Z30',
                      sn=4,
                      nxt=nxtR)
    assert serderR.ked["i"] == pre
    assert serderR.ked["s"] == '4'
    assert serderR.ked["t"] == Ilks.drt
    assert serderR.ked["n"] == nxtR
    assert serderR.raw == (b'{"v":"KERI10JSON000122_","i":"EZUY3a0vbBLqUtC1d9ZrutSeg1nlMPVuDfxUi4LpE03g",'
                           b'"s":"4","t":"drt","p":"EgNkcl_QewzrRSKH2p9zUskHI462CuIMS_HQIO132Z30","kt":"1'
                           b'","k":["D8u3hipCxZnkM_O0jfaZLJMk9ERI428T0psRO0JVgh4c"],"n":"EAXTvbATMnVRGjyC'
                           b'_VCNuXcPTxxpLanfzj14u3QMsD_U","bt":"0","br":[],"ba":[],"a":[]}')
    assert serderR.dig == 'E99ece6FIrvll2dlnNjXfuHGvclWeNqvErHxCZPZDwGs'

    # State KSN
    """
    state(pre,
          sn,
          dig,
          eilk,
          keys,
          eevt,
          sith=None, # default based on keys
          nxt="",
          toad=None, # default based on wits
          wits=None, # default to []
          cnfg=None, # default to []
          dpre=None,
          version=Version,
          kind=Serials.json,
          ):

    Key State Dict
    {
        "v": "KERI10JSON00011c_",
        "i": "EaU6JR2nmwyZ-i0d8JZAoTNZH3ULvYAfSVPzhzS6b5CM",
        "s": "2",
        "t": "ksn",
        "d": "EAoTNZH3ULvaU6JR2nmwyYAfSVPzhzZ-i0d8JZS6b5CM",
        "te": "rot",
        "kt": "1",
        "k": ["DaU6JR2nmwyZ-i0d8JZAoTNZH3ULvYAfSVPzhzS6b5CM"],
        "n": "EZ-i0d8JZAoTNZH3ULvaU6JR2nmwyYAfSVPzhzS6b5CM",
        "wt": "1",
        "w": ["DnmwyYAfSVPzhzS6b5CMZ-i0d8JZAoTNZH3ULvaU6JR2"],
        "c": ["eo"],
        "ee":
          {
            "s": "1",
            "d": "EAoTNZH3ULvaU6JR2nmwyYAfSVPzhzZ-i0d8JZS6b5CM",
            "wr": ["Dd8JZAoTNZH3ULvaU6JR2nmwyYAfSVPzhzS6b5CMZ-i0"],
            "wa": ["DnmwyYAfSVPzhzS6b5CMZ-i0d8JZAoTNZH3ULvaU6JR2"]
          },
        "di": "EYAfSVPzhzS6b5CMaU6JR2nmwyZ-i0d8JZAoTNZH3ULv",
    }

    "di": "" when not delegated
    """
    # use same salter for all but different path
    # salt = pysodium.randombytes(pysodium.crypto_pwhash_SALTBYTES)
    salt = b'\x05\xaa\x8f-S\x9a\xe9\xfaU\x9c\x02\x9c\x9b\x08Hu'
    salter = Salter(raw=salt)

    # State NonDelegated (key state notification)
    # create transferable key pair for controller of KEL
    signerC = salter.signer(path="C", temp=True)
    assert signerC.code == MtrDex.Ed25519_Seed
    assert signerC.verfer.code == MtrDex.Ed25519  # transferable
    preC = signerC.verfer.qb64  # use public key verfer.qb64 trans pre
    assert preC == 'D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0'
    sith = '1'
    keys = [signerC.verfer.qb64]
    nexter = Nexter(keys=keys)  # compute nxt digest (dummy reuse keys)
    nxt = nexter.qb64
    assert nxt == 'E9GdMuF9rZZ9uwTjqgiCGA8r2mRsC5SQDHCyOpsW5AqQ'

    # create key pairs for witnesses of KEL
    signerW0 = salter.signer(path="W0", transferable=False, temp=True)
    assert signerW0.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW0 = signerW0.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW0 == 'BNTkstUfFBJv0R1IoNNjKpWK6zEZPxjgMc7KS2Q6_lG0'

    signerW1 = salter.signer(path="W1", transferable=False, temp=True)
    assert signerW1.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW1 = signerW1.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW1 == 'BaEI1ytEFHqaUF26Fu4JgvsHBzeBu7Joaj2ilmx3QPwU'

    signerW2 = salter.signer(path="W2", transferable=False, temp=True)
    assert signerW2.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW2 = signerW2.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW2 == 'B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo'

    signerW3 = salter.signer(path="W3", transferable=False, temp=True)
    assert signerW3.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW3 = signerW3.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW3 == 'BruKyL_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y'

    wits = [preW1, preW2, preW3]
    toad = 2

    # create namedtuple of latest est event
    eevt = StateEstEvent(s='3',
                         d='EUskHI462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30',
                         br=[preW0],
                         ba=[preW3])

    serderK = state(pre=preC,
                    sn=4,
                    dig='EgNkcl_QewzrRSKH2p9zUskHI462CuIMS_HQIO132Z30',
                    eilk=Ilks.ixn,
                    keys=keys,
                    eevt=eevt,
                    sith=sith,
                    nxt=nxt,
                    toad=toad,
                    wits=wits,
                    )
    assert serderK.raw == (b'{"v":"KERI10JSON000266_","i":"D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0",'
                           b'"s":"4","t":"ksn","d":"EgNkcl_QewzrRSKH2p9zUskHI462CuIMS_HQIO132Z30","te":"i'
                           b'xn","kt":"1","k":["D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0"],"n":"E9GdM'
                           b'uF9rZZ9uwTjqgiCGA8r2mRsC5SQDHCyOpsW5AqQ","bt":"2","b":["BaEI1ytEFHqaUF26Fu4J'
                           b'gvsHBzeBu7Joaj2ilmx3QPwU","B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo","Br'
                           b'uKyL_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"],"c":[],"ee":{"s":"3","d":"EUskH'
                           b'I462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30","br":["BNTkstUfFBJv0R1IoNNjKpWK6zEZ'
                           b'PxjgMc7KS2Q6_lG0"],"ba":["BruKyL_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"]},"d'
                           b'i":""}')
    assert serderK.dig == 'EvrzL3Y9UkZGnHin2CJz5iOxKfJIC9e-JPhWgNdHGOJQ'
    assert serderK.pre == preC == 'D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0'
    assert serderK.sn == 4

    # create endorsed ksn with nontrans endorser
    # create nontrans key pair for endorder of KSN
    signerE = salter.signer(path="E", transferable=False, temp=True)
    assert signerE.verfer.code == MtrDex.Ed25519N  # non-transferable
    preE = signerE.verfer.qb64  # use public key verfer.qb64 as pre
    assert preE == 'ByvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI'

    cigarE = signerE.sign(ser=serderK.raw)
    assert signerE.verfer.verify(sig=cigarE.raw, ser=serderK.raw)
    msg = messagize(serderK, cigars=[cigarE])
    assert msg == bytearray(b'{"v":"KERI10JSON000266_","i":"D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPq'
                            b'WVK9ZBNZk0","s":"4","t":"ksn","d":"EgNkcl_QewzrRSKH2p9zUskHI462C'
                            b'uIMS_HQIO132Z30","te":"ixn","kt":"1","k":["D3pYGFaqnrALTyejaJaGA'
                            b'VhNpSCtqyerPqWVK9ZBNZk0"],"n":"E9GdMuF9rZZ9uwTjqgiCGA8r2mRsC5SQD'
                            b'HCyOpsW5AqQ","bt":"2","b":["BaEI1ytEFHqaUF26Fu4JgvsHBzeBu7Joaj2i'
                            b'lmx3QPwU","B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo","BruKyL'
                            b'_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"],"c":[],"ee":{"s":"3","d'
                            b'":"EUskHI462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30","br":["BNTkstUf'
                            b'FBJv0R1IoNNjKpWK6zEZPxjgMc7KS2Q6_lG0"],"ba":["BruKyL_b4D5ETo9u12'
                            b'DtLU1J6Kc1CQnigIUBKrBFz_1Y"]},"di":""}-CABByvCLRr5luWmp7keDvDuLP'
                            b'0kIqcyBYq79b3Dho1QvrjI0BJfvqH8zSieT_M4c77B2WopiYgcth_xjrgTast2he'
                            b'GdRXvdIjBG40xSrqgSJb2BNikaOBW3ay4DiIsMTc4I9UCg')

    # create endorsed ksn with trans endorser
    # create trans key pair for endorder of KSN
    signerE = salter.signer(path="E", temp=True)
    assert signerE.verfer.code == MtrDex.Ed25519  # transferable
    preE = signerE.verfer.qb64  # use public key verfer.qb64 as pre
    assert preE == 'DyvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI'

    # create SealEvent for endorsers est evt whose keys use to sign
    seal = SealEvent(i=preE,
                     s='0',
                     d='EMuNWHss_H_kH4cG7Li1jn2DXfrEaqN7zhqTEhkeDZ2z')
    # create endorsed ksn
    sigerE = signerE.sign(ser=serderK.raw, index=0)
    assert signerE.verfer.verify(sig=sigerE.raw, ser=serderK.raw)
    msg = messagize(serderK, sigers=[sigerE], seal=seal)
    assert msg == bytearray(b'{"v":"KERI10JSON000266_","i":"D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPq'
                            b'WVK9ZBNZk0","s":"4","t":"ksn","d":"EgNkcl_QewzrRSKH2p9zUskHI462C'
                            b'uIMS_HQIO132Z30","te":"ixn","kt":"1","k":["D3pYGFaqnrALTyejaJaGA'
                            b'VhNpSCtqyerPqWVK9ZBNZk0"],"n":"E9GdMuF9rZZ9uwTjqgiCGA8r2mRsC5SQD'
                            b'HCyOpsW5AqQ","bt":"2","b":["BaEI1ytEFHqaUF26Fu4JgvsHBzeBu7Joaj2i'
                            b'lmx3QPwU","B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo","BruKyL'
                            b'_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"],"c":[],"ee":{"s":"3","d'
                            b'":"EUskHI462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30","br":["BNTkstUf'
                            b'FBJv0R1IoNNjKpWK6zEZPxjgMc7KS2Q6_lG0"],"ba":["BruKyL_b4D5ETo9u12'
                            b'DtLU1J6Kc1CQnigIUBKrBFz_1Y"]},"di":""}-FABDyvCLRr5luWmp7keDvDuLP'
                            b'0kIqcyBYq79b3Dho1QvrjI0AAAAAAAAAAAAAAAAAAAAAAAEMuNWHss_H_kH4cG7L'
                            b'i1jn2DXfrEaqN7zhqTEhkeDZ2z-AABAAJfvqH8zSieT_M4c77B2WopiYgcth_xjr'
                            b'gTast2heGdRXvdIjBG40xSrqgSJb2BNikaOBW3ay4DiIsMTc4I9UCg')

    # State Delegated (key state notification)
    # create transferable key pair for controller of KEL
    signerC = salter.signer(path="C", temp=True)
    assert signerC.code == MtrDex.Ed25519_Seed
    assert signerC.verfer.code == MtrDex.Ed25519  # transferable
    preC = signerC.verfer.qb64  # use public key verfer.qb64 as trans pre
    assert preC == 'D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0'
    sith = '1'
    keys = [signerC.verfer.qb64]
    nexter = Nexter(keys=keys)  # compute nxt digest (dummy reuse keys)
    nxt = nexter.qb64
    assert nxt == 'E9GdMuF9rZZ9uwTjqgiCGA8r2mRsC5SQDHCyOpsW5AqQ'

    # create key pairs for witnesses of KEL
    signerW0 = salter.signer(path="W0", transferable=False, temp=True)
    assert signerW0.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW0 = signerW0.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW0 == 'BNTkstUfFBJv0R1IoNNjKpWK6zEZPxjgMc7KS2Q6_lG0'

    signerW1 = salter.signer(path="W1", transferable=False, temp=True)
    assert signerW1.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW1 = signerW1.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW1 == 'BaEI1ytEFHqaUF26Fu4JgvsHBzeBu7Joaj2ilmx3QPwU'

    signerW2 = salter.signer(path="W2", transferable=False, temp=True)
    assert signerW2.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW2 = signerW2.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW2 == 'B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo'

    signerW3 = salter.signer(path="W3", transferable=False, temp=True)
    assert signerW3.verfer.code == MtrDex.Ed25519N  # non-transferable
    preW3 = signerW3.verfer.qb64  # use public key verfer.qb64 as pre
    assert preW3 == 'BruKyL_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y'

    wits = [preW1, preW2, preW3]
    toad = 2

    # create namedtuple of latest est event
    eevt = StateEstEvent(s='3',
                         d='EUskHI462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30',
                         br=[preW0],
                         ba=[preW3])

    # create transferable key pair for delegator of KEL
    signerD = salter.signer(path="D", temp=True)
    assert signerD.code == MtrDex.Ed25519_Seed
    assert signerD.verfer.code == MtrDex.Ed25519  # transferable
    preD = signerD.verfer.qb64  # use public key verfer.qb64 as trans pre
    assert preD == 'DGz6B3ecka0XQKHaOfs0tpQqwIoHuXecuz733f-zkh7U'

    serderK = state(pre=preC,
                    sn=4,
                    dig='EgNkcl_QewzrRSKH2p9zUskHI462CuIMS_HQIO132Z30',
                    eilk=Ilks.ixn,
                    keys=keys,
                    eevt=eevt,
                    sith=sith,
                    nxt=nxt,
                    toad=toad,
                    wits=wits,
                    dpre=preD
                    )
    assert serderK.raw == (b'{"v":"KERI10JSON000292_","i":"D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0",'
                           b'"s":"4","t":"ksn","d":"EgNkcl_QewzrRSKH2p9zUskHI462CuIMS_HQIO132Z30","te":"i'
                           b'xn","kt":"1","k":["D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0"],"n":"E9GdM'
                           b'uF9rZZ9uwTjqgiCGA8r2mRsC5SQDHCyOpsW5AqQ","bt":"2","b":["BaEI1ytEFHqaUF26Fu4J'
                           b'gvsHBzeBu7Joaj2ilmx3QPwU","B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo","Br'
                           b'uKyL_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"],"c":[],"ee":{"s":"3","d":"EUskH'
                           b'I462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30","br":["BNTkstUfFBJv0R1IoNNjKpWK6zEZ'
                           b'PxjgMc7KS2Q6_lG0"],"ba":["BruKyL_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"]},"d'
                           b'i":"DGz6B3ecka0XQKHaOfs0tpQqwIoHuXecuz733f-zkh7U"}')
    assert serderK.dig == 'E_XluPVlJvxTVbgZZIFIebVRGrJdHpwEL7JcDGP33DVc'
    assert serderK.pre == preC == 'D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPqWVK9ZBNZk0'
    assert serderK.sn == 4

    # create endorsed ksn with nontrans endorser
    # create nontrans key pair for endorder of KSN
    signerE = salter.signer(path="E", transferable=False, temp=True)
    assert signerE.verfer.code == MtrDex.Ed25519N  # non-transferable
    preE = signerE.verfer.qb64  # use public key verfer.qb64 as pre
    assert preE == 'ByvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI'

    # create endorsed ksn
    cigarE = signerE.sign(ser=serderK.raw)
    assert signerE.verfer.verify(sig=cigarE.raw, ser=serderK.raw)
    msg = messagize(serderK, cigars=[cigarE])
    assert msg == bytearray(b'{"v":"KERI10JSON000292_","i":"D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPq'
                            b'WVK9ZBNZk0","s":"4","t":"ksn","d":"EgNkcl_QewzrRSKH2p9zUskHI462C'
                            b'uIMS_HQIO132Z30","te":"ixn","kt":"1","k":["D3pYGFaqnrALTyejaJaGA'
                            b'VhNpSCtqyerPqWVK9ZBNZk0"],"n":"E9GdMuF9rZZ9uwTjqgiCGA8r2mRsC5SQD'
                            b'HCyOpsW5AqQ","bt":"2","b":["BaEI1ytEFHqaUF26Fu4JgvsHBzeBu7Joaj2i'
                            b'lmx3QPwU","B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo","BruKyL'
                            b'_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"],"c":[],"ee":{"s":"3","d'
                            b'":"EUskHI462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30","br":["BNTkstUf'
                            b'FBJv0R1IoNNjKpWK6zEZPxjgMc7KS2Q6_lG0"],"ba":["BruKyL_b4D5ETo9u12'
                            b'DtLU1J6Kc1CQnigIUBKrBFz_1Y"]},"di":"DGz6B3ecka0XQKHaOfs0tpQqwIoH'
                            b'uXecuz733f-zkh7U"}-CABByvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1Qvr'
                            b'jI0B4rp2Pqi_Iqz5bmrZJz9pFL4Mi6bgHl5QiAsuaZOTfnKBRseYnB_FMOJMKKWi'
                            b'Hr2hiPlSxIIBu9t872NlT2RfBA')

    # create endorsed ksn with trans endorser
    # create trans key pair for endorder of KSN
    signerE = salter.signer(path="E", temp=True)
    assert signerE.verfer.code == MtrDex.Ed25519  # transferable
    preE = signerE.verfer.qb64  # use public key verfer.qb64 as pre
    assert preE == 'DyvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI'

    # create SealEvent for endorsers est evt whose keys use to sign
    seal = SealEvent(i=preE,
                     s='0',
                     d='EMuNWHss_H_kH4cG7Li1jn2DXfrEaqN7zhqTEhkeDZ2z')
    # create endorsed ksn
    sigerE = signerE.sign(ser=serderK.raw, index=0)
    assert signerE.verfer.verify(sig=sigerE.raw, ser=serderK.raw)
    msg = messagize(serderK, sigers=[sigerE], seal=seal)
    assert msg == bytearray(b'{"v":"KERI10JSON000292_","i":"D3pYGFaqnrALTyejaJaGAVhNpSCtqyerPq'
                            b'WVK9ZBNZk0","s":"4","t":"ksn","d":"EgNkcl_QewzrRSKH2p9zUskHI462C'
                            b'uIMS_HQIO132Z30","te":"ixn","kt":"1","k":["D3pYGFaqnrALTyejaJaGA'
                            b'VhNpSCtqyerPqWVK9ZBNZk0"],"n":"E9GdMuF9rZZ9uwTjqgiCGA8r2mRsC5SQD'
                            b'HCyOpsW5AqQ","bt":"2","b":["BaEI1ytEFHqaUF26Fu4JgvsHBzeBu7Joaj2i'
                            b'lmx3QPwU","B7vHpy1IDsWWUnHf2GU5ud62LMYWO5lPWOrSB6ejQ1Eo","BruKyL'
                            b'_b4D5ETo9u12DtLU1J6Kc1CQnigIUBKrBFz_1Y"],"c":[],"ee":{"s":"3","d'
                            b'":"EUskHI462CuIMS_gNkcl_QewzrRSKH2p9zHQIO132Z30","br":["BNTkstUf'
                            b'FBJv0R1IoNNjKpWK6zEZPxjgMc7KS2Q6_lG0"],"ba":["BruKyL_b4D5ETo9u12'
                            b'DtLU1J6Kc1CQnigIUBKrBFz_1Y"]},"di":"DGz6B3ecka0XQKHaOfs0tpQqwIoH'
                            b'uXecuz733f-zkh7U"}-FABDyvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1Qvr'
                            b'jI0AAAAAAAAAAAAAAAAAAAAAAAEMuNWHss_H_kH4cG7Li1jn2DXfrEaqN7zhqTEh'
                            b'keDZ2z-AABAA4rp2Pqi_Iqz5bmrZJz9pFL4Mi6bgHl5QiAsuaZOTfnKBRseYnB_F'
                            b'MOJMKKWiHr2hiPlSxIIBu9t872NlT2RfBA')

    """ Done Test """
def test_messagize():
    """
    Test messagize utility function: attaching indexed controller signatures
    (sigers), indexed witness signatures (wigers), non-indexed signatures
    (cigars), an optional transferable identifier SealEvent, and optional
    pipelining to a serialized event message.
    """
    salter = Salter(raw=b'0123456789abcdef')
    with openDB(name="edy") as db, openKS(name="edy") as ks:
        # Init key pair manager
        mgr = Manager(keeper=ks, salt=salter.qb64)
        verfers, digers, cst, nst = mgr.incept(icount=1, ncount=0, transferable=True, stem="C")

        # Test with inception message
        serder = incept(keys=[verfers[0].qb64], code=MtrDex.Blake3_256)
        sigers = mgr.sign(ser=serder.raw, verfers=verfers)  # default indexed True
        assert isinstance(sigers[0], Siger)

        msg = messagize(serder, sigers)
        # Fix: original asserted a bare bytearray literal, which is always
        # truthy and therefore vacuous; compare against msg instead.
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-AABAA0X9eyML4ioPIk9AuBQFN5hGnGeRgywzNorzFydvyFTm-sjjLrFantYynS'
                                b'BLWXjxYc5c_sW0052it_g6rX30kDA')

        # Test with pipelined
        msg = messagize(serder, sigers, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VAX-AABAA0X9eyML4ioPIk9AuBQFN5hGnGeRgywzNorzFydvyFTm-sjjLrFant'
                                b'YynSBLWXjxYc5c_sW0052it_g6rX30kDA')

        # Test with seal
        # create SealEvent for endorsers est evt whose keys use to sign
        seal = SealEvent(i='DyvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI',
                         s='0',
                         d='EMuNWHss_H_kH4cG7Li1jn2DXfrEaqN7zhqTEhkeDZ2z')
        msg = messagize(serder, sigers, seal=seal)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-FABDyvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI0AAAAAAAAAAAAAA'
                                b'AAAAAAAAAEMuNWHss_H_kH4cG7Li1jn2DXfrEaqN7zhqTEhkeDZ2z-AABAA0X9ey'
                                b'ML4ioPIk9AuBQFN5hGnGeRgywzNorzFydvyFTm-sjjLrFantYynSBLWXjxYc5c_s'
                                b'W0052it_g6rX30kDA')

        # Test with pipelined
        msg = messagize(serder, sigers, seal=seal, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VA0-FABDyvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI0AAAAAAAAAA'
                                b'AAAAAAAAAAAAAEMuNWHss_H_kH4cG7Li1jn2DXfrEaqN7zhqTEhkeDZ2z-AABAA0'
                                b'X9eyML4ioPIk9AuBQFN5hGnGeRgywzNorzFydvyFTm-sjjLrFantYynSBLWXjxYc'
                                b'5c_sW0052it_g6rX30kDA')

        # Test with wigers
        verfers, digers, cst, nst = mgr.incept(icount=1, ncount=0, transferable=False, stem="W")
        wigers = mgr.sign(ser=serder.raw, verfers=verfers)  # default indexed True
        assert isinstance(wigers[0], Siger)
        msg = messagize(serder, wigers=wigers)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-BABAAWha5gf4wk__OEK_ZvAyA4WYArQVKfVKevOmZWliDBpdIn7oHsWgvm8T7U'
                                b'vEjfnKobH8lKD1ILacrT6KVIxNeCw')

        # Test with wigers and pipelined
        msg = messagize(serder, wigers=wigers, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VAX-BABAAWha5gf4wk__OEK_ZvAyA4WYArQVKfVKevOmZWliDBpdIn7oHsWgvm'
                                b'8T7UvEjfnKobH8lKD1ILacrT6KVIxNeCw')

        # Test with cigars
        verfers, digers, cst, nst = mgr.incept(icount=1, ncount=0, transferable=False, stem="R")
        cigars = mgr.sign(ser=serder.raw, verfers=verfers, indexed=False)
        assert isinstance(cigars[0], Cigar)
        msg = messagize(serder, cigars=cigars)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-CABBmMfUwIOywRkyc5GyQXfgDA4UOAMvjvnXcaK9G939ArM0BT7b5PzUBmts-l'
                                b'blgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6CjZhsOqqUYFmRhABQ-vPywggLATx'
                                b'BDnqQ3aBg')

        # Test with cigars and pipelined
        msg = messagize(serder, cigars=cigars, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VAi-CABBmMfUwIOywRkyc5GyQXfgDA4UOAMvjvnXcaK9G939ArM0BT7b5PzUBm'
                                b'ts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6CjZhsOqqUYFmRhABQ-vPywgg'
                                b'LATxBDnqQ3aBg')

        # Test with wigers and cigars
        msg = messagize(serder, wigers=wigers, cigars=cigars)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-BABAAWha5gf4wk__OEK_ZvAyA4WYArQVKfVKevOmZWliDBpdIn7oHsWgvm8T7U'
                                b'vEjfnKobH8lKD1ILacrT6KVIxNeCw-CABBmMfUwIOywRkyc5GyQXfgDA4UOAMvjv'
                                b'nXcaK9G939ArM0BT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6'
                                b'CjZhsOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg')

        # Test with wigers and cigars and pipelined
        msg = messagize(serder, cigars=cigars, wigers=wigers, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VA5-BABAAWha5gf4wk__OEK_ZvAyA4WYArQVKfVKevOmZWliDBpdIn7oHsWgvm'
                                b'8T7UvEjfnKobH8lKD1ILacrT6KVIxNeCw-CABBmMfUwIOywRkyc5GyQXfgDA4UOA'
                                b'MvjvnXcaK9G939ArM0BT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0J'
                                b'yfN6CjZhsOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg')

        # Test with sigers and wigers and cigars
        msg = messagize(serder, sigers=sigers, cigars=cigars, wigers=wigers)
        # Fix: original asserted a bare bytearray literal (vacuous); compare
        # against msg. Expected value is the pipelined variant below minus
        # its -VBQ pipeline count code.
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-AABAA0X9eyML4ioPIk9AuBQFN5hGnGeRgywzNorzFydvyFTm-sjjLrFantYynS'
                                b'BLWXjxYc5c_sW0052it_g6rX30kDA-BABAAWha5gf4wk__OEK_ZvAyA4WYArQVKf'
                                b'VKevOmZWliDBpdIn7oHsWgvm8T7UvEjfnKobH8lKD1ILacrT6KVIxNeCw-CABBmM'
                                b'fUwIOywRkyc5GyQXfgDA4UOAMvjvnXcaK9G939ArM0BT7b5PzUBmts-lblgOBzdT'
                                b'hIQjKCbq8gMinhymgr4_dD0JyfN6CjZhsOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg')

        # Test with sigers and wigers and cigars and pipelines
        msg = messagize(serder, sigers=sigers, cigars=cigars, wigers=wigers, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VBQ-AABAA0X9eyML4ioPIk9AuBQFN5hGnGeRgywzNorzFydvyFTm-sjjLrFant'
                                b'YynSBLWXjxYc5c_sW0052it_g6rX30kDA-BABAAWha5gf4wk__OEK_ZvAyA4WYAr'
                                b'QVKfVKevOmZWliDBpdIn7oHsWgvm8T7UvEjfnKobH8lKD1ILacrT6KVIxNeCw-CA'
                                b'BBmMfUwIOywRkyc5GyQXfgDA4UOAMvjvnXcaK9G939ArM0BT7b5PzUBmts-lblgO'
                                b'BzdThIQjKCbq8gMinhymgr4_dD0JyfN6CjZhsOqqUYFmRhABQ-vPywggLATxBDnq'
                                b'Q3aBg')

        # Test with receipt message
        ked = serder.ked
        reserder = receipt(pre=ked["i"],
                           sn=int(ked["s"], 16),
                           dig=serder.dig)
        # Test with wigers
        wigers = mgr.sign(ser=serder.raw, verfers=verfers, indexed=True)
        assert isinstance(wigers[0], Siger)
        msg = messagize(serder, wigers=wigers)
        # Fix: original asserted a bare bytearray literal (vacuous); compare
        # against msg.
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-BABAAT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6CjZhsOqq'
                                b'UYFmRhABQ-vPywggLATxBDnqQ3aBg')

        # Test with cigars
        cigars = mgr.sign(ser=serder.raw, verfers=verfers, indexed=False)  # sign event not receipt
        msg = messagize(reserder, cigars=cigars)
        assert msg == bytearray(b'{"v":"KERI10JSON000091_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"rct","d":"EumwYGLL1YseRT1_oSyUwt5AJVGvw'
                                b'2hFIcmNpzEHvbm0"}-CABBmMfUwIOywRkyc5GyQXfgDA4UOAMvjvnXcaK9G939Ar'
                                b'M0BT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6CjZhsOqqUYFm'
                                b'RhABQ-vPywggLATxBDnqQ3aBg')

        # Test with wigers and cigars
        msg = messagize(serder, wigers=wigers, cigars=cigars, )
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-BABAAT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6CjZhsOqq'
                                b'UYFmRhABQ-vPywggLATxBDnqQ3aBg-CABBmMfUwIOywRkyc5GyQXfgDA4UOAMvjv'
                                b'nXcaK9G939ArM0BT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6'
                                b'CjZhsOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg')

        # Test with wigers and cigars and pipelined
        msg = messagize(serder, wigers=wigers, cigars=cigars, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VA5-BABAAT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0JyfN6CjZh'
                                b'sOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg-CABBmMfUwIOywRkyc5GyQXfgDA4UOA'
                                b'MvjvnXcaK9G939ArM0BT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhymgr4_dD0J'
                                b'yfN6CjZhsOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg')

        # Test with sigers and seal and wigers and cigars and pipelined
        msg = messagize(serder, sigers=sigers, seal=seal, wigers=wigers,
                        cigars=cigars, pipelined=True)
        assert msg == bytearray(b'{"v":"KERI10JSON0000c1_","i":"ECE-_06hkl9stCfQu4IluYevW5_YlxHc6e'
                                b'GOM-ijM93o","s":"0","t":"icp","kt":"1","k":["D6J_jzCECalv_iTKSwx'
                                b'zPnuycxEi5fRuo3UUN7T0CVGM"],"n":"","bt":"0","b":[],"c":[],"a":[]'
                                b'}-VBt-FABDyvCLRr5luWmp7keDvDuLP0kIqcyBYq79b3Dho1QvrjI0AAAAAAAAAA'
                                b'AAAAAAAAAAAAAEMuNWHss_H_kH4cG7Li1jn2DXfrEaqN7zhqTEhkeDZ2z-AABAA0'
                                b'X9eyML4ioPIk9AuBQFN5hGnGeRgywzNorzFydvyFTm-sjjLrFantYynSBLWXjxYc'
                                b'5c_sW0052it_g6rX30kDA-BABAAT7b5PzUBmts-lblgOBzdThIQjKCbq8gMinhym'
                                b'gr4_dD0JyfN6CjZhsOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg-CABBmMfUwIOywR'
                                b'kyc5GyQXfgDA4UOAMvjvnXcaK9G939ArM0BT7b5PzUBmts-lblgOBzdThIQjKCbq'
                                b'8gMinhymgr4_dD0JyfN6CjZhsOqqUYFmRhABQ-vPywggLATxBDnqQ3aBg')
    """ Done Test """
def test_kever():
    """
    Test the support functionality for Kever class
    Key Event Verifier

    Exercises Kever inception processing for both the transferable
    (Ed25519) and non-transferable (Ed25519N) prefix derivations against
    a fresh LMDB database, including the expected error cases.
    """
    with pytest.raises(TypeError):  # Missing required positional arguments
        kever = Kever()

    with openDB() as db:  # Transferable case
        # Setup inception key event dict
        salt = b'\x05\xaa\x8f-S\x9a\xe9\xfaU\x9c\x02\x9c\x9b\x08Hu'
        salter = Salter(raw=salt)
        # create current key
        sith = 1  # one signer
        # original signing keypair transferable default
        skp0 = salter.signer(path="A", temp=True)
        assert skp0.code == MtrDex.Ed25519_Seed
        assert skp0.verfer.code == MtrDex.Ed25519
        keys = [skp0.verfer.qb64]

        # create next key
        # next signing keypair transferable is default
        skp1 = salter.signer(path="N", temp=True)
        assert skp1.code == MtrDex.Ed25519_Seed
        assert skp1.verfer.code == MtrDex.Ed25519
        nxtkeys = [skp1.verfer.qb64]
        # compute nxt digest (commitment to the next key set)
        nexter = Nexter(keys=nxtkeys)
        nxt = nexter.qb64
        assert nxt == "E_d8cX6vuQwmD5P62_b663OeaVCLbiBFsirRHJsHn9co"  # transferable so nxt is not empty

        sn = 0  # inception event so 0
        toad = 0  # no witnesses
        nsigs = 1  # one attached signature unspecified index

        ked0 = dict(v=Versify(kind=Serials.json, size=0),
                    i="",  # qual base 64 prefix
                    s="{:x}".format(sn),  # hex string no leading zeros lowercase
                    t=Ilks.icp,
                    kt="{:x}".format(sith),  # hex string no leading zeros lowercase
                    k=keys,  # list of signing keys each qual Base64
                    n=nxt,  # hash qual Base64
                    bt="{:x}".format(toad),  # hex string no leading zeros lowercase
                    b=[],  # list of qual Base64 may be empty
                    c=[],  # list of config ordered mappings may be empty
                    a=[],  # list of seals
                    )

        # Derive AID from ked
        aid0 = Prefixer(ked=ked0, code=MtrDex.Ed25519)
        assert aid0.code == MtrDex.Ed25519
        assert aid0.qb64 == skp0.verfer.qb64 == 'DBQOqSaf6GqVAoPxb4UARrklS8kLYj3JqsR6b4AASDd4'

        # update ked with pre
        ked0["i"] = aid0.qb64

        # Serialize ked0
        tser0 = Serder(ked=ked0)

        # sign serialization
        tsig0 = skp0.sign(tser0.raw, index=0)

        # verify signature
        assert skp0.verfer.verify(tsig0.raw, tser0.raw)

        kever = Kever(serder=tser0, sigers=[tsig0], baser=db)  # no error
        assert kever.baser == db
        assert kever.cues == None
        assert kever.prefixer.qb64 == aid0.qb64
        assert kever.sn == 0
        assert [verfer.qb64 for verfer in kever.verfers] == [skp0.verfer.qb64]
        assert kever.nexter.qb64 == nexter.qb64

        # key state notice (ksn) serder must reflect the kever's current state
        serderK = kever.state()
        assert serderK.pre == kever.prefixer.qb64
        assert serderK.sn == kever.sn
        assert ([verfer.qb64 for verfer in serderK.verfers] ==
                [verfer.qb64 for verfer in kever.verfers])
        assert serderK.raw == (b'{"v":"KERI10JSON00017e_","i":"DBQOqSaf6GqVAoPxb4UARrklS8kLYj3JqsR6b4AASDd4",'
                               b'"s":"0","t":"ksn","d":"ErSmGekLPyCOf0VIVmYJJLHo6CVdd1K_ApeFUYsU_5WE","te":"i'
                               b'cp","kt":"1","k":["DBQOqSaf6GqVAoPxb4UARrklS8kLYj3JqsR6b4AASDd4"],"n":"E_d8c'
                               b'X6vuQwmD5P62_b663OeaVCLbiBFsirRHJsHn9co","bt":"0","b":[],"c":[],"ee":{"s":"0'
                               b'","d":"ErSmGekLPyCOf0VIVmYJJLHo6CVdd1K_ApeFUYsU_5WE","br":[],"ba":[]},"di":"'
                               b'"}')

    with openDB() as db:  # Non-Transferable case
        # Setup inception key event dict
        # create current key
        sith = 1  # one signer
        skp0 = Signer(transferable=False)  # original signing keypair non-transferable
        assert skp0.code == MtrDex.Ed25519_Seed
        assert skp0.verfer.code == MtrDex.Ed25519N
        keys = [skp0.verfer.qb64]

        # create next key Error case
        skp1 = Signer()  # next signing keypair transferable is default
        assert skp1.code == MtrDex.Ed25519_Seed
        assert skp1.verfer.code == MtrDex.Ed25519
        nxtkeys = [skp1.verfer.qb64]
        # compute nxt digest
        nexter = Nexter(keys=nxtkeys)
        nxt = nexter.qb64  # nxt is not empty so error

        sn = 0  # inception event so 0
        toad = 0  # no witnesses
        nsigs = 1  # one attached signature unspecified index

        ked0 = dict(v=Versify(kind=Serials.json, size=0),
                    i="",  # qual base 64 prefix
                    s="{:x}".format(sn),  # hex string no leading zeros lowercase
                    t=Ilks.icp,
                    kt="{:x}".format(sith),  # hex string no leading zeros lowercase
                    k=keys,  # list of signing keys each qual Base64
                    n=nxt,  # hash qual Base64
                    bt="{:x}".format(toad),  # hex string no leading zeros lowercase
                    b=[],  # list of qual Base64 may be empty
                    c=[],  # list of config ordered mappings may be empty
                    a={},  # list of seals
                    # NOTE(review): 'a' is a dict here though the comment (and the
                    # sibling cases) use a list — looks like a typo; the expected
                    # errors below are triggered by the non-empty nxt, not by 'a'.
                    )

        # Derive AID from ked: must fail because a non-transferable (Ed25519N)
        # prefix may not commit to a next key set (nxt must be empty)
        with pytest.raises(DerivationError):
            aid0 = Prefixer(ked=ked0, code=MtrDex.Ed25519N)

        # assert aid0.code == MtrDex.Ed25519N
        # assert aid0.qb64 == skp0.verfer.qb64

        # update ked with pre
        ked0["i"] = skp0.verfer.qb64

        # Serialize ked0
        tser0 = Serder(ked=ked0)

        # sign serialization
        tsig0 = skp0.sign(tser0.raw, index=0)

        # verify signature
        assert skp0.verfer.verify(tsig0.raw, tser0.raw)

        # Kever must likewise reject the inconsistent event
        with pytest.raises(ValidationError):
            kever = Kever(serder=tser0, sigers=[tsig0], baser=db)

        # retry with valid empty nxt
        nxt = ""  # nxt is empty so no error
        sn = 0  # inception event so 0
        toad = 0  # no witnesses
        nsigs = 1  # one attached signature unspecified index

        ked0 = dict(v=Versify(kind=Serials.json, size=0),
                    i="",  # qual base 64 prefix
                    s="{:x}".format(sn),  # hex string no leading zeros lowercase
                    t=Ilks.icp,
                    kt="{:x}".format(sith),  # hex string no leading zeros lowercase
                    k=keys,  # list of signing keys each qual Base64
                    n=nxt,  # hash qual Base64
                    bt="{:x}".format(toad),  # hex string no leading zeros lowercase
                    b=[],  # list of qual Base64 may be empty
                    c=[],  # list of config ordered mappings may be empty
                    a=[],  # list of seals
                    )

        # Derive AID from ked
        aid0 = Prefixer(ked=ked0, code=MtrDex.Ed25519N)
        assert aid0.code == MtrDex.Ed25519N
        assert aid0.qb64 == skp0.verfer.qb64

        # update ked with pre
        ked0["i"] = aid0.qb64

        # Serialize ked0
        tser0 = Serder(ked=ked0)

        # sign serialization
        tsig0 = skp0.sign(tser0.raw, index=0)

        # verify signature
        assert skp0.verfer.verify(tsig0.raw, tser0.raw)

        kever = Kever(serder=tser0, sigers=[tsig0], baser=db)  # valid so no error

    """ Done Test """
def test_keyeventsequence_0():
    """
    Test generation of a sequence of key events

    Walks a full KEL: inception, several rotations and interactions, then
    a rotation to null (abandonment) after which any further events must be
    rejected. Finally verifies the database KEL iterator matches the digests
    of the accepted events.
    """
    # manual process to generate a list of secrets
    # root = pysodium.randombytes(pysodium.crypto_pwhash_SALTBYTES)
    # root = b'g\x15\x89\x1a@\xa4\xa47\x07\xb9Q\xb8\x18\xcdJW'

    #root = '0AZxWJGkCkpDcHuVG4GM1KVw'
    #rooter = CryMat(qb64=root)
    #assert rooter.qb64 == root
    #assert rooter.code == CryTwoDex.Seed_128
    #signers = generateSigners(root=rooter.raw, count=8, transferable=True)
    #secrets = [signer.qb64 for signer in signers]
    #secrets =generateSecrets(root=rooter.raw, count=8, transferable=True)

    # Test sequence of events given set of secrets
    secrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    # create signers
    signers = [Signer(qb64=secret) for secret in secrets]  # faster
    assert [signer.qb64 for signer in signers] == secrets

    pubkeys = [signer.verfer.qb64 for signer in signers]
    assert pubkeys == [
        'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA',
        'DVcuJOOJF1IE8svqEtrSuyQjGTd2HhfAkt9y2QkUtFJI',
        'DT1iAhBWCkvChxNWsby2J0pJyxBIxbAtbLA0Ljx-Grh8',
        'DKPE5eeJRzkRTMOoRGVd2m18o8fLqM2j9kaxLhV3x8AQ',
        'D1kcBE7h0ImWW6_Sp7MQxGYSshZZz6XM7OiUE5DXm0dU',
        'D4JDgo3WNSUpt-NG14Ni31_GCmrU0r38yo7kgDuyGkQM',
        'DVjWcaNX2gCkHOjk6rkmqPBCxkRCqwIJ-3OjdYmMwxf4',
        'DT1nEDepd6CSAMCE7NY_jlLdG6_mKUlKS_mW-2HJY1hg'
    ]

    with openDB(name="controller") as conlgr:
        event_digs = []  # list of event digs in sequence

        # Event 0 Inception Transferable (nxt digest not empty)
        keys0 = [signers[0].verfer.qb64]
        # compute nxt digest from keys1
        keys1 = [signers[1].verfer.qb64]
        nexter1 = Nexter(keys=keys1)
        nxt1 = nexter1.qb64  # transferable so nxt is not empty
        assert nxt1 == 'EPYuj8mq_PYYsoBKkzX1kxSPGYBWaIya3slgCOyOtlqU'
        serder0 = incept(keys=keys0, nxt=nxt1)
        pre = serder0.ked["i"]
        event_digs.append(serder0.dig)
        assert serder0.ked["i"] == 'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
        assert serder0.ked["s"] == '0'
        assert serder0.ked["kt"] == '1'
        assert serder0.ked["k"] == keys0
        assert serder0.ked["n"] == nxt1
        assert serder0.dig == 'ECw4ANul798tewQs25OLSDVXs-VHF_qXtm_EHk8ojTng'

        # sign serialization and verify signature
        sig0 = signers[0].sign(serder0.raw, index=0)
        assert signers[0].verfer.verify(sig0.raw, serder0.raw)
        # create key event verifier state
        kever = Kever(serder=serder0, sigers=[sig0], baser=conlgr)
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 0
        assert kever.serder.diger.qb64 == serder0.dig
        assert kever.ilk == Ilks.icp
        assert kever.tholder.thold == 1
        assert [verfer.qb64 for verfer in kever.verfers] == keys0
        assert kever.nexter.qb64 == nxt1
        assert kever.estOnly == False
        assert kever.transferable == True

        # Event 1 Rotation Transferable
        # compute nxt digest from keys2
        keys2 = [signers[2].verfer.qb64]
        nexter2 = Nexter(keys=keys2)
        nxt2 = nexter2.qb64  # transferable so nxt is not empty
        assert nxt2 == 'E-dapdcC6XR1KWmWDsNl4J_OxcGxNZw1Xd95JH5a34fI'
        serder1 = rotate(pre=pre, keys=keys1, dig=serder0.dig, nxt=nxt2, sn=1)
        event_digs.append(serder1.dig)
        assert serder1.ked["i"] == pre
        assert serder1.ked["s"] == '1'
        assert serder1.ked["kt"] == '1'
        assert serder1.ked["k"] == keys1
        assert serder1.ked["n"] == nxt2
        assert serder1.ked["p"] == serder0.dig

        # sign serialization and verify signature
        sig1 = signers[1].sign(serder1.raw, index=0)
        assert signers[1].verfer.verify(sig1.raw, serder1.raw)
        # update key event verifier state
        kever.update(serder=serder1, sigers=[sig1])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 1
        assert kever.serder.diger.qb64 == serder1.dig
        assert kever.ilk == Ilks.rot
        assert [verfer.qb64 for verfer in kever.verfers] == keys1
        assert kever.nexter.qb64 == nxt2

        # Event 2 Rotation Transferable
        # compute nxt digest from keys3
        keys3 = [signers[3].verfer.qb64]
        nexter3 = Nexter(keys=keys3)
        nxt3 = nexter3.qb64  # transferable so nxt is not empty
        serder2 = rotate(pre=pre, keys=keys2, dig=serder1.dig, nxt=nxt3, sn=2)
        event_digs.append(serder2.dig)
        assert serder2.ked["i"] == pre
        assert serder2.ked["s"] == '2'
        assert serder2.ked["k"] == keys2
        assert serder2.ked["n"] == nxt3
        assert serder2.ked["p"] == serder1.dig

        # sign serialization and verify signature
        sig2 = signers[2].sign(serder2.raw, index=0)
        assert signers[2].verfer.verify(sig2.raw, serder2.raw)
        # update key event verifier state
        kever.update(serder=serder2, sigers=[sig2])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 2
        assert kever.serder.diger.qb64 == serder2.dig
        assert kever.ilk == Ilks.rot
        assert [verfer.qb64 for verfer in kever.verfers] == keys2
        assert kever.nexter.qb64 == nxt3

        # Event 3 Interaction
        serder3 = interact(pre=pre, dig=serder2.dig, sn=3)
        event_digs.append(serder3.dig)
        assert serder3.ked["i"] == pre
        assert serder3.ked["s"] == '3'
        assert serder3.ked["p"] == serder2.dig

        # sign serialization and verify signature
        sig3 = signers[2].sign(serder3.raw, index=0)
        assert signers[2].verfer.verify(sig3.raw, serder3.raw)
        # update key event verifier state
        kever.update(serder=serder3, sigers=[sig3])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 3
        assert kever.serder.diger.qb64 == serder3.dig
        assert kever.ilk == Ilks.ixn
        assert [verfer.qb64 for verfer in kever.verfers] == keys2  # no change
        assert kever.nexter.qb64 == nxt3  # no change

        # Event 4 Interaction
        serder4 = interact(pre=pre, dig=serder3.dig, sn=4)
        event_digs.append(serder4.dig)
        assert serder4.ked["i"] == pre
        assert serder4.ked["s"] == '4'
        assert serder4.ked["p"] == serder3.dig

        # sign serialization and verify signature
        sig4 = signers[2].sign(serder4.raw, index=0)
        assert signers[2].verfer.verify(sig4.raw, serder4.raw)
        # update key event verifier state
        kever.update(serder=serder4, sigers=[sig4])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 4
        assert kever.serder.diger.qb64 == serder4.dig
        assert kever.ilk == Ilks.ixn
        assert [verfer.qb64 for verfer in kever.verfers] == keys2  # no change
        assert kever.nexter.qb64 == nxt3  # no change

        # Event 5 Rotation Transferable
        # compute nxt digest from keys4
        keys4 = [signers[4].verfer.qb64]
        nexter4 = Nexter(keys=keys4)
        nxt4 = nexter4.qb64  # transferable so nxt is not empty
        serder5 = rotate(pre=pre, keys=keys3, dig=serder4.dig, nxt=nxt4, sn=5)
        event_digs.append(serder5.dig)
        assert serder5.ked["i"] == pre
        assert serder5.ked["s"] == '5'
        assert serder5.ked["k"] == keys3
        assert serder5.ked["n"] == nxt4
        assert serder5.ked["p"] == serder4.dig

        # sign serialization and verify signature
        sig5 = signers[3].sign(serder5.raw, index=0)
        assert signers[3].verfer.verify(sig5.raw, serder5.raw)
        # update key event verifier state
        kever.update(serder=serder5, sigers=[sig5])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 5
        assert kever.serder.diger.qb64 == serder5.dig
        assert kever.ilk == Ilks.rot
        assert [verfer.qb64 for verfer in kever.verfers] == keys3
        assert kever.nexter.qb64 == nxt4

        # Event 6 Interaction
        serder6 = interact(pre=pre, dig=serder5.dig, sn=6)
        event_digs.append(serder6.dig)
        assert serder6.ked["i"] == pre
        assert serder6.ked["s"] == '6'
        assert serder6.ked["p"] == serder5.dig

        # sign serialization and verify signature
        sig6 = signers[3].sign(serder6.raw, index=0)
        assert signers[3].verfer.verify(sig6.raw, serder6.raw)
        # update key event verifier state
        kever.update(serder=serder6, sigers=[sig6])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 6
        assert kever.serder.diger.qb64 == serder6.dig
        assert kever.ilk == Ilks.ixn
        assert [verfer.qb64 for verfer in kever.verfers] == keys3  # no change
        assert kever.nexter.qb64 == nxt4  # no change

        # Event 7 Rotation to null NonTransferable Abandon
        nxt5 = ""  # nxt digest is empty
        serder7 = rotate(pre=pre, keys=keys4, dig=serder6.dig, nxt=nxt5, sn=7)
        event_digs.append(serder7.dig)
        assert serder7.ked["i"] == pre
        assert serder7.ked["s"] == '7'
        assert serder7.ked["k"] == keys4
        assert serder7.ked["n"] == nxt5
        assert serder7.ked["p"] == serder6.dig

        # sign serialization and verify signature
        sig7 = signers[4].sign(serder7.raw, index=0)
        assert signers[4].verfer.verify(sig7.raw, serder7.raw)
        # update key event verifier state
        kever.update(serder=serder7, sigers=[sig7])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 7
        assert kever.serder.diger.qb64 == serder7.dig
        assert kever.ilk == Ilks.rot
        assert [verfer.qb64 for verfer in kever.verfers] == keys4
        assert kever.nexter == None
        assert not kever.transferable

        # Event 8 Interaction: identifier was abandoned so must be rejected
        serder8 = interact(pre=pre, dig=serder7.dig, sn=8)
        assert serder8.ked["i"] == pre
        assert serder8.ked["s"] == '8'
        assert serder8.ked["p"] == serder7.dig

        # sign serialization and verify signature
        sig8 = signers[4].sign(serder8.raw, index=0)
        assert signers[4].verfer.verify(sig8.raw, serder8.raw)
        # update key event verifier state
        with pytest.raises(ValidationError):  # nontransferable so reject update
            kever.update(serder=serder8, sigers=[sig8])

        # Event 8 Rotation: also rejected after abandonment
        keys5 = [signers[5].verfer.qb64]
        nexter5 = Nexter(keys=keys5)
        # FIX: was nexter4.qb64 which left nexter5 unused; the intent (as in
        # every prior event) is to commit to the newly computed next key set.
        # Outcome is unchanged: the event is rejected regardless of nxt.
        nxt5 = nexter5.qb64  # transferable so nxt is not empty
        serder8 = rotate(pre=pre, keys=keys5, dig=serder7.dig, nxt=nxt5, sn=8)
        assert serder8.ked["i"] == pre
        assert serder8.ked["s"] == '8'
        assert serder8.ked["p"] == serder7.dig

        # sign serialization and verify signature
        sig8 = signers[4].sign(serder8.raw, index=0)
        assert signers[4].verfer.verify(sig8.raw, serder8.raw)
        # update key event verifier state
        with pytest.raises(ValidationError):  # nontransferable so reject update
            kever.update(serder=serder8, sigers=[sig8])

        # database KEL must contain exactly the accepted events in order
        db_digs = [bytes(val).decode("utf-8") for val in kever.baser.getKelIter(pre)]
        assert db_digs == event_digs

    """ Done Test """
def test_keyeventsequence_1():
    """
    Test generation of a sequence of key events
    Test when EstOnly trait in config of inception event. Establishment only

    With the EstOnly config trait, interaction (ixn) events must be rejected
    and only establishment events (icp, rot) are accepted into the KEL.
    """
    # Test sequence of events given set of secrets
    secrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    # create signers
    signers = [Signer(qb64=secret) for secret in secrets]  # faster
    assert [signer.qb64 for signer in signers] == secrets

    pubkeys = [signer.verfer.qb64 for signer in signers]
    assert pubkeys == [
        'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA',
        'DVcuJOOJF1IE8svqEtrSuyQjGTd2HhfAkt9y2QkUtFJI',
        'DT1iAhBWCkvChxNWsby2J0pJyxBIxbAtbLA0Ljx-Grh8',
        'DKPE5eeJRzkRTMOoRGVd2m18o8fLqM2j9kaxLhV3x8AQ',
        'D1kcBE7h0ImWW6_Sp7MQxGYSshZZz6XM7OiUE5DXm0dU',
        'D4JDgo3WNSUpt-NG14Ni31_GCmrU0r38yo7kgDuyGkQM',
        'DVjWcaNX2gCkHOjk6rkmqPBCxkRCqwIJ-3OjdYmMwxf4',
        'DT1nEDepd6CSAMCE7NY_jlLdG6_mKUlKS_mW-2HJY1hg'
    ]

    # New Sequence establishment only
    with openDB(name="controller") as conlgr:
        event_digs = []  # list of event digs in sequence

        # Event 0 Inception Transferable (nxt digest not empty)
        keys0 = [signers[0].verfer.qb64]
        # compute nxt digest from keys1
        keys1 = [signers[1].verfer.qb64]
        nexter1 = Nexter(keys=keys1)
        nxt1 = nexter1.qb64  # transferable so nxt is not empty
        cnfg = [TraitDex.EstOnly]  # EstOnly
        serder0 = incept(keys=keys0, nxt=nxt1, cnfg=cnfg)
        event_digs.append(serder0.dig)
        pre = serder0.ked["i"]
        assert serder0.ked["i"] == 'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
        assert serder0.ked["s"] == '0'
        assert serder0.ked["kt"] == '1'
        assert serder0.ked["k"] == keys0
        assert serder0.ked["n"] == nxt1
        assert serder0.ked["c"] == cnfg

        # sign serialization and verify signature
        sig0 = signers[0].sign(serder0.raw, index=0)
        assert signers[0].verfer.verify(sig0.raw, serder0.raw)
        # create key event verifier state
        kever = Kever(serder=serder0, sigers=[sig0], baser=conlgr)
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 0
        assert kever.serder.diger.qb64 == serder0.dig
        assert kever.ilk == Ilks.icp
        assert kever.tholder.thold == 1
        assert [verfer.qb64 for verfer in kever.verfers] == keys0
        assert kever.nexter.qb64 == nxt1
        assert kever.estOnly == True
        assert kever.transferable == True

        # Event 1 Interaction. Because EstOnly, this event not included in KEL
        serder1 = interact(pre=pre, dig=serder0.dig, sn=1)
        assert serder1.ked["i"] == pre
        assert serder1.ked["s"] == '1'
        assert serder1.ked["p"] == serder0.dig

        # sign serialization and verify signature
        sig1 = signers[0].sign(serder1.raw, index=0)
        assert signers[0].verfer.verify(sig1.raw, serder1.raw)
        # update key event verifier state
        with pytest.raises(ValidationError):  # attempt ixn with estOnly
            kever.update(serder=serder1, sigers=[sig1])

        # Event 1 Rotation Transferable
        # compute nxt digest from keys2 but from event0
        keys2 = [signers[2].verfer.qb64]
        nexter2 = Nexter(keys=keys2)
        nxt2 = nexter2.qb64  # transferable so nxt is not empty
        assert nxt2 == 'E-dapdcC6XR1KWmWDsNl4J_OxcGxNZw1Xd95JH5a34fI'
        # prior dig is event 0 since the ixn above was rejected
        serder2 = rotate(pre=pre, keys=keys1, dig=serder0.dig, nxt=nxt2, sn=1)
        event_digs.append(serder2.dig)
        assert serder2.ked["i"] == pre
        assert serder2.ked["s"] == '1'
        assert serder2.ked["kt"] == '1'
        assert serder2.ked["k"] == keys1
        assert serder2.ked["n"] == nxt2
        assert serder2.ked["p"] == serder0.dig

        # sign serialization and verify signature
        sig2 = signers[1].sign(serder2.raw, index=0)
        assert signers[1].verfer.verify(sig2.raw, serder2.raw)
        # update key event verifier state
        kever.update(serder=serder2, sigers=[sig2])
        assert kever.prefixer.qb64 == pre
        assert kever.sn == 1
        assert kever.serder.diger.qb64 == serder2.dig
        assert kever.ilk == Ilks.rot
        assert [verfer.qb64 for verfer in kever.verfers] == keys1
        assert kever.nexter.qb64 == nxt2

        # database KEL must contain only the accepted (establishment) events
        db_digs = [bytes(val).decode("utf-8") for val in kever.baser.getKelIter(pre)]
        assert db_digs == event_digs

    """ Done Test """
def test_kevery():
    """
    Test the support functionality for Kevery factory class
    Key Event Verifier Factory

    Builds a full event stream (events plus attached signature counters and
    signatures) against a controller database, then replays the stream
    through a Kevery on a validator database and checks the reconstructed
    key state and KEL match the originals.
    """
    logger.setLevel("ERROR")

    # Test sequence of events given set of secrets
    secrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    with openDB("controller") as conlgr, openDB("validator") as vallgr:
        event_digs = []  # list of event digs in sequence

        # create event stream
        msgs = bytearray()

        # create signers
        signers = [Signer(qb64=secret) for secret in secrets]  # faster
        assert [signer.qb64 for signer in signers] == secrets

        # Event 0 Inception Transferable (nxt digest not empty)
        serder = incept(keys=[signers[0].verfer.qb64],
                        nxt=Nexter(keys=[signers[1].verfer.qb64]).qb64)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[0].sign(serder.raw, index=0)  # return siger
        # create key event verifier state
        kever = Kever(serder=serder, sigers=[siger], baser=conlgr)
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        assert msgs == bytearray(b'{"v":"KERI10JSON0000ed_","i":"DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOo'
                                 b'eKtWTOunRA","s":"0","t":"icp","kt":"1","k":["DSuhyBcPZEZLK-fcw5t'
                                 b'zHn2N46wRCG_ZOoeKtWTOunRA"],"n":"EPYuj8mq_PYYsoBKkzX1kxSPGYBWaIy'
                                 b'a3slgCOyOtlqU","bt":"0","b":[],"c":[],"a":[]}-AABAAmagesCSY8QhYY'
                                 b'HCJXEWpsGD62qoLt2uyT0_Mq5lZPR88JyS5UrwFKFdcjPqyKc_SKaKDJhkGWCk07'
                                 b'k_kVkjyCA')

        # Event 1 Rotation Transferable
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[1].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[2].verfer.qb64]).qb64,
                        sn=1)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[1].sign(serder.raw, index=0)  # returns siger
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 2 Rotation Transferable
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[2].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[3].verfer.qb64]).qb64,
                        sn=2)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[2].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 3 Interaction
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=3)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[2].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 4 Interaction
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=4)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[2].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 5 Rotation Transferable
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[3].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[4].verfer.qb64]).qb64,
                        sn=5)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[3].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 6 Interaction
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=6)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[3].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 7 Rotation to null NonTransferable Abandon
        # nxt digest is empty
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[4].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt="",
                        sn=7)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[4].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 8 Interaction (rejected: identifier was abandoned above, but
        # the bytes are still appended to the stream so the Kevery replay
        # below must also drop them)
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=8)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[4].sign(serder.raw, index=0)
        # update key event verifier state
        with pytest.raises(ValidationError):  # nulled so reject any more events
            kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 8 Rotation (also rejected, also appended to the stream)
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[4].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[5].verfer.qb64]).qb64,
                        sn=8)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[4].sign(serder.raw, index=0)
        # update key event verifier state
        with pytest.raises(ValidationError):  # nontransferable so reject update
            kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        assert len(msgs) == 3171

        pre = kever.prefixer.qb64

        db_digs = [bytes(val).decode("utf-8") for val in kever.baser.getKelIter(pre)]
        assert db_digs == event_digs

        # replay the complete stream through a Kevery on the validator db
        kevery = Kevery(db=vallgr)

        # test for incomplete event in stream  (new process just hangs waiting for more bytes)
        # kevery.process(ims=kes[:20])
        # assert pre not in kevery.kevers  # shortage so gives up

        Parser().process(ims=msgs, kvy=kevery)
        # kevery.process(ims=msgs)

        assert pre in kevery.kevers
        vkever = kevery.kevers[pre]
        assert vkever.sn == kever.sn
        assert vkever.verfers[0].qb64 == kever.verfers[0].qb64
        assert vkever.verfers[0].qb64 == signers[4].verfer.qb64

        db_digs = [bytes(val).decode("utf-8") for val in kevery.db.getKelIter(pre)]
        assert db_digs == event_digs

    # context managers must have cleaned up the temp databases
    assert not os.path.exists(kevery.db.path)
    assert not os.path.exists(kever.baser.path)

    """ Done Test """
def test_multisig_digprefix():
    """
    Test multisig with self-addressing (digest) pre

    Uses a 2-of-3 multisig with a Blake3 self-addressing prefix, rotates
    through successive key sets, then replays the accumulated stream through
    a Kevery and checks the reconstructed key state.
    """
    # Test sequence of events given set of secrets
    secrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    with openDB("controller") as conlgr, openDB("validator") as vallgr:
        # create event stream
        msgs = bytearray()

        # create signers
        signers = [Signer(qb64=secret) for secret in secrets]  # faster
        # FIX: loop variable was misleadingly named 'siger' (a signature);
        # these are signers, matching the sibling tests.
        assert [signer.qb64 for signer in signers] == secrets

        # Event 0 Inception Transferable (nxt digest not empty)
        # 2 0f 3 multisig
        keys = [signers[0].verfer.qb64, signers[1].verfer.qb64, signers[2].verfer.qb64]
        nxtkeys = [signers[3].verfer.qb64, signers[4].verfer.qb64, signers[5].verfer.qb64]
        sith = "2"
        code = MtrDex.Blake3_256  # Blake3 digest of incepting data
        serder = incept(keys=keys,
                        code=code,
                        sith=sith,
                        nxt=Nexter(keys=nxtkeys).qb64)

        # create sig counter
        count = len(keys)
        counter = Counter(CtrDex.ControllerIdxSigs, count=count)  # default is count = 1
        # sign serialization
        sigers = [signers[i].sign(serder.raw, index=i) for i in range(count)]
        # create key event verifier state
        kever = Kever(serder=serder, sigers=sigers, baser=conlgr)
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        for siger in sigers:
            msgs.extend(siger.qb64b)

        assert msgs == bytearray(b'{"v":"KERI10JSON00014b_","i":"EsiHneigxgDopAidk_dmHuiUJR3kAaeqpg'
                                 b'OAj9ZZd4q8","s":"0","t":"icp","kt":"2","k":["DSuhyBcPZEZLK-fcw5t'
                                 b'zHn2N46wRCG_ZOoeKtWTOunRA","DVcuJOOJF1IE8svqEtrSuyQjGTd2HhfAkt9y'
                                 b'2QkUtFJI","DT1iAhBWCkvChxNWsby2J0pJyxBIxbAtbLA0Ljx-Grh8"],"n":"E'
                                 b'9izzBkXX76sqt0N-tfLzJeRqj0W56p4pDQ_ZqNCDpyw","bt":"0","b":[],"c"'
                                 b':[],"a":[]}-AADAAhcaP-l0DkIKlJ87iIVcDx-m0iKPdSArEu63b-2cSEn9wXVG'
                                 b'NpWw9nfwxodQ9G8J3q_Pm-AWfDwZGD9fobWuHBAAB6mz7zP0xFNBEBfSKG4mjpPb'
                                 b'eOXktaIyX8mfsEa1A3Psf7eKxSrJ5Woj3iUB2AhhLg412-zkk795qxsK2xfdxBAA'
                                 b'Cj5wdW-EyUJNgW0LHePQcSFNxW3ZyPregL4H2FoOrsPxLa3MZx6xYTh6i7YRMGY5'
                                 b'0ezEjV81hkI1Yce75M_bPCQ')

        # Event 1 Rotation Transferable
        keys = nxtkeys
        sith = "2"
        nxtkeys = [signers[5].verfer.qb64, signers[6].verfer.qb64, signers[7].verfer.qb64]
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=keys,
                        sith=sith,
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=nxtkeys).qb64,
                        sn=1)
        # create sig counter
        count = len(keys)
        counter = Counter(CtrDex.ControllerIdxSigs, count=count)  # default is count = 1
        # sign serialization with the now-current keys (signers 3..5)
        sigers = [signers[i].sign(serder.raw, index=i-count) for i in range(count, count+count)]
        # update key event verifier state
        kever.update(serder=serder, sigers=sigers)
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        for siger in sigers:
            msgs.extend(siger.qb64b)

        # Event 2 Interaction
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=2)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs, count=count)  # default is count = 1
        # sign serialization
        sigers = [signers[i].sign(serder.raw, index=i-count) for i in range(count, count+count)]
        # update key event verifier state
        kever.update(serder=serder, sigers=sigers)
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        for siger in sigers:
            msgs.extend(siger.qb64b)

        # Event 3 Interaction  (comment previously mislabeled this "Event 4")
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=3)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs, count=count)  # default is count = 1
        # sign serialization
        sigers = [signers[i].sign(serder.raw, index=i-count) for i in range(count, count+count)]
        # update key event verifier state
        kever.update(serder=serder, sigers=sigers)
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        for siger in sigers:
            msgs.extend(siger.qb64b)

        # Event 4 Rotation to null NonTransferable Abandon
        # nxt digest is empty  (comment previously mislabeled this "Event 7")
        keys = nxtkeys
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=keys,
                        sith="2",
                        dig=kever.serder.diger.qb64,
                        nxt="",
                        sn=4)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs, count=count)  # default is count = 1
        # sign serialization with the now-current keys (signers 5..7)
        sigers = [signers[i].sign(serder.raw, index=i-5) for i in range(5, 8)]
        # update key event verifier state
        kever.update(serder=serder, sigers=sigers)
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        for siger in sigers:
            msgs.extend(siger.qb64b)

        assert len(msgs) == 2699

        # replay the stream through a Kevery on the validator db
        kevery = Kevery(db=vallgr)
        Parser().process(ims=msgs, kvy=kevery)
        # kevery.process(ims=msgs)

        pre = kever.prefixer.qb64
        assert pre in kevery.kevers
        vkever = kevery.kevers[pre]
        assert vkever.sn == kever.sn
        assert vkever.verfers[0].qb64 == kever.verfers[0].qb64
        assert vkever.verfers[0].qb64 == signers[5].verfer.qb64

    assert not os.path.exists(kevery.db.path)

    """ Done Test """
def test_recovery():
    """
    Test superseding recovery of a key event log (KEL) via a recovery
    rotation event.

    Builds a controller KEL: inception (sn=0), rotation (sn=1), interaction
    (sn=2), rotation (sn=3), then interactions at sn=4, 5, and 6.  A recovery
    rotation is then created at sn=5, anchored to the event at sn=4, which
    supersedes the previously accepted interaction events at sn=5 and sn=6.
    A new interaction at sn=6 then extends the recovered branch.  Verifies the
    database ordering of the full KEL (superseded events are retained) and of
    the per-sn latest KEL, then replays the entire stream through a separate
    validator Kevery and checks that its state matches the controller's.
    """
    # set of secrets (seeds) for the controller's sequence of signing keys;
    # key at index esn signs the current establishment epoch, index esn+1 is
    # pre-rotated via its digest in the 'nxt' field
    secrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    # create signers directly from the seeds (faster than generating new keys)
    signers = [Signer(qb64=secret) for secret in secrets]  # faster
    assert [signer.qb64 for signer in signers] == secrets

    with openDB("controller") as conlgr, openDB("validator") as vallgr:
        event_digs = []  # list of event digs in sequence to verify against database

        # create event stream
        kes = bytearray()
        sn = esn = 0  # sn and last establishment sn = esn

        # Event 0 Inception Transferable (nxt digest not empty)
        serder = incept(keys=[signers[esn].verfer.qb64],
                        nxt=Nexter(keys=[signers[esn+1].verfer.qb64]).qb64)
        assert sn == int(serder.ked["s"], 16) == 0
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)  # return siger
        # create key event verifier state
        kever = Kever(serder=serder, sigers=[siger], baser=conlgr)
        # extend key event stream: event, attachment counter, signature
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Rotation Transferable
        sn += 1
        esn += 1
        assert sn == esn == 1
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[esn].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[esn+1].verfer.qb64]).qb64,
                        sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)  # returns siger
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Interaction
        sn += 1  # do not increment esn
        assert sn == 2
        assert esn == 1
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Rotation Transferable
        sn += 1
        esn += 1
        assert sn == 3
        assert esn == 2
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[esn].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[esn+1].verfer.qb64]).qb64,
                        sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Interaction
        sn += 1  # do not increment esn
        assert sn == 4
        assert esn == 2
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Interaction (will later be superseded by the recovery)
        sn += 1  # do not increment esn
        assert sn == 5
        assert esn == 2
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Interaction (will later be superseded by the recovery)
        sn += 1  # do not increment esn
        assert sn == 6
        assert esn == 2
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Rotation Recovery at sn = 5
        # Superseding recovery: rewind sn from 6 back to 5 and anchor the
        # rotation to the prior event at sn=4 (event_digs[sn-1]), thereby
        # disputing the already-accepted interaction events at sn=5 and sn=6.
        sn = 5
        esn += 1
        assert sn == 5
        assert esn == 3
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[esn].verfer.qb64],
                        dig=event_digs[sn-1],
                        nxt=Nexter(keys=[signers[esn+1].verfer.qb64]).qb64,
                        sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # Next Event Interaction on the recovered branch
        sn += 1  # do not increment esn
        assert sn == 6
        assert esn == 3
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[esn].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # current signing key is the one introduced by the recovery rotation
        assert kever.verfers[0].qb64 == signers[esn].verfer.qb64

        pre = kever.prefixer.qb64

        # Full KEL in the database retains all 9 created events, including the
        # two superseded by the recovery; at sn=5 the original interaction
        # (event_digs[5]) precedes the recovery rotation (event_digs[7]), and
        # at sn=6 the superseded interaction (event_digs[6]) precedes the new
        # one (event_digs[8]).
        db_digs = [bytes(val).decode("utf-8") for val in kever.baser.getKelIter(pre)]
        assert len(db_digs) == len(event_digs) == 9
        assert db_digs[0:6] == event_digs[0:6]
        assert db_digs[-1] == event_digs[-1]
        assert db_digs[7] == event_digs[6]
        assert db_digs[6] == event_digs[7]

        # NOTE(review): per the assertions below, getKelEstIter yields one
        # entry per sn (the latest event at each sn): sn 0-4 unchanged, then
        # the recovery rotation and the new interaction replacing the two
        # disputed events — confirm intended semantics against the db docs.
        db_est_digs = [bytes(val).decode("utf-8") for val in kever.baser.getKelEstIter(pre)]
        assert len(db_est_digs) == 7
        assert db_est_digs[0:5] == event_digs[0:5]
        assert db_est_digs[5:7] == event_digs[7:9]

        # replay the full stream (including superseded events) through an
        # independent validator Kevery and verify it converges to same state
        kevery = Kevery(db=vallgr)
        Parser().process(ims=kes, kvy=kevery)
        # kevery.process(ims=kes)

        assert pre in kevery.kevers
        vkever = kevery.kevers[pre]
        assert vkever.sn == kever.sn
        assert vkever.verfers[0].qb64 == kever.verfers[0].qb64 == signers[esn].verfer.qb64

        # validator's database ordering matches the controller's exactly
        y_db_digs = [bytes(val).decode("utf-8") for val in kevery.db.getKelIter(pre)]
        assert db_digs == y_db_digs
        y_db_est_digs = [bytes(val).decode("utf-8") for val in kevery.db.getKelEstIter(pre)]
        assert db_est_digs == y_db_est_digs

    # openDB context managers clean up the temporary databases on exit
    assert not os.path.exists(kevery.db.path)
    assert not os.path.exists(kever.baser.path)

    """ Done Test """
def test_receipt():
    """
    Test event receipt message and attached couplets.

    Direct-mode scenario with a controller (coe) and a validator (val).  The
    validator issues non-transferable receipts (prefix + Cigar couples) for
    the controller's inception.  Exercises three receipt paths:

    1. a valid receipt, stored in the controller's receipt database (getRcts);
    2. a receipt for a not-yet-seen sn/dig, escrowed (getUres);
    3. a stale receipt at a valid sn but wrong dig, which is dropped.

    The KEL is then extended (rotation, interaction, rotation, three more
    interactions) with both Keverys processing every event, and final state
    is cross-checked between the two.
    """
    # manual process to generate a list of secrets
    # root = pysodium.randombytes(pysodium.crypto_pwhash_SALTBYTES)
    # secrets = generateSecrets(root=root, count=8)

    # Direct Mode coe is controller, val is validator

    # set of secrets (seeds for private keys)
    coeSecrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    # create signers
    coeSigners = [Signer(qb64=secret) for secret in coeSecrets]
    assert [signer.qb64 for signer in coeSigners] == coeSecrets

    # set of secrets (seeds for private keys)
    valSecrets = ['AgjD4nRlycmM5cPcAkfOATAp8wVldRsnc9f1tiwctXlw',
                  'AKUotEE0eAheKdDJh9QvNmSEmO_bjIav8V_GmctGpuCQ',
                  'AK-nVhMMJciMPvmF5VZE_9H-nhrgng9aJWf7_UHPtRNM',
                  'AT2cx-P5YUjIw_SLCHQ0pqoBWGk9s4N1brD-4pD_ANbs',
                  'Ap5waegfnuP6ezC18w7jQiPyQwYYsp9Yv9rYMlKAYL8k',
                  'Aqlc_FWWrxpxCo7R12uIz_Y2pHUH2prHx1kjghPa8jT8',
                  'AagumsL8FeGES7tYcnr_5oN6qcwJzZfLKxoniKUpG4qc',
                  'ADW3o9m3udwEf0aoOdZLLJdf1aylokP0lwwI_M2J9h0s']

    # create signers
    valSigners = [Signer(qb64=secret) for secret in valSecrets]
    assert [signer.qb64 for signer in valSigners] == valSecrets

    # create receipt signer prefixer default code is non-transferable
    valSigner = Signer(qb64=valSecrets[0], transferable=False)
    valPrefixer = Prefixer(qb64=valSigner.verfer.qb64)
    assert valPrefixer.code == MtrDex.Ed25519N  # non-transferable identifier
    valpre = valPrefixer.qb64
    assert valpre == 'B8KY1sKmgyjAiUDdUBPNPyrSz_ad_Qf9yzhDNZlEKiMc'

    with openDB("controller") as coeLogger, openDB("validator") as valLogger:
        coeKevery = Kevery(db=coeLogger)
        valKevery = Kevery(db=valLogger)
        event_digs = []  # list of event digs in sequence to verify against database

        # create event stream
        kes = bytearray()
        sn = esn = 0  # sn and last establishment sn = esn
        # create receipt msg stream
        res = bytearray()

        # Event 0 Inception Transferable (nxt digest not empty)
        serder = incept(keys=[coeSigners[esn].verfer.qb64],
                        nxt=Nexter(keys=[coeSigners[esn+1].verfer.qb64]).qb64)
        assert sn == int(serder.ked["s"], 16) == 0
        coepre = serder.ked["i"]
        assert coepre == 'DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRA'
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[esn].sign(serder.raw, index=0)  # return Siger if index
        # attach to key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)

        # make copy of kes so can use again for valKevery
        Parser().process(ims=bytearray(kes), kvy=coeKevery)
        # coeKevery.process(ims=bytearray(kes))  # create Kever using Kevery
        coeKever = coeKevery.kevers[coepre]
        assert coeKever.prefixer.qb64 == coepre
        assert coeKever.serder.raw == serder.raw
        Parser().process(ims=kes, kvy=valKevery)
        # valKevery.process(ims=kes)  # process by Val
        assert coepre in valKevery.kevers
        valKever = valKevery.kevers[coepre]
        assert len(kes) == 0  # stream fully consumed by the second parse

        # create receipt from val to coe for coe's inception event
        reserder = receipt(pre=coeKever.prefixer.qb64,
                           sn=coeKever.sn,
                           dig=coeKever.serder.diger.qb64)
        # sign event not receipt
        valCigar = valSigner.sign(ser=serder.raw)  # returns Cigar cause no index
        assert valCigar.qb64 == '0Bs2d05m7zpn6C9IJhb_GspbllxJwwxdrBg9bcbCjR5B8lrXJlglmiitpq3lEusEWdmmHSY4C_fxElKfF8mySfDQ'
        # counter for one attached non-transferable receipt couple
        recnt = Counter(code=CtrDex.NonTransReceiptCouples, count=1)
        assert recnt.qb64 == '-CAB'

        # receipt message = rct serder + counter + (prefix, cigar) couple
        res.extend(reserder.raw)
        res.extend(recnt.qb64b)
        res.extend(valPrefixer.qb64b)
        res.extend(valCigar.qb64b)
        assert res == bytearray(b'{"v":"KERI10JSON000091_","i":"DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOo'
                                b'eKtWTOunRA","s":"0","t":"rct","d":"ECw4ANul798tewQs25OLSDVXs-VHF'
                                b'_qXtm_EHk8ojTng"}-CABB8KY1sKmgyjAiUDdUBPNPyrSz_ad_Qf9yzhDNZlEKiM'
                                b'c0Bs2d05m7zpn6C9IJhb_GspbllxJwwxdrBg9bcbCjR5B8lrXJlglmiitpq3lEus'
                                b'EWdmmHSY4C_fxElKfF8mySfDQ')

        Parser().process(ims=res, kvy=coeKevery)
        # coeKevery.process(ims=res)  # coe process the receipt from val
        # check if in receipt database: couple stored under the event's dig key
        result = coeKevery.db.getRcts(key=dgKey(pre=coeKever.prefixer.qb64,
                                                dig=coeKever.serder.diger.qb64))
        assert bytes(result[0]) == valPrefixer.qb64b + valCigar.qb64b
        assert len(result) == 1

        # create invalid receipt to escrow use invalid dig and sn so not in db
        fake = reserder.dig  # some other dig
        reserder = receipt(pre=coeKever.prefixer.qb64,
                           sn=2,
                           dig=fake)
        # sign event not receipt
        valCigar = valSigner.sign(ser=serder.raw)  # returns Cigar cause no index
        recnt = Counter(code=CtrDex.NonTransReceiptCouples, count=1)
        # attach to receipt msg stream
        res.extend(reserder.raw)
        res.extend(recnt.qb64b)
        res.extend(valPrefixer.qb64b)
        res.extend(valCigar.qb64b)

        Parser().process(ims=res, kvy=coeKevery)
        # coeKevery.process(ims=res)  # coe process the escrow receipt from val
        # check if in escrow database: entry is dig + prefix + cigar keyed by sn
        result = coeKevery.db.getUres(key=snKey(pre=coeKever.prefixer.qb64,
                                                sn=2))
        assert bytes(result[0]) == fake.encode("utf-8") + valPrefixer.qb64b + valCigar.qb64b

        # create invalid receipt stale use valid sn so in database but invalid dig
        # so bad receipt
        fake = reserder.dig  # some other dig
        reserder = receipt(pre=coeKever.prefixer.qb64,
                           sn=coeKever.sn,
                           dig=fake)
        # sign event not receipt
        valCigar = valSigner.sign(ser=serder.raw)  # returns Cigar cause no index
        recnt = Counter(code=CtrDex.NonTransReceiptCouples, count=1)
        # attach to receipt msg stream
        res.extend(reserder.raw)
        res.extend(recnt.qb64b)
        res.extend(valPrefixer.qb64b)
        res.extend(valCigar.qb64b)

        Parser().processOne(ims=res, kvy=coeKevery)
        # coeKevery.processOne(ims=res)  # coe process the escrow receipt from val
        # no new receipt at valid dig: the stale receipt was dropped
        result = coeKevery.db.getRcts(key=dgKey(pre=coeKever.prefixer.qb64,
                                                dig=coeKever.serder.diger.qb64))
        assert len(result) == 1
        # no new receipt at invalid dig
        result = coeKevery.db.getRcts(key=dgKey(pre=coeKever.prefixer.qb64,
                                                dig=fake))
        assert not result

        # Next Event Rotation Transferable
        sn += 1
        esn += 1
        assert sn == esn == 1
        serder = rotate(pre=coeKever.prefixer.qb64,
                        keys=[coeSigners[esn].verfer.qb64],
                        dig=coeKever.serder.diger.qb64,
                        nxt=Nexter(keys=[coeSigners[esn+1].verfer.qb64]).qb64,
                        sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[esn].sign(serder.raw, index=0)  # returns siger
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)
        # both sides process: coe gets a copy, val consumes the stream
        Parser().process(ims=bytearray(kes), kvy=coeKevery)
        # coeKevery.process(ims=bytearray(kes))  # update key event verifier state
        Parser().process(ims=kes, kvy=valKevery)
        # valKevery.process(ims=kes)

        # Next Event Interaction
        sn += 1  # do not increment esn
        assert sn == 2
        assert esn == 1
        serder = interact(pre=coeKever.prefixer.qb64,
                          dig=coeKever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[esn].sign(serder.raw, index=0)
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)
        Parser().process(ims=bytearray(kes), kvy=coeKevery)
        # coeKevery.process(ims=bytearray(kes))  # update key event verifier state
        Parser().process(ims=kes, kvy=valKevery)
        # valKevery.process(ims=kes)

        # Next Event Rotation Transferable
        sn += 1
        esn += 1
        assert sn == 3
        assert esn == 2
        serder = rotate(pre=coeKever.prefixer.qb64,
                        keys=[coeSigners[esn].verfer.qb64],
                        dig=coeKever.serder.diger.qb64,
                        nxt=Nexter(keys=[coeSigners[esn+1].verfer.qb64]).qb64,
                        sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[esn].sign(serder.raw, index=0)
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)
        Parser().process(ims=bytearray(kes), kvy=coeKevery)
        # coeKevery.process(ims=bytearray(kes))  # update key event verifier state
        Parser().process(ims=kes, kvy=valKevery)
        # valKevery.process(ims=kes)

        # Next Event Interaction
        sn += 1  # do not increment esn
        assert sn == 4
        assert esn == 2
        serder = interact(pre=coeKever.prefixer.qb64,
                          dig=coeKever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[esn].sign(serder.raw, index=0)
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)
        Parser().process(ims=bytearray(kes), kvy=coeKevery)
        # coeKevery.process(ims=bytearray(kes))  # update key event verifier state
        Parser().process(ims=kes, kvy=valKevery)
        # valKevery.process(ims=kes)

        # Next Event Interaction
        sn += 1  # do not increment esn
        assert sn == 5
        assert esn == 2
        serder = interact(pre=coeKever.prefixer.qb64,
                          dig=coeKever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[esn].sign(serder.raw, index=0)
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)
        Parser().process(ims=bytearray(kes), kvy=coeKevery)
        # coeKevery.process(ims=bytearray(kes))  # update key event verifier state
        Parser().process(ims=kes, kvy=valKevery)
        # valKevery.process(ims=kes)

        # Next Event Interaction
        sn += 1  # do not increment esn
        assert sn == 6
        assert esn == 2
        serder = interact(pre=coeKever.prefixer.qb64,
                          dig=coeKever.serder.diger.qb64,
                          sn=sn)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[esn].sign(serder.raw, index=0)
        # extend key event stream
        kes.extend(serder.raw)
        kes.extend(counter.qb64b)
        kes.extend(siger.qb64b)
        Parser().process(ims=bytearray(kes), kvy=coeKevery)
        # coeKevery.process(ims=bytearray(kes))  # update key event verifier state
        Parser().process(ims=kes, kvy=valKevery)
        # valKevery.process(ims=kes)

        # final cross-checks: coe's current key, full KEL length, and val's
        # replicated state all agree
        assert coeKever.verfers[0].qb64 == coeSigners[esn].verfer.qb64

        db_digs = [bytes(val).decode("utf-8") for val in coeKever.baser.getKelIter(coepre)]
        assert len(db_digs) == len(event_digs) == 7

        assert valKever.sn == coeKever.sn
        assert valKever.verfers[0].qb64 == coeKever.verfers[0].qb64 == coeSigners[esn].verfer.qb64

    # openDB context managers clean up the temporary databases on exit
    assert not os.path.exists(valKevery.db.path)
    assert not os.path.exists(coeKever.baser.path)

    """ Done Test """
def test_direct_mode():
"""
Test direct mode with transferable validator event receipts
"""
# manual process to generate a list of secrets
# root = pysodium.randombytes(pysodium.crypto_pwhash_SALTBYTES)
# secrets = generateSecrets(root=root, count=8)
# Direct Mode initiated by coe is controller, val is validator
# but goes both ways once initiated.
# set of secrets (seeds for private keys)
coeSecrets = [
'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
]
# create coe signers
coeSigners = [Signer(qb64=secret) for secret in coeSecrets]
assert [signer.qb64 for signer in coeSigners] == coeSecrets
# set of secrets (seeds for private keys)
valSecrets = ['AgjD4nRlycmM5cPcAkfOATAp8wVldRsnc9f1tiwctXlw',
'AKUotEE0eAheKdDJh9QvNmSEmO_bjIav8V_GmctGpuCQ',
'AK-nVhMMJciMPvmF5VZE_9H-nhrgng9aJWf7_UHPtRNM',
'AT2cx-P5YUjIw_SLCHQ0pqoBWGk9s4N1brD-4pD_ANbs',
'Ap5waegfnuP6ezC18w7jQiPyQwYYsp9Yv9rYMlKAYL8k',
'Aqlc_FWWrxpxCo7R12uIz_Y2pHUH2prHx1kjghPa8jT8',
'AagumsL8FeGES7tYcnr_5oN6qcwJzZfLKxoniKUpG4qc',
'ADW3o9m3udwEf0aoOdZLLJdf1aylokP0lwwI_M2J9h0s']
# create val signers
valSigners = [Signer(qb64=secret) for secret in valSecrets]
assert [signer.qb64 for signer in valSigners] == valSecrets
with openDB("controller") as coeLogger, openDB("validator") as valLogger:
# init Keverys
coeKevery = Kevery(db=coeLogger)
valKevery = Kevery(db=valLogger)
coe_event_digs = [] # list of coe's own event log digs to verify against database
val_event_digs = [] # list of val's own event log digs to verify against database
# init sequence numbers for both coe and val
csn = cesn = 0 # sn and last establishment sn = esn
vsn = vesn = 0 # sn and last establishment sn = esn
# Coe Event 0 Inception Transferable (nxt digest not empty)
coeSerder = incept(keys=[coeSigners[cesn].verfer.qb64],
nxt=Nexter(keys=[coeSigners[cesn+1].verfer.qb64]).qb64,
code=MtrDex.Blake3_256)
assert csn == int(coeSerder.ked["s"], 16) == 0
coepre = coeSerder.ked["i"]
assert coepre == 'EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq93bjPu5wuqA'
coe_event_digs.append(coeSerder.dig)
# create sig counter
counter = Counter(CtrDex.ControllerIdxSigs) # default is count = 1
# sign serialization
siger = coeSigners[cesn].sign(coeSerder.raw, index=0) # return Siger if index
# create serialized message
cmsg = bytearray(coeSerder.raw)
cmsg.extend(counter.qb64b)
cmsg.extend(siger.qb64b)
assert cmsg == bytearray(b'{"v":"KERI10JSON0000ed_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq9'
b'3bjPu5wuqA","s":"0","t":"icp","kt":"1","k":["DSuhyBcPZEZLK-fcw5t'
b'zHn2N46wRCG_ZOoeKtWTOunRA"],"n":"EPYuj8mq_PYYsoBKkzX1kxSPGYBWaIy'
b'a3slgCOyOtlqU","bt":"0","b":[],"c":[],"a":[]}-AABAAvA7i3r6vs3ckx'
b'EZ2zVO8AtbjnaLKE_gwu0XNtzwB9p0fLKnC05cA07FWVx-mqoLDUO8mF1RcnoQvX'
b'WkVv_dtBA')
# create own Coe Kever in Coe's Kevery
Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
# coeKevery.processOne(ims=bytearray(cmsg)) # send copy of cmsg
coeKever = coeKevery.kevers[coepre]
assert coeKever.prefixer.qb64 == coepre
# Val Event 0 Inception Transferable (nxt digest not empty)
valSerder = incept(keys=[valSigners[vesn].verfer.qb64],
nxt=Nexter(keys=[valSigners[vesn+1].verfer.qb64]).qb64,
code=MtrDex.Blake3_256)
assert vsn == int(valSerder.ked["s"], 16) == 0
valpre = valSerder.ked["i"]
assert valpre == 'ED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQM'
val_event_digs.append(valSerder.dig)
# create sig counter
counter = Counter(CtrDex.ControllerIdxSigs) # default is count = 1
# sign serialization
siger = valSigners[vesn].sign(valSerder.raw, index=0) # return Siger if index
# create serialized message
vmsg = bytearray(valSerder.raw)
vmsg.extend(counter.qb64b)
vmsg.extend(siger.qb64b)
assert vmsg == bytearray(b'{"v":"KERI10JSON0000ed_","i":"ED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fj'
b'ew2KMl3FQM","s":"0","t":"icp","kt":"1","k":["D8KY1sKmgyjAiUDdUBP'
b'NPyrSz_ad_Qf9yzhDNZlEKiMc"],"n":"EOWDAJvex5dZzDxeHBANyaIoUG3F4-i'
b'c81G6GwtnC4f4","bt":"0","b":[],"c":[],"a":[]}-AABAArFZxr-FnvQVZF'
b'X8WSipIxCGVCJjT6fj6qkZ-ei9UAGshPsqdX7scy0zNIB4_AfIjdSLLRWgL33AJm'
b'C2neaxuDg')
# create own Val Kever in Val's Kevery
Parser().processOne(ims=bytearray(vmsg), kvy=valKevery)
# valKevery.processOne(ims=bytearray(vmsg)) # send copy of vmsg
valKever = valKevery.kevers[valpre]
assert valKever.prefixer.qb64 == valpre
# simulate sending of coe's inception message to val
Parser().process(ims=bytearray(cmsg), kvy=valKevery)
# valKevery.process(ims=bytearray(cmsg)) # make copy of msg
assert coepre in valKevery.kevers # creates Kever for coe in val's .kevers
# create receipt of coe's inception
# create seal of val's last est event
seal = SealEvent(i=valpre,
s="{:x}".format(valKever.lastEst.s),
d=valKever.lastEst.d)
coeK = valKevery.kevers[coepre] # lookup coeKever from val's .kevers
# create validator receipt
reserder = receipt(pre=coeK.prefixer.qb64,
sn=coeK.sn,
dig=coeK.serder.diger.qb64)
# sign coe's event not receipt
# look up event to sign from val's kever for coe
coeIcpDig = bytes(valKevery.db.getKeLast(key=snKey(pre=coepre, sn=csn)))
assert coeIcpDig == coeK.serder.diger.qb64b == b'EXeKMHPw0ql8vHiBOpo72AOrOsWZ3bRDL-DKkYHo4v6w'
coeIcpRaw = bytes(valKevery.db.getEvt(key=dgKey(pre=coepre, dig=coeIcpDig)))
assert coeIcpRaw == (b'{"v":"KERI10JSON0000ed_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq93bjPu5wuqA",'
b'"s":"0","t":"icp","kt":"1","k":["DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunR'
b'A"],"n":"EPYuj8mq_PYYsoBKkzX1kxSPGYBWaIya3slgCOyOtlqU","bt":"0","b":[],"c":['
b'],"a":[]}')
siger = valSigners[vesn].sign(ser=coeIcpRaw, index=0) # return Siger if index
assert siger.qb64 == 'AAocy9m9ToxeeZk-FkgjFh1x839Ims4peTy2C5MdawIwoa9wlIDbD-wGmiGO4QdrQ1lSntqUAUMkcGAzB0Q6SsAA'
rmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
assert rmsg == (b'{"v":"KERI10JSON000091_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq9'
b'3bjPu5wuqA","s":"0","t":"rct","d":"EXeKMHPw0ql8vHiBOpo72AOrOsWZ3'
b'bRDL-DKkYHo4v6w"}-FABED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQ'
b'M0AAAAAAAAAAAAAAAAAAAAAAAEeGqW24EnxUgO_wfuFo6GR_vii-RNv5iGo8ibUr'
b'he6Z0-AABAAocy9m9ToxeeZk-FkgjFh1x839Ims4peTy2C5MdawIwoa9wlIDbD-w'
b'GmiGO4QdrQ1lSntqUAUMkcGAzB0Q6SsAA')
# process own Val receipt in Val's Kevery so have copy in own log
Parser().processOne(ims=bytearray(rmsg), kvy=valKevery)
# valKevery.processOne(ims=bytearray(rmsg)) # process copy of rmsg
# attach reciept message to existing message with val's incept message
vmsg.extend(rmsg)
# Simulate send to coe of val's incept and val's receipt of coe's inception message
Parser().process(ims=vmsg, kvy=coeKevery)
# coeKevery.process(ims=vmsg) # coe process val's incept and receipt
# check if val Kever in coe's .kevers
assert valpre in coeKevery.kevers
# check if receipt quadruple from val in receipt database
result = coeKevery.db.getVrcs(key=dgKey(pre=coeKever.prefixer.qb64,
dig=coeKever.serder.diger.qb64))
assert bytes(result[0]) == (valKever.prefixer.qb64b +
Seqner(sn=valKever.sn).qb64b +
valKever.serder.diger.qb64b +
siger.qb64b)
assert bytes(result[0]) == (b'ED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQM0AAAAAAAAAAAAAAAAAAAAAAAEeGqW24E'
b'nxUgO_wfuFo6GR_vii-RNv5iGo8ibUrhe6Z0AAocy9m9ToxeeZk-FkgjFh1x839Ims4peTy2C5Md'
b'awIwoa9wlIDbD-wGmiGO4QdrQ1lSntqUAUMkcGAzB0Q6SsAA')
# create receipt to escrow use invalid dig and sn so not in coe's db
fake = reserder.dig # some other dig
reserder = receipt(pre=coeK.prefixer.qb64,
sn=10,
dig=fake)
# sign event not receipt
siger = valSigners[vesn].sign(ser=coeIcpRaw, index=0) # return Siger if index
# create message
vmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
assert vmsg == (b'{"v":"KERI10JSON000091_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq9'
b'3bjPu5wuqA","s":"a","t":"rct","d":"EpX7M0uQUxXFaY80_-O6Tn3xGRe9_'
b'unGqSTN8a9bAnTw"}-FABED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQ'
b'M0AAAAAAAAAAAAAAAAAAAAAAAEeGqW24EnxUgO_wfuFo6GR_vii-RNv5iGo8ibUr'
b'he6Z0-AABAAocy9m9ToxeeZk-FkgjFh1x839Ims4peTy2C5MdawIwoa9wlIDbD-w'
b'GmiGO4QdrQ1lSntqUAUMkcGAzB0Q6SsAA')
Parser().process(ims=vmsg, kvy=coeKevery)
# coeKevery.process(ims=vmsg) # coe process the escrow receipt from val
# check if receipt quadruple in escrow database
result = coeKevery.db.getVres(key=snKey(pre=coeKever.prefixer.qb64,
sn=10))
assert bytes(result[0]) == (fake.encode("utf-8") +
valKever.prefixer.qb64b +
Seqner(sn=valKever.sn).qb64b +
valKever.serder.diger.qb64b +
siger.qb64b)
# Send receipt from coe to val
# create receipt of val's inception
# create seal of coe's last est event
seal = SealEvent(i=coepre,
s="{:x}".format(coeKever.lastEst.s),
d=coeKever.lastEst.d)
valK = coeKevery.kevers[valpre] # lookup valKever from coe's .kevers
# create validator receipt
reserder = receipt(pre=valK.prefixer.qb64,
sn=valK.sn,
dig=valK.serder.diger.qb64)
# sign vals's event not receipt
# look up event to sign from coe's kever for val
valIcpDig = bytes(coeKevery.db.getKeLast(key=snKey(pre=valpre, sn=vsn)))
assert valIcpDig == valK.serder.diger.qb64b == b'EeGqW24EnxUgO_wfuFo6GR_vii-RNv5iGo8ibUrhe6Z0'
valIcpRaw = bytes(coeKevery.db.getEvt(key=dgKey(pre=valpre, dig=valIcpDig)))
assert valIcpRaw == (b'{"v":"KERI10JSON0000ed_","i":"ED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQM",'
b'"s":"0","t":"icp","kt":"1","k":["D8KY1sKmgyjAiUDdUBPNPyrSz_ad_Qf9yzhDNZlEKiM'
b'c"],"n":"EOWDAJvex5dZzDxeHBANyaIoUG3F4-ic81G6GwtnC4f4","bt":"0","b":[],"c":['
b'],"a":[]}')
siger = coeSigners[vesn].sign(ser=valIcpRaw, index=0) # return Siger if index
assert siger.qb64 == 'AAv33KFtbsfcOWbefKGnlf1hbtypw8RFtLZ-tdpZ3Purcs2YA1q1PDInwgm8nRV57M8dtRUG62DrNVtE7t8onjAA'
# create receipt message
cmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
assert cmsg == (b'{"v":"KERI10JSON000091_","i":"ED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fj'
b'ew2KMl3FQM","s":"0","t":"rct","d":"EeGqW24EnxUgO_wfuFo6GR_vii-RN'
b'v5iGo8ibUrhe6Z0"}-FABEQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq93bjPu5wuq'
b'A0AAAAAAAAAAAAAAAAAAAAAAAEXeKMHPw0ql8vHiBOpo72AOrOsWZ3bRDL-DKkYH'
b'o4v6w-AABAAv33KFtbsfcOWbefKGnlf1hbtypw8RFtLZ-tdpZ3Purcs2YA1q1PDI'
b'nwgm8nRV57M8dtRUG62DrNVtE7t8onjAA')
# coe process own receipt in own Kevery so have copy in own log
Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
# coeKevery.processOne(ims=bytearray(cmsg)) # make copy
# Simulate send to val of coe's receipt of val's inception message
Parser().process(ims=cmsg, kvy=valKevery)
# valKevery.process(ims=cmsg) # coe process val's incept and receipt
# check if receipt quadruple from coe in val's receipt database
result = valKevery.db.getVrcs(key=dgKey(pre=valKever.prefixer.qb64,
dig=valKever.serder.diger.qb64))
assert bytes(result[0]) == (coeKever.prefixer.qb64b +
Seqner(sn=coeKever.sn).qb64b +
coeKever.serder.diger.qb64b +
siger.qb64b)
assert bytes(result[0]) == (b'EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq93bjPu5wuqA0AAAAAAAAAAAAAAAAAAAAAAAEXeKMHPw'
b'0ql8vHiBOpo72AOrOsWZ3bRDL-DKkYHo4v6wAAv33KFtbsfcOWbefKGnlf1hbtypw8RFtLZ-tdpZ'
b'3Purcs2YA1q1PDInwgm8nRV57M8dtRUG62DrNVtE7t8onjAA')
# Coe Event 1 RotationTransferable
csn += 1
cesn += 1
assert csn == cesn == 1
coeSerder = rotate(pre=coeKever.prefixer.qb64,
keys=[coeSigners[cesn].verfer.qb64],
dig=coeKever.serder.diger.qb64,
nxt=Nexter(keys=[coeSigners[cesn+1].verfer.qb64]).qb64,
sn=csn)
coe_event_digs.append(coeSerder.dig)
# create sig counter
counter = Counter(CtrDex.ControllerIdxSigs) # default is count = 1
# sign serialization
siger = coeSigners[cesn].sign(coeSerder.raw, index=0) # returns siger
# create serialized message
cmsg = bytearray(coeSerder.raw)
cmsg.extend(counter.qb64b)
cmsg.extend(siger.qb64b)
assert cmsg == bytearray(b'{"v":"KERI10JSON000122_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq9'
b'3bjPu5wuqA","s":"1","t":"rot","p":"EXeKMHPw0ql8vHiBOpo72AOrOsWZ3'
b'bRDL-DKkYHo4v6w","kt":"1","k":["DVcuJOOJF1IE8svqEtrSuyQjGTd2HhfA'
b'kt9y2QkUtFJI"],"n":"E-dapdcC6XR1KWmWDsNl4J_OxcGxNZw1Xd95JH5a34fI'
b'","bt":"0","br":[],"ba":[],"a":[]}-AABAAegis-9tsF45eBLhvqsztm-lA'
b'LFkFK_T1epqHvOy3EkDVmk5g3sFps_ahsqXh7Ahitcbi-dWzRjh2ZsfzsB4OBw')
# update coe's key event verifier state
Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
# coeKevery.processOne(ims=bytearray(cmsg)) # make copy
# verify coe's copy of coe's event stream is updated
assert coeKever.sn == csn
assert coeKever.serder.diger.qb64 == coeSerder.dig
# simulate send message from coe to val
Parser().process(ims=cmsg, kvy=valKevery)
# valKevery.process(ims=cmsg)
# verify val's copy of coe's event stream is updated
assert coeK.sn == csn
assert coeK.serder.diger.qb64 == coeSerder.dig
# create receipt of coe's rotation
# create seal of val's last est event
seal = SealEvent(i=valpre,
s="{:x}".format(valKever.lastEst.s),
d=valKever.lastEst.d)
# create validator receipt
reserder = receipt(pre=coeK.prefixer.qb64,
sn=coeK.sn,
dig=coeK.serder.diger.qb64)
# sign coe's event not receipt
# look up event to sign from val's kever for coe
coeRotDig = bytes(valKevery.db.getKeLast(key=snKey(pre=coepre, sn=csn)))
assert coeRotDig == coeK.serder.diger.qb64b == b'EQK8BvEIsvM9r3VGd1Qi10Gzzllodv0Vmnl7nl_a05eY'
coeRotRaw = bytes(valKevery.db.getEvt(key=dgKey(pre=coepre, dig=coeRotDig)))
assert coeRotRaw == (b'{"v":"KERI10JSON000122_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq93bjPu5wuqA",'
b'"s":"1","t":"rot","p":"EXeKMHPw0ql8vHiBOpo72AOrOsWZ3bRDL-DKkYHo4v6w","kt":"1'
b'","k":["DVcuJOOJF1IE8svqEtrSuyQjGTd2HhfAkt9y2QkUtFJI"],"n":"E-dapdcC6XR1KWmW'
b'DsNl4J_OxcGxNZw1Xd95JH5a34fI","bt":"0","br":[],"ba":[],"a":[]}')
siger = valSigners[vesn].sign(ser=coeRotRaw, index=0) # return Siger if index
assert siger.qb64 == 'AAVFlKWaMfEee7tvDO7PLUVoCfbylu4zDH8Lrj2tmbsInqIN5oNSupo7S_j7AnCOdBaGf9WQMwwfHkeQpayCCyAA'
# val create receipt message
vmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
assert vmsg == (b'{"v":"KERI10JSON000091_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq9'
b'3bjPu5wuqA","s":"1","t":"rct","d":"EQK8BvEIsvM9r3VGd1Qi10Gzzllod'
b'v0Vmnl7nl_a05eY"}-FABED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQ'
b'M0AAAAAAAAAAAAAAAAAAAAAAAEeGqW24EnxUgO_wfuFo6GR_vii-RNv5iGo8ibUr'
b'he6Z0-AABAAVFlKWaMfEee7tvDO7PLUVoCfbylu4zDH8Lrj2tmbsInqIN5oNSupo'
b'7S_j7AnCOdBaGf9WQMwwfHkeQpayCCyAA')
# val process own receipt in own kevery so have copy in own log
Parser().processOne(ims=bytearray(vmsg), kvy=valKevery)
# valKevery.processOne(ims=bytearray(vmsg)) # make copy
# Simulate send to coe of val's receipt of coe's rotation message
Parser().process(ims=vmsg, kvy=coeKevery)
# coeKevery.process(ims=vmsg) # coe process val's incept and receipt
# check if receipt quadruple from val in receipt database
result = coeKevery.db.getVrcs(key=dgKey(pre=coeKever.prefixer.qb64,
dig=coeKever.serder.diger.qb64))
assert bytes(result[0]) == (valKever.prefixer.qb64b +
Seqner(sn=valKever.sn).qb64b +
valKever.serder.diger.qb64b +
siger.qb64b)
assert bytes(result[0]) == (b'ED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQM0AAAAAAAAAAAAAAAAAAAAAAAEeGqW24E'
b'nxUgO_wfuFo6GR_vii-RNv5iGo8ibUrhe6Z0AAVFlKWaMfEee7tvDO7PLUVoCfbylu4zDH8Lrj2t'
b'mbsInqIN5oNSupo7S_j7AnCOdBaGf9WQMwwfHkeQpayCCyAA')
# Next Event 2 Coe Interaction
csn += 1 # do not increment esn
assert csn == 2
assert cesn == 1
coeSerder = interact(pre=coeKever.prefixer.qb64,
dig=coeKever.serder.diger.qb64,
sn=csn)
coe_event_digs.append(coeSerder.dig)
# create sig counter
counter = Counter(CtrDex.ControllerIdxSigs) # default is count = 1
# sign serialization
siger = coeSigners[cesn].sign(coeSerder.raw, index=0)
# create msg
cmsg = bytearray(coeSerder.raw)
cmsg.extend(counter.qb64b)
cmsg.extend(siger.qb64b)
assert cmsg == bytearray(b'{"v":"KERI10JSON000098_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq9'
b'3bjPu5wuqA","s":"2","t":"ixn","p":"EQK8BvEIsvM9r3VGd1Qi10Gzzllod'
b'v0Vmnl7nl_a05eY","a":[]}-AABAAN4jrweIzLru2oSHdSeikFxXQcArKO2tzAa'
b'6ThfyBvUJQ8e6OsjvuV7wmmsa817kHYUjXsLx0OjKuCWV_VivMBg')
# update coe's key event verifier state
Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
# coeKevery.processOne(ims=bytearray(cmsg)) # make copy
# verify coe's copy of coe's event stream is updated
assert coeKever.sn == csn
assert coeKever.serder.diger.qb64 == coeSerder.dig
# simulate send message from coe to val
Parser().process(ims=cmsg, kvy=valKevery)
# valKevery.process(ims=cmsg)
# verify val's copy of coe's event stream is updated
assert coeK.sn == csn
assert coeK.serder.diger.qb64 == coeSerder.dig
# create receipt of coe's interaction
# create seal of val's last est event
seal = SealEvent(i=valpre,
s="{:x}".format(valKever.lastEst.s),
d=valKever.lastEst.d)
# create validator receipt
reserder = receipt(pre=coeK.prefixer.qb64,
sn=coeK.sn,
dig=coeK.serder.diger.qb64)
# sign coe's event not receipt
# look up event to sign from val's kever for coe
coeIxnDig = bytes(valKevery.db.getKeLast(key=snKey(pre=coepre, sn=csn)))
assert coeIxnDig == coeK.serder.diger.qb64b == b'EaQ7xvXmDpg7twdzEUEJGokBS6S_TXzmlY1a93ZQkx84'
coeIxnRaw = bytes(valKevery.db.getEvt(key=dgKey(pre=coepre, dig=coeIxnDig)))
assert coeIxnRaw == (b'{"v":"KERI10JSON000098_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq93bjPu5wuqA",'
b'"s":"2","t":"ixn","p":"EQK8BvEIsvM9r3VGd1Qi10Gzzllodv0Vmnl7nl_a05eY","a":[]}')
siger = valSigners[vesn].sign(ser=coeIxnRaw, index=0) # return Siger if index
assert siger.qb64 == 'AAFyuqh4gxmfs7NcXRV5RN6iL2_OJAtYbDjBv3oL_UwFyolhS1EhBBjeLXvsCAVXOqrj7GMW9t3tpxL7Xtfsc5Bw'
# create receipt message
vmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
assert vmsg == (b'{"v":"KERI10JSON000091_","i":"EQf1hzB6s5saaQPdDAsEzSMEFoQx_WLsq9'
b'3bjPu5wuqA","s":"2","t":"rct","d":"EaQ7xvXmDpg7twdzEUEJGokBS6S_T'
b'XzmlY1a93ZQkx84"}-FABED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQ'
b'M0AAAAAAAAAAAAAAAAAAAAAAAEeGqW24EnxUgO_wfuFo6GR_vii-RNv5iGo8ibUr'
b'he6Z0-AABAAFyuqh4gxmfs7NcXRV5RN6iL2_OJAtYbDjBv3oL_UwFyolhS1EhBBj'
b'eLXvsCAVXOqrj7GMW9t3tpxL7Xtfsc5Bw')
# val process own receipt in own kevery so have copy in own log
Parser().processOne(ims=bytearray(vmsg), kvy=valKevery)
# valKevery.processOne(ims=bytearray(vmsg)) # make copy
# Simulate send to coe of val's receipt of coe's rotation message
Parser().process(ims=vmsg, kvy=coeKevery)
# coeKevery.process(ims=vmsg) # coe process val's incept and receipt
# check if receipt quadruple from val in receipt database
result = coeKevery.db.getVrcs(key=dgKey(pre=coeKever.prefixer.qb64,
dig=coeKever.serder.diger.qb64))
assert bytes(result[0]) == (valKever.prefixer.qb64b +
Seqner(sn=valKever.sn).qb64b +
valKever.serder.diger.qb64b +
siger.qb64b)
assert bytes(result[0]) == (b'ED9EB3sA5u2vCPOEmX3d7bEyHiSh7Xi8fjew2KMl3FQM0AAAAAAAAAAAAAAAAAAAAAAAEeGqW24E'
b'nxUgO_wfuFo6GR_vii-RNv5iGo8ibUrhe6Z0AAFyuqh4gxmfs7NcXRV5RN6iL2_OJAtYbDjBv3oL'
b'_UwFyolhS1EhBBjeLXvsCAVXOqrj7GMW9t3tpxL7Xtfsc5Bw')
# verify final coe event state
assert coeKever.verfers[0].qb64 == coeSigners[cesn].verfer.qb64
assert coeKever.sn == coeK.sn == csn
db_digs = [bytes(v).decode("utf-8") for v in coeKever.baser.getKelIter(coepre)]
assert len(db_digs) == len(coe_event_digs) == csn+1
assert db_digs == coe_event_digs == ['EXeKMHPw0ql8vHiBOpo72AOrOsWZ3bRDL-DKkYHo4v6w',
'EQK8BvEIsvM9r3VGd1Qi10Gzzllodv0Vmnl7nl_a05eY',
'EaQ7xvXmDpg7twdzEUEJGokBS6S_TXzmlY1a93ZQkx84']
db_digs = [bytes(v).decode("utf-8") for v in valKever.baser.getKelIter(coepre)]
assert len(db_digs) == len(coe_event_digs) == csn+1
assert db_digs == coe_event_digs
# verify final val event state
assert valKever.verfers[0].qb64 == valSigners[vesn].verfer.qb64
assert valKever.sn == valK.sn == vsn
db_digs = [bytes(v).decode("utf-8") for v in valKever.baser.getKelIter(valpre)]
assert len(db_digs) == len(val_event_digs) == vsn+1
assert db_digs == val_event_digs == ['EeGqW24EnxUgO_wfuFo6GR_vii-RNv5iGo8ibUrhe6Z0']
db_digs = [bytes(v).decode("utf-8") for v in coeKever.baser.getKelIter(valpre)]
assert len(db_digs) == len(val_event_digs) == vsn+1
assert db_digs == val_event_digs
assert not os.path.exists(valKevery.db.path)
assert not os.path.exists(coeKever.baser.path)
""" Done Test """
def test_direct_mode_cbor_mgpk():
    """
    Test direct mode with transferable validator event receipts but using
    cbor and mgpk serializations.

    Coe (controller) serializes its events as CBOR; Val (validator)
    serializes its events and receipts as MsgPack (MGPK). The exchange
    covers inception, receipt, escrowed (out-of-order) receipt, rotation,
    and interaction, verifying both parties' databases after each step.
    All digests/signatures asserted below are deterministic because the
    seeds are fixed.
    """
    # manual process to generate a list of secrets
    # root = pysodium.randombytes(pysodium.crypto_pwhash_SALTBYTES)
    # secrets = generateSecrets(root=root, count=8)

    # Direct Mode initiated by coe is controller, val is validator
    # but goes both ways once initiated.

    # set of secrets (seeds for private keys)
    coeSecrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    # create coe signers
    coeSigners = [Signer(qb64=secret) for secret in coeSecrets]
    assert [signer.qb64 for signer in coeSigners] == coeSecrets

    # set of secrets (seeds for private keys)
    valSecrets = ['AgjD4nRlycmM5cPcAkfOATAp8wVldRsnc9f1tiwctXlw',
                  'AKUotEE0eAheKdDJh9QvNmSEmO_bjIav8V_GmctGpuCQ',
                  'AK-nVhMMJciMPvmF5VZE_9H-nhrgng9aJWf7_UHPtRNM',
                  'AT2cx-P5YUjIw_SLCHQ0pqoBWGk9s4N1brD-4pD_ANbs',
                  'Ap5waegfnuP6ezC18w7jQiPyQwYYsp9Yv9rYMlKAYL8k',
                  'Aqlc_FWWrxpxCo7R12uIz_Y2pHUH2prHx1kjghPa8jT8',
                  'AagumsL8FeGES7tYcnr_5oN6qcwJzZfLKxoniKUpG4qc',
                  'ADW3o9m3udwEf0aoOdZLLJdf1aylokP0lwwI_M2J9h0s']

    # create val signers
    valSigners = [Signer(qb64=secret) for secret in valSecrets]
    assert [signer.qb64 for signer in valSigners] == valSecrets

    with openDB("controller") as coeLogger, openDB("validator") as valLogger:
        # init Keverys (key event verifiers), one per party
        coeKevery = Kevery(db=coeLogger)
        valKevery = Kevery(db=valLogger)

        coe_event_digs = []  # list of coe's own event log digs to verify against database
        val_event_digs = []  # list of val's own event log digs to verify against database

        # init sequence numbers for both coe and val
        csn = cesn = 0  # sn and last establishment sn = esn
        vsn = vesn = 0  # sn and last establishment sn = esn

        # Coe Event 0 Inception Transferable (nxt digest not empty)
        # coe serializes with CBOR
        coeSerder = incept(keys=[coeSigners[cesn].verfer.qb64],
                           nxt=Nexter(keys=[coeSigners[cesn+1].verfer.qb64]).qb64,
                           code=MtrDex.Blake3_256,
                           kind=Serials.cbor)
        assert csn == int(coeSerder.ked["s"], 16) == 0
        coepre = coeSerder.ked["i"]

        coe_event_digs.append(coeSerder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[cesn].sign(coeSerder.raw, index=0)  # return Siger if index

        # create serialized message: event raw + attachment counter + signature
        cmsg = bytearray(coeSerder.raw)
        cmsg.extend(counter.qb64b)
        cmsg.extend(siger.qb64b)
        assert cmsg == bytearray(b'\xabavqKERI10CBOR0000c3_aix,EVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQT'
                                 b'Dqe1wasa0atcicpbkta1ak\x81x,DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWT'
                                 b'OunRAanx,EPYuj8mq_PYYsoBKkzX1kxSPGYBWaIya3slgCOyOtlqUbbta0ab'
                                 b'\x80ac\x80aa\x80-AABAAEiOvc6Yv5oBR6tHMAOJvAg_YKkHR4ifXF2mIsPYmDjE'
                                 b'q-WL3uimpNHMdGoSRVmXCotuCfDGTx3h0Gk5tz2jFBA')

        # create own Coe Kever in Coe's Kevery
        Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
        # coeKevery.processOne(ims=bytearray(cmsg))  # send copy of cmsg
        coeKever = coeKevery.kevers[coepre]
        assert coeKever.prefixer.qb64 == coepre

        # Val Event 0 Inception Transferable (nxt digest not empty)
        # val serializes with MsgPack
        valSerder = incept(keys=[valSigners[vesn].verfer.qb64],
                           nxt=Nexter(keys=[valSigners[vesn+1].verfer.qb64]).qb64,
                           code=MtrDex.Blake3_256,
                           kind=Serials.mgpk)
        assert vsn == int(valSerder.ked["s"], 16) == 0
        valpre = valSerder.ked["i"]

        val_event_digs.append(valSerder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = valSigners[vesn].sign(valSerder.raw, index=0)  # return Siger if index

        # create serialized message
        vmsg = bytearray(valSerder.raw)
        vmsg.extend(counter.qb64b)
        vmsg.extend(siger.qb64b)
        assert vmsg == bytearray(b'\x8b\xa1v\xb1KERI10MGPK0000c3_\xa1i\xd9,EPtxTO5d2FgPXkav6cS9NEPvcxh'
                                 b'fomsfKi3fvyMMuhZw\xa1s\xa10\xa1t\xa3icp\xa2kt\xa11\xa1k\x91\xd9,D8K'
                                 b'Y1sKmgyjAiUDdUBPNPyrSz_ad_Qf9yzhDNZlEKiMc\xa1n\xd9,EOWDAJvex5dZzDx'
                                 b'eHBANyaIoUG3F4-ic81G6GwtnC4f4\xa2bt\xa10\xa1b\x90\xa1c\x90'
                                 b'\xa1a\x90-AABAABD5vMFZKjWg2GBWr0mXxYCqGUMXLNRL_4cn6dunE9eHFVst5CkV'
                                 b'd8Oam6ZuDV3IPHoy1FAXJrxouHuiAKKmRCg')

        # create own Val Kever in Val's Kevery
        Parser().processOne(ims=bytearray(vmsg), kvy=valKevery)
        # valKevery.processOne(ims=bytearray(vmsg))  # send copy of vmsg
        valKever = valKevery.kevers[valpre]
        assert valKever.prefixer.qb64 == valpre

        # simulate sending of coe's inception message to val
        Parser().process(ims=bytearray(cmsg), kvy=valKevery)
        # valKevery.process(ims=bytearray(cmsg))  # make copy of msg
        assert coepre in valKevery.kevers  # creates Kever for coe in val's .kevers

        # create receipt of coe's inception
        # create seal of val's last est event
        seal = SealEvent(i=valpre,
                         s="{:x}".format(valKever.lastEst.s),
                         d=valKever.lastEst.d)
        coeK = valKevery.kevers[coepre]  # lookup coeKever from val's .kevers
        # create validator receipt (val serializes receipts as MGPK)
        reserder = receipt(pre=coeK.prefixer.qb64,
                           sn=coeK.sn,
                           dig=coeK.serder.diger.qb64,
                           kind=Serials.mgpk)
        # sign coe's event not receipt
        # look up event to sign from val's kever for coe
        coeIcpDig = bytes(valKevery.db.getKeLast(key=snKey(pre=coepre, sn=csn)))
        assert coeIcpDig == coeK.serder.diger.qb64b
        coeIcpRaw = bytes(valKevery.db.getEvt(key=dgKey(pre=coepre, dig=coeIcpDig)))
        assert coeIcpRaw == (b'\xabavqKERI10CBOR0000c3_aix,EVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQTDqe1wasa'
                             b'0atcicpbkta1ak\x81x,DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOoeKtWTOunRAanx,EPYuj8m'
                             b'q_PYYsoBKkzX1kxSPGYBWaIya3slgCOyOtlqUbbta0ab\x80ac\x80aa\x80')
        siger = valSigners[vesn].sign(ser=coeIcpRaw, index=0)  # return Siger if index

        # process own Val receipt in Val's Kevery so have copy in own log
        rmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
        assert rmsg == (b'\x85\xa1v\xb1KERI10MGPK00007f_\xa1i\xd9,EVA41BuPTjEEtBXWwBSeD4yRHz_'
                        b'TyyzfGE1ldQTDqe1w\xa1s\xa10\xa1t\xa3rct\xa1d\xd9,ETtM9-qTtHK-KKnFe'
                        b'rFmtFQAaw70dGBFOQwz7tp85w6E-FABEPtxTO5d2FgPXkav6cS9NEPvcxhfomsfK'
                        b'i3fvyMMuhZw0AAAAAAAAAAAAAAAAAAAAAAAEER-hNqduuxWenrs4SlX0RFV-VmcU'
                        b'-SSywAKr5PZV-0k-AABAA-ul0YeOLwffGCc5GUnAvgzwITCF2KXLfAzSbCANOkbr'
                        b'apY5w0Ybyeyiy1jTB_3OPEWa0_3tEMt6wZpb2zqICCw')
        Parser().processOne(ims=bytearray(rmsg), kvy=valKevery)
        # valKevery.processOne(ims=bytearray(rmsg))  # process copy of rmsg

        # attach reciept message to existing message with val's incept message
        vmsg.extend(rmsg)
        # Simulate send to coe of val's receipt of coe's inception message
        Parser().process(ims=bytearray(vmsg), kvy=coeKevery)
        # coeKevery.process(ims=vmsg)  # coe process val's incept and receipt

        # check if val Kever in coe's .kevers
        assert valpre in coeKevery.kevers
        # check if receipt quadruple from val in receipt database
        result = coeKevery.db.getVrcs(key=dgKey(pre=coeKever.prefixer.qb64,
                                                dig=coeKever.serder.diger.qb64))
        assert bytes(result[0]) == (valKever.prefixer.qb64b +
                                    Seqner(sn=valKever.sn).qb64b +
                                    valKever.serder.diger.qb64b +
                                    siger.qb64b)
        assert bytes(result[0]) == (b'EPtxTO5d2FgPXkav6cS9NEPvcxhfomsfKi3fvyMMuhZw0AAAAAAAAAAAAAAAAAAAAAAAEER-hNqd'
                                    b'uuxWenrs4SlX0RFV-VmcU-SSywAKr5PZV-0kAA-ul0YeOLwffGCc5GUnAvgzwITCF2KXLfAzSbCA'
                                    b'NOkbrapY5w0Ybyeyiy1jTB_3OPEWa0_3tEMt6wZpb2zqICCw')

        # create receipt to escrow use invalid dig so not in coe's db
        # (sn=10 is ahead of coe's KEL so coe must escrow it)
        fake = reserder.dig  # some other dig
        reserder = receipt(pre=coeK.prefixer.qb64,
                           sn=10,
                           dig=fake,
                           kind=Serials.mgpk)
        # sign event not receipt
        siger = valSigners[vesn].sign(ser=coeIcpRaw, index=0)  # return Siger if index
        # create message
        vmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
        assert vmsg == (b'\x85\xa1v\xb1KERI10MGPK00007f_\xa1i\xd9,EVA41BuPTjEEtBXWwBSeD4yRHz_'
                        b'TyyzfGE1ldQTDqe1w\xa1s\xa1a\xa1t\xa3rct\xa1d\xd9,Eo-3IvwDgH3uR3M0z'
                        b'Op8eYnrZTDHibWlhkISlU_HxRek-FABEPtxTO5d2FgPXkav6cS9NEPvcxhfomsfK'
                        b'i3fvyMMuhZw0AAAAAAAAAAAAAAAAAAAAAAAEER-hNqduuxWenrs4SlX0RFV-VmcU'
                        b'-SSywAKr5PZV-0k-AABAA-ul0YeOLwffGCc5GUnAvgzwITCF2KXLfAzSbCANOkbr'
                        b'apY5w0Ybyeyiy1jTB_3OPEWa0_3tEMt6wZpb2zqICCw')

        Parser().process(ims=vmsg, kvy=coeKevery)
        # coeKevery.process(ims=vmsg)  # coe process the escrow receipt from val
        # check if in escrow database
        result = coeKevery.db.getVres(key=snKey(pre=coeKever.prefixer.qb64,
                                                sn=10))
        assert bytes(result[0]) == (fake.encode("utf-8") +
                                    valKever.prefixer.qb64b +
                                    Seqner(sn=valKever.sn).qb64b +
                                    valKever.serder.diger.qb64b +
                                    siger.qb64b)

        # Send receipt from coe to val
        # create receipt of val's inception
        # create seal of coe's last est event
        seal = SealEvent(i=coepre,
                         s="{:x}".format(coeKever.lastEst.s),
                         d=coeKever.lastEst.d)
        valK = coeKevery.kevers[valpre]  # lookup valKever from coe's .kevers
        # create validator receipt (coe serializes receipts as CBOR)
        reserder = receipt(pre=valK.prefixer.qb64,
                           sn=valK.sn,
                           dig=valK.serder.diger.qb64,
                           kind=Serials.cbor)
        # sign vals's event not receipt
        # look up event to sign from coe's kever for val
        valIcpDig = bytes(coeKevery.db.getKeLast(key=snKey(pre=valpre, sn=vsn)))
        assert valIcpDig == valK.serder.diger.qb64b
        valIcpRaw = bytes(coeKevery.db.getEvt(key=dgKey(pre=valpre, dig=valIcpDig)))
        assert valIcpRaw == (b'\x8b\xa1v\xb1KERI10MGPK0000c3_\xa1i\xd9,EPtxTO5d2FgPXkav6cS9NEPvcxhfomsfKi3'
                             b'fvyMMuhZw\xa1s\xa10\xa1t\xa3icp\xa2kt\xa11\xa1k\x91\xd9,D8KY1sKmgyjAiUDdUBP'
                             b'NPyrSz_ad_Qf9yzhDNZlEKiMc\xa1n\xd9,EOWDAJvex5dZzDxeHBANyaIoUG3F4-ic81G6Gwt'
                             b'nC4f4\xa2bt\xa10\xa1b\x90\xa1c\x90\xa1a\x90')

        siger = coeSigners[vesn].sign(ser=valIcpRaw, index=0)  # return Siger if index
        # create receipt message
        cmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
        assert cmsg == (b'\xa5avqKERI10CBOR00007f_aix,EPtxTO5d2FgPXkav6cS9NEPvcxhfomsfKi3fvyM'
                        b'MuhZwasa0atcrctadx,EER-hNqduuxWenrs4SlX0RFV-VmcU-SSywAKr5PZV-0k-'
                        b'FABEVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQTDqe1w0AAAAAAAAAAAAAAAA'
                        b'AAAAAAAETtM9-qTtHK-KKnFerFmtFQAaw70dGBFOQwz7tp85w6E-AABAAJiCMw2T'
                        b'oI_EUZpMQh9TZ59CPmV40IiFtY01yW-7pXSVZO5Bd7Ldj8mgJvTxMs9zMVDcSKsT'
                        b'KRBLMqtjeJjcSCA')

        # coe process own receipt in own Kevery so have copy in own log
        Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
        # coeKevery.processOne(ims=bytearray(cmsg))  # make copy

        # Simulate send to val of coe's receipt of val's inception message
        Parser().process(ims=cmsg, kvy=valKevery)
        # valKevery.process(ims=cmsg)  # coe process val's incept and receipt

        # check if receipt from coe in val's receipt database
        result = valKevery.db.getVrcs(key=dgKey(pre=valKever.prefixer.qb64,
                                                dig=valKever.serder.diger.qb64))
        assert bytes(result[0]) == (coeKever.prefixer.qb64b +
                                    Seqner(sn=coeKever.sn).qb64b +
                                    coeKever.serder.diger.qb64b +
                                    siger.qb64b)
        assert bytes(result[0]) == (b'EVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQTDqe1w0AAAAAAAAAAAAAAAAAAAAAAAETtM9-qT'
                                    b'tHK-KKnFerFmtFQAaw70dGBFOQwz7tp85w6EAAJiCMw2ToI_EUZpMQh9TZ59CPmV40IiFtY01yW-'
                                    b'7pXSVZO5Bd7Ldj8mgJvTxMs9zMVDcSKsTKRBLMqtjeJjcSCA')

        # Coe RotationTransferable
        csn += 1
        cesn += 1
        assert csn == cesn == 1
        coeSerder = rotate(pre=coeKever.prefixer.qb64,
                           keys=[coeSigners[cesn].verfer.qb64],
                           dig=coeKever.serder.diger.qb64,
                           nxt=Nexter(keys=[coeSigners[cesn+1].verfer.qb64]).qb64,
                           sn=csn,
                           kind=Serials.cbor)
        coe_event_digs.append(coeSerder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[cesn].sign(coeSerder.raw, index=0)  # returns siger

        # create serialized message
        cmsg = bytearray(coeSerder.raw)
        cmsg.extend(counter.qb64b)
        cmsg.extend(siger.qb64b)
        assert cmsg == bytearray(b'\xacavqKERI10CBOR0000f5_aix,EVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQT'
                                 b'Dqe1wasa1atcrotapx,ETtM9-qTtHK-KKnFerFmtFQAaw70dGBFOQwz7tp85w6Eb'
                                 b'kta1ak\x81x,DVcuJOOJF1IE8svqEtrSuyQjGTd2HhfAkt9y2QkUtFJIanx,E-dapdc'
                                 b'C6XR1KWmWDsNl4J_OxcGxNZw1Xd95JH5a34fIbbta0bbr\x80bba\x80aa\x80-AA'
                                 b'BAAeAYbccb63ysFIn_4Yji_H7ofLyCiDLReIvuRHrFkl_i5A4xQGixszS8Xbkz8S'
                                 b'3W6hcXlNr4-1_m7qh4HEbCoCw')

        # update coe's key event verifier state
        Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
        # coeKevery.processOne(ims=bytearray(cmsg))  # make copy
        # verify coe's copy of coe's event stream is updated
        assert coeKever.sn == csn
        assert coeKever.serder.diger.qb64 == coeSerder.dig

        # simulate send message from coe to val
        Parser().process(ims=cmsg, kvy=valKevery)
        # valKevery.process(ims=cmsg)
        # verify val's copy of coe's event stream is updated
        assert coeK.sn == csn
        assert coeK.serder.diger.qb64 == coeSerder.dig

        # create receipt of coe's rotation
        # create seal of val's last est event
        seal = SealEvent(i=valpre,
                         s="{:x}".format(valKever.lastEst.s),
                         d=valKever.lastEst.d)
        # create validator receipt
        reserder = receipt(pre=coeK.prefixer.qb64,
                           sn=coeK.sn,
                           dig=coeK.serder.diger.qb64,
                           kind=Serials.mgpk)
        # sign coe's event not receipt
        # look up event to sign from val's kever for coe
        coeRotDig = bytes(valKevery.db.getKeLast(key=snKey(pre=coepre, sn=csn)))
        assert coeRotDig == coeK.serder.diger.qb64b
        coeRotRaw = bytes(valKevery.db.getEvt(key=dgKey(pre=coepre, dig=coeRotDig)))
        assert coeRotRaw == (b'\xacavqKERI10CBOR0000f5_aix,EVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQTDqe1wasa'
                             b'1atcrotapx,ETtM9-qTtHK-KKnFerFmtFQAaw70dGBFOQwz7tp85w6Ebkta1ak\x81x,DVcuJOO'
                             b'JF1IE8svqEtrSuyQjGTd2HhfAkt9y2QkUtFJIanx,E-dapdcC6XR1KWmWDsNl4J_OxcGxNZw1Xd9'
                             b'5JH5a34fIbbta0bbr\x80bba\x80aa\x80')
        siger = valSigners[vesn].sign(ser=coeRotRaw, index=0)  # return Siger if index

        # create receipt message
        vmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
        assert vmsg == (b'\x85\xa1v\xb1KERI10MGPK00007f_\xa1i\xd9,EVA41BuPTjEEtBXWwBSeD4yRHz_'
                        b'TyyzfGE1ldQTDqe1w\xa1s\xa11\xa1t\xa3rct\xa1d\xd9,EPHvonlASEhFuMiLw'
                        b'zbNp9cB9t2DJrcnvVvrK_P4G4Ss-FABEPtxTO5d2FgPXkav6cS9NEPvcxhfomsfK'
                        b'i3fvyMMuhZw0AAAAAAAAAAAAAAAAAAAAAAAEER-hNqduuxWenrs4SlX0RFV-VmcU'
                        b'-SSywAKr5PZV-0k-AABAAVRZVF2PkwQe5oFFQoDM_efMgGHCKlf1denoAa1NJZ45'
                        b'jnz1AP1VeVmfSFLsjApfJmgCBp-mYw0mP8KwrolsICQ')

        # val process own receipt in own kevery so have copy in own log
        Parser().processOne(ims=bytearray(vmsg), kvy=valKevery)
        # valKevery.processOne(ims=bytearray(vmsg))  # make copy

        # Simulate send to coe of val's receipt of coe's rotation message
        Parser().process(ims=vmsg, kvy=coeKevery)
        # coeKevery.process(ims=vmsg)  # coe process val's incept and receipt

        # check if receipt from val in receipt database
        result = coeKevery.db.getVrcs(key=dgKey(pre=coeKever.prefixer.qb64,
                                                dig=coeKever.serder.diger.qb64))
        assert bytes(result[0]) == (valKever.prefixer.qb64b +
                                    Seqner(sn=valKever.sn).qb64b +
                                    valKever.serder.diger.qb64b +
                                    siger.qb64b)
        assert bytes(result[0]) == (b'EPtxTO5d2FgPXkav6cS9NEPvcxhfomsfKi3fvyMMuhZw0AAAAAAAAAAAAAAAAAAAAAAAEER-hNqd'
                                    b'uuxWenrs4SlX0RFV-VmcU-SSywAKr5PZV-0kAAVRZVF2PkwQe5oFFQoDM_efMgGHCKlf1denoAa1'
                                    b'NJZ45jnz1AP1VeVmfSFLsjApfJmgCBp-mYw0mP8KwrolsICQ')

        # Next Event Coe Interaction
        csn += 1  # do not increment esn
        assert csn == 2
        assert cesn == 1
        coeSerder = interact(pre=coeKever.prefixer.qb64,
                             dig=coeKever.serder.diger.qb64,
                             sn=csn,
                             kind=Serials.cbor)
        coe_event_digs.append(coeSerder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = coeSigners[cesn].sign(coeSerder.raw, index=0)

        # create msg
        cmsg = bytearray(coeSerder.raw)
        cmsg.extend(counter.qb64b)
        cmsg.extend(siger.qb64b)
        assert cmsg == bytearray(b'\xa6avqKERI10CBOR000082_aix,EVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQT'
                                 b'Dqe1wasa2atcixnapx,EPHvonlASEhFuMiLwzbNp9cB9t2DJrcnvVvrK_P4G4Ssa'
                                 b'a\x80-AABAAW7s0SV5i1KMDOufvgTAkER4lhvxkKA6AacFa5G9ZX3UE0H65-GHV8MbM'
                                 b'8zMmtirALCQSXfMXoODdLuclkJElBQ')

        # update coe's key event verifier state
        Parser().processOne(ims=bytearray(cmsg), kvy=coeKevery)
        # coeKevery.processOne(ims=bytearray(cmsg))  # make copy
        # verify coe's copy of coe's event stream is updated
        assert coeKever.sn == csn
        assert coeKever.serder.diger.qb64 == coeSerder.dig

        # simulate send message from coe to val
        Parser().process(ims=cmsg, kvy=valKevery)
        # valKevery.process(ims=cmsg)
        # verify val's copy of coe's event stream is updated
        assert coeK.sn == csn
        assert coeK.serder.diger.qb64 == coeSerder.dig

        # create receipt of coe's interaction
        # create seal of val's last est event
        seal = SealEvent(i=valpre,
                         s="{:x}".format(valKever.lastEst.s),
                         d=valKever.lastEst.d)
        # create validator receipt
        reserder = receipt(pre=coeK.prefixer.qb64,
                           sn=coeK.sn,
                           dig=coeK.serder.diger.qb64,
                           kind=Serials.mgpk)
        # sign coe's event not receipt
        # look up event to sign from val's kever for coe
        coeIxnDig = bytes(valKevery.db.getKeLast(key=snKey(pre=coepre, sn=csn)))
        assert coeIxnDig == coeK.serder.diger.qb64b
        coeIxnRaw = bytes(valKevery.db.getEvt(key=dgKey(pre=coepre, dig=coeIxnDig)))
        assert coeIxnRaw == (b'\xa6avqKERI10CBOR000082_aix,EVA41BuPTjEEtBXWwBSeD4yRHz_TyyzfGE1ldQTDqe1wasa'
                             b'2atcixnapx,EPHvonlASEhFuMiLwzbNp9cB9t2DJrcnvVvrK_P4G4Ssaa\x80')
        siger = valSigners[vesn].sign(ser=coeIxnRaw, index=0)  # return Siger if index

        # create receipt message
        vmsg = messagize(serder=reserder, sigers=[siger], seal=seal)
        assert vmsg == (b'\x85\xa1v\xb1KERI10MGPK00007f_\xa1i\xd9,EVA41BuPTjEEtBXWwBSeD4yRHz_'
                        b'TyyzfGE1ldQTDqe1w\xa1s\xa12\xa1t\xa3rct\xa1d\xd9,EOMlWNCvqWStLVUUB'
                        b'lhwEbo3slpOD6-iVCtPBEsW9ofQ-FABEPtxTO5d2FgPXkav6cS9NEPvcxhfomsfK'
                        b'i3fvyMMuhZw0AAAAAAAAAAAAAAAAAAAAAAAEER-hNqduuxWenrs4SlX0RFV-VmcU'
                        b'-SSywAKr5PZV-0k-AABAAMkr7U_m3dAj9t4l6vBYp-KJaioD46oc5uUdwRzrt8bm'
                        b'yvx7qJuFeM_rv9mYvY_lHN5kP9BDjig6dD6Fheo2fDg')

        # val process own receipt in own kevery so have copy in own log
        Parser().processOne(ims=bytearray(vmsg), kvy=valKevery)
        # valKevery.processOne(ims=bytearray(vmsg))  # make copy

        # Simulate send to coe of val's receipt of coe's rotation message
        Parser().process(ims=vmsg, kvy=coeKevery)
        # coeKevery.process(ims=vmsg)  # coe process val's incept and receipt

        # check if receipt from val in receipt database
        result = coeKevery.db.getVrcs(key=dgKey(pre=coeKever.prefixer.qb64,
                                                dig=coeKever.serder.diger.qb64))
        assert bytes(result[0]) == (valKever.prefixer.qb64b +
                                    Seqner(sn=valKever.sn).qb64b +
                                    valKever.serder.diger.qb64b +
                                    siger.qb64b)
        assert bytes(result[0]) == (b'EPtxTO5d2FgPXkav6cS9NEPvcxhfomsfKi3fvyMMuhZw0AAAAAAAAAAAAAAAAAAAAAAAEER-hNqd'
                                    b'uuxWenrs4SlX0RFV-VmcU-SSywAKr5PZV-0kAAMkr7U_m3dAj9t4l6vBYp-KJaioD46oc5uUdwRz'
                                    b'rt8bmyvx7qJuFeM_rv9mYvY_lHN5kP9BDjig6dD6Fheo2fDg')

        # verify final coe event state: both logs agree on coe's KEL
        assert coeKever.verfers[0].qb64 == coeSigners[cesn].verfer.qb64
        assert coeKever.sn == coeK.sn == csn

        db_digs = [bytes(v).decode("utf-8") for v in coeKever.baser.getKelIter(coepre)]
        assert len(db_digs) == len(coe_event_digs) == csn+1
        assert db_digs == coe_event_digs == ['ETtM9-qTtHK-KKnFerFmtFQAaw70dGBFOQwz7tp85w6E',
                                             'EPHvonlASEhFuMiLwzbNp9cB9t2DJrcnvVvrK_P4G4Ss',
                                             'EOMlWNCvqWStLVUUBlhwEbo3slpOD6-iVCtPBEsW9ofQ']

        db_digs = [bytes(v).decode("utf-8") for v in valKever.baser.getKelIter(coepre)]
        assert len(db_digs) == len(coe_event_digs) == csn+1
        assert db_digs == coe_event_digs

        # verify final val event state: both logs agree on val's KEL
        assert valKever.verfers[0].qb64 == valSigners[vesn].verfer.qb64
        assert valKever.sn == valK.sn == vsn

        db_digs = [bytes(v).decode("utf-8") for v in valKever.baser.getKelIter(valpre)]
        assert len(db_digs) == len(val_event_digs) == vsn+1
        assert db_digs == val_event_digs == ['EER-hNqduuxWenrs4SlX0RFV-VmcU-SSywAKr5PZV-0k']

        db_digs = [bytes(v).decode("utf-8") for v in coeKever.baser.getKelIter(valpre)]
        assert len(db_digs) == len(val_event_digs) == vsn+1
        assert db_digs == val_event_digs

    # openDB context managers clean up the temp databases on exit
    assert not os.path.exists(valKevery.db.path)
    assert not os.path.exists(coeKever.baser.path)

    """ Done Test """
def test_process_nontransferable():
    """
    Test process of generating and validating key event messages
    """
    # Ephemeral (Nontransferable) case
    # A nontransferable keypair derives its AID directly from the public key.
    signer = Signer(transferable=False)  # original signing keypair non transferable
    assert signer.code == MtrDex.Ed25519_Seed
    assert signer.verfer.code == MtrDex.Ed25519N

    # Derive AID by merely assigning verifier public key
    aid = Prefixer(qb64=signer.verfer.qb64)
    assert aid.code == MtrDex.Ed25519N

    # Ephemeral may be used without inception event
    # but when used with inception event must be compatible event
    sn = 0        # inception event so 0
    sith = 1      # one signer
    nxt = ""      # non-transferable so nxt is empty
    toad = 0      # no witnesses
    nsigs = 1     # one attached signature unspecified index

    # build the inception key event dict for the ephemeral identifier
    ked = dict(v=Versify(kind=Serials.json, size=0),
               i=aid.qb64,                # qual base 64 prefix
               s="{:x}".format(sn),       # hex string no leading zeros lowercase
               t=Ilks.icp,
               kt="{:x}".format(sith),    # hex string no leading zeros lowercase
               k=[aid.qb64],              # list of signing keys each qual Base64
               n=nxt,                     # hash qual Base64
               wt="{:x}".format(toad),    # hex string no leading zeros lowercase
               w=[],                      # list of qual Base64 may be empty
               c=[],                      # list of config ordered mappings may be empty
               )

    # the AID must be derivable from the event itself
    assert aid.verify(ked=ked)

    # serialize the event and sign the serialization
    serder = Serder(ked=ked)
    sig = signer.sign(serder.raw, index=0)
    assert signer.verfer.verify(sig.raw, serder.raw)

    # assemble the wire packet: event + sig counter + indexed signature
    counter = Counter(CtrDex.ControllerIdxSigs)
    msg = bytearray(serder.raw + counter.qb64b + sig.qb64b)

    # receive side: peel the event off the front of the packet
    rserder = Serder(raw=msg)
    assert rserder.raw == serder.raw
    del msg[:rserder.size]  # strip off event from front

    # peel off the signature counter
    rcounter = Counter(qb64=msg)
    nrsigs = rcounter.count
    assert nrsigs == 1
    del msg[:len(rcounter.qb64)]

    # verify each attached indexed signature against the listed keys
    keys = rserder.ked["k"]
    for _ in range(nrsigs):
        rsig = Indexer(qb64=msg)
        assert rsig.index == 0
        verfer = Verfer(qb64=keys[rsig.index])
        assert verfer.qb64 == aid.qb64
        assert verfer.qb64 == signer.verfer.qb64
        assert verfer.verify(rsig.raw, rserder.raw)
        del msg[:len(rsig.qb64)]

    # verify the received prefix is self-consistent with the received event
    raid = Prefixer(qb64=rserder.pre)
    assert raid.verify(ked=rserder.ked)
    """ Done Test """
def test_process_transferable():
    """
    Test process of generating and validating key event messages
    """
    # Transferable case
    # Setup inception key event dict with a current key and a committed next key.
    sith = 1  # one signer

    # current signing keypair (transferable is the default)
    csigner = Signer()
    assert csigner.code == MtrDex.Ed25519_Seed
    assert csigner.verfer.code == MtrDex.Ed25519
    keys = [csigner.verfer.qb64]

    # next signing keypair, committed to via the nxt digest
    nsigner = Signer()
    assert nsigner.code == MtrDex.Ed25519_Seed
    assert nsigner.verfer.code == MtrDex.Ed25519
    nxtkeys = [nsigner.verfer.qb64]

    # compute nxt digest; transferable so next is not empty
    nexter = Nexter(keys=nxtkeys)
    nxt = nexter.qb64

    sn = 0     # inception event so 0
    toad = 0   # no witnesses
    nsigs = 1  # one attached signature unspecified index

    # prefix "i" left empty; it is derived from the event below
    ked = dict(v=Versify(kind=Serials.json, size=0),
               i="",                      # qual base 64 prefix
               s="{:x}".format(sn),       # hex string no leading zeros lowercase
               t=Ilks.icp,
               kt="{:x}".format(sith),    # hex string no leading zeros lowercase
               k=keys,                    # list of signing keys each qual Base64
               n=nxt,                     # hash qual Base64
               wt="{:x}".format(toad),    # hex string no leading zeros lowercase
               w=[],                      # list of qual Base64 may be empty
               c=[],
               )

    # Derive AID from ked; for Ed25519 basic derivation it equals the pub key
    aid = Prefixer(ked=ked, code=MtrDex.Ed25519)
    assert aid.code == MtrDex.Ed25519
    assert aid.qb64 == csigner.verfer.qb64

    # update ked with pre
    ked["i"] = aid.qb64

    # serialize and sign
    serder = Serder(ked=ked)
    sig = csigner.sign(serder.raw, index=0)
    assert csigner.verfer.verify(sig.raw, serder.raw)

    # assemble the wire packet: event + sig counter + indexed signature
    counter = Counter(CtrDex.ControllerIdxSigs)
    msg = bytearray(serder.raw + counter.qb64b + sig.qb64b)

    # receive side: peel the event off the front of the packet
    rserder = Serder(raw=msg)
    assert rserder.raw == serder.raw
    del msg[:rserder.size]  # strip off event from front

    # peel off the signature counter
    rcounter = Counter(qb64=msg)
    nrsigs = rcounter.count
    assert nrsigs == 1
    del msg[:len(rcounter.qb64)]

    # verify each attached indexed signature against the listed keys
    keys = rserder.ked["k"]
    for _ in range(nrsigs):
        rsig = Indexer(qb64=msg)
        assert rsig.index == 0
        verfer = Verfer(qb64=keys[rsig.index])
        assert verfer.qb64 == aid.qb64
        assert verfer.qb64 == csigner.verfer.qb64
        assert verfer.verify(rsig.raw, rserder.raw)
        del msg[:len(rsig.qb64)]

    # verify the received prefix is self-consistent with the received event
    raid = Prefixer(qb64=rserder.pre)
    assert raid.verify(ked=rserder.ked)

    # verify nxt digest from event still commits to the next keys
    rnxt = Nexter(qb64=rserder.ked["n"])
    assert rnxt.verify(keys=nxtkeys)
    """ Done Test """
def test_process_manual():
"""
Test manual process of generating and validating inception key event message
"""
# create qualified pre in basic format
# workflow is start with seed and save seed. Seed in this case is 32 bytes
# aidseed = pysodium.randombytes(pysodium.crypto_sign_SEEDBYTES)
aidseed = b'p6\xac\xb7\x10R\xc4\x9c7\xe8\x97\xa3\xdb!Z\x08\xdf\xfaR\x07\x9a\xb3\x1e\x9d\xda\xee\xa2\xbc\xe4;w\xae'
assert len(aidseed) == 32
# create and save verkey. Given we have sigseed and verkey then sigkey is
# redundant, that is, sigkey = sigseed + verkey. So we can easily recreate
# sigkey by concatenating sigseed + verkey.
verkey, sigkey = pysodium.crypto_sign_seed_keypair(aidseed)
assert verkey == b'\xaf\x96\xb0p\xfb0\xa7\xd0\xa4\x18\xc9\xdc\x1d\x86\xc2:\x98\xf7?t\x1b\xde.\xcc\xcb;\x8a\xb0\xa2O\xe7K'
assert len(verkey) == 32
# create qualified pre in basic format
aidmat = Matter(raw=verkey, code=MtrDex.Ed25519)
assert aidmat.qb64 == 'Dr5awcPswp9CkGMncHYbCOpj3P3Qb3i7MyzuKsKJP50s'
# create qualified next public key in basic format
nxtseed = pysodium.randombytes(pysodium.crypto_sign_SEEDBYTES)
nxtseed = b'm\x04\xf9\xe4\xd5`<\x91]>y\xe9\xe5$\xb6\xd8\xd5D\xb7\xea\xf6\x13\xd4\x08TYL\xb6\xc7 D\xc7'
assert len(nxtseed) == 32
# create and save verkey. Given we have sigseed and verkey then sigkey is
# redundant, that is, sigkey = sigseed + verkey. So we can easily recreate
# sigkey by concatenating sigseed + verkey.
verkey, sigkey = pysodium.crypto_sign_seed_keypair(nxtseed)
assert verkey == b'\xf5DOB:<\xcd\x16\x18\x9b\x83L\xa5\x0c\x98X\x90C\x1a\xb30O\xa5\x0f\xe39l\xa6\xdfX\x185'
assert len(verkey) == 32
# create qualified nxt key in basic format
nxtkeymat = Matter(raw=verkey, code=MtrDex.Ed25519)
assert nxtkeymat.qb64 == 'D9URPQjo8zRYYm4NMpQyYWJBDGrMwT6UP4zlspt9YGDU'
# create nxt digest
nxtsith = "{:x}".format(1) # lowecase hex no leading zeros
assert nxtsith == "1"
nxts = [] # create list to concatenate for hashing
nxts.append(nxtsith.encode("utf-8"))
nxts.append(nxtkeymat.qb64.encode("utf-8"))
nxtsraw = b''.join(nxts)
assert nxtsraw == b'1D9URPQjo8zRYYm4NMpQyYWJBDGrMwT6UP4zlspt9YGDU'
nxtdig = blake3.blake3(nxtsraw).digest()
assert nxtdig == b'\xdeWy\xd3=\xcb`\xce\xe9\x99\x0cF\xdd\xb2C6\x03\xa7F\rS\xd6\xfem\x99\x89\xac`<\xaa\x88\xd2'
nxtdigmat = Matter(raw=nxtdig, code=MtrDex.Blake3_256)
assert nxtdigmat.qb64 == 'E3ld50z3LYM7pmQxG3bJDNgOnRg1T1v5tmYmsYDyqiNI'
sn = 0
sith = 1
toad = 0
index = 0
#create key event dict
ked0 = dict(v=Versify(kind=Serials.json, size=0),
i=aidmat.qb64, # qual base 64 prefix
s="{:x}".format(sn), # hex string no leading zeros lowercase
t=Ilks.icp,
kt="{:x}".format(sith), # hex string no leading zeros lowercase
k=[aidmat.qb64], # list of signing keys each qual Base64
n=nxtdigmat.qb64, # hash qual Base64
wt ="{:x}".format(toad), # hex string no leading zeros lowercase
w=[], # list of qual Base64 may be empty
c=[], # list of config ordered mappings may be empty
)
txsrdr = Serder(ked=ked0, kind=Serials.json)
assert txsrdr.raw == (b'{"v":"KERI10JSON0000e6_","i":"Dr5awcPswp9CkGMncHYbCOpj3P3Qb3i7MyzuKsKJP50s",'
b'"s":"0","t":"icp","kt":"1","k":["Dr5awcPswp9CkGMncHYbCOpj3P3Qb3i7MyzuKsKJP50'
b's"],"n":"E3ld50z3LYM7pmQxG3bJDNgOnRg1T1v5tmYmsYDyqiNI","wt":"0","w":[],"c":['
b']}')
assert txsrdr.size == 230
txdig = blake3.blake3(txsrdr.raw).digest()
txdigmat = Matter(raw=txdig, code=MtrDex.Blake3_256)
assert txdigmat.qb64 == 'Ea-gtTKs7O4bJUXI5Rl7FM1xYgv-GtLd322iMGe0UZV8'
assert txsrdr.dig == txdigmat.qb64
sig0raw = pysodium.crypto_sign_detached(txsrdr.raw, aidseed + aidmat.raw) # sigkey = seed + verkey
assert len(sig0raw) == 64
result = pysodium.crypto_sign_verify_detached(sig0raw, txsrdr.raw, aidmat.raw)
assert not result # None if verifies successfully else raises ValueError
txsigmat = Indexer(raw=sig0raw, code=IdrDex.Ed25519_Sig, index=index)
assert txsigmat.qb64 == 'AAACj90Gx1W_YKEIKBuCB3H4_dNIUEXYpkm-oCW9MhnbqYqFKb4BhZU9PQRuVfExEPcvlrzzuxB-1B4ALXwOhqDQ'
assert len(txsigmat.qb64) == 88
assert txsigmat.index == index
msgb = txsrdr.raw + txsigmat.qb64.encode("utf-8")
assert len(msgb) == 318 # 230 + 88
# Recieve side
rxsrdr = Serder(raw=msgb)
assert rxsrdr.size == txsrdr.size
assert rxsrdr.ked == ked0
rxsigqb64 = msgb[rxsrdr.size:].decode("utf-8")
assert len(rxsigqb64) == len(txsigmat.qb64)
rxsigmat = Indexer(qb64=rxsigqb64)
assert rxsigmat.index == index
rxaidqb64 = rxsrdr.ked["i"]
rxaidmat = Matter(qb64=rxaidqb64)
assert rxaidmat.qb64 == aidmat.qb64
assert rxaidmat.code == MtrDex.Ed25519
rxverqb64 = rxsrdr.ked["k"][index]
rxvermat = Matter(qb64=rxverqb64)
assert rxvermat.qb64 == rxaidmat.qb64 # basic derivation same
result = pysodium.crypto_sign_verify_detached(rxsigmat.raw, rxsrdr.raw, rxvermat.raw)
assert not result # None if verifies successfully else raises ValueError
""" Done Test """
def test_parser():
    """
    Test the support functionality for Parser stream processor

    Builds a key event stream for one controller — inception, rotations,
    interactions, and a final abandonment (null nxt) — verifying each
    event locally with a Kever. It then replays the accumulated raw
    stream through a Parser attached to a validator Kevery and checks
    the validator reaches the same key state and KEL as the controller.
    """
    logger.setLevel("ERROR")

    # Test sequence of events given set of secrets
    secrets = [
        'ArwXoACJgOleVZ2PY7kXn7rA0II0mHYDhc6WrBH8fDAc',
        'A6zz7M08-HQSFq92sJ8KJOT2cZ47x7pXFQLPB0pckB3Q',
        'AcwFTk-wgk3ZT2buPRIbK-zxgPx-TKbaegQvPEivN90Y',
        'Alntkt3u6dDgiQxTATr01dy8M72uuaZEf9eTdM-70Gk8',
        'A1-QxDkso9-MR1A8rZz_Naw6fgaAtayda8hrbkRVVu1E',
        'AKuYMe09COczwf2nIoD5AE119n7GLFOVFlNLxZcKuswc',
        'AxFfJTcSuEE11FINfXMqWttkZGnUZ8KaREhrnyAXTsjw',
        'ALq-w1UKkdrppwZzGTtz4PWYEeWm0-sDHzOv5sq96xJY'
    ]

    with openDB("controller") as conDB, openDB("validator") as valDB:
        event_digs = []  # list of event digs in sequence
        # create event stream
        msgs = bytearray()
        # create signers
        signers = [Signer(qb64=secret) for secret in secrets]  # faster
        assert [signer.qb64 for signer in signers] == secrets

        # Event 0 Inception Transferable (nxt digest not empty)
        serder = incept(keys=[signers[0].verfer.qb64],
                        nxt=Nexter(keys=[signers[1].verfer.qb64]).qb64)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[0].sign(serder.raw, index=0)  # return siger
        # create key event verifier state
        kever = Kever(serder=serder, sigers=[siger], baser=conDB)
        # extend key event stream: event followed by attached sig count code and sig
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        assert msgs == bytearray(b'{"v":"KERI10JSON0000ed_","i":"DSuhyBcPZEZLK-fcw5tzHn2N46wRCG_ZOo'
                                 b'eKtWTOunRA","s":"0","t":"icp","kt":"1","k":["DSuhyBcPZEZLK-fcw5t'
                                 b'zHn2N46wRCG_ZOoeKtWTOunRA"],"n":"EPYuj8mq_PYYsoBKkzX1kxSPGYBWaIy'
                                 b'a3slgCOyOtlqU","bt":"0","b":[],"c":[],"a":[]}-AABAAmagesCSY8QhYY'
                                 b'HCJXEWpsGD62qoLt2uyT0_Mq5lZPR88JyS5UrwFKFdcjPqyKc_SKaKDJhkGWCk07'
                                 b'k_kVkjyCA')

        # Event 1 Rotation Transferable
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[1].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[2].verfer.qb64]).qb64,
                        sn=1)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[1].sign(serder.raw, index=0)  # returns siger
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 2 Rotation Transferable
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[2].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[3].verfer.qb64]).qb64,
                        sn=2)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[2].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 3 Interaction
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=3)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[2].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 4 Interaction
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=4)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[2].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 5 Rotation Transferable
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[3].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[4].verfer.qb64]).qb64,
                        sn=5)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[3].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 6 Interaction
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=6)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[3].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 7 Rotation to null NonTransferable Abandon
        # nxt digest is empty
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[4].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt="",
                        sn=7)
        event_digs.append(serder.dig)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[4].sign(serder.raw, index=0)
        # update key event verifier state
        kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 8 Interaction — must be rejected since key state was nulled
        serder = interact(pre=kever.prefixer.qb64,
                          dig=kever.serder.diger.qb64,
                          sn=8)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[4].sign(serder.raw, index=0)
        # update key event verifier state
        with pytest.raises(ValidationError):  # nulled so reject any more events
            kever.update(serder=serder, sigers=[siger])
        # still extend key event stream so validator-side rejection is exercised too
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        # Event 8 Rotation — also rejected after abandonment
        serder = rotate(pre=kever.prefixer.qb64,
                        keys=[signers[4].verfer.qb64],
                        dig=kever.serder.diger.qb64,
                        nxt=Nexter(keys=[signers[5].verfer.qb64]).qb64,
                        sn=8)
        # create sig counter
        counter = Counter(CtrDex.ControllerIdxSigs)  # default is count = 1
        # sign serialization
        siger = signers[4].sign(serder.raw, index=0)
        # update key event verifier state
        with pytest.raises(ValidationError):  # nontransferable so reject update
            kever.update(serder=serder, sigers=[siger])
        # extend key event stream
        msgs.extend(serder.raw)
        msgs.extend(counter.qb64b)
        msgs.extend(siger.qb64b)

        assert len(msgs) == 3171

        pre = kever.prefixer.qb64

        # controller's database KEL matches the accepted event digests in order
        db_digs = [bytes(val).decode("utf-8") for val in kever.baser.getKelIter(pre)]
        assert db_digs == event_digs

        # replay full stream through parser into validator's kevery
        kevery = Kevery(db=valDB)
        parser = Parser(kvy=kevery)

        parser.process(ims=bytearray(msgs))  # make copy
        assert parser.ims == bytearray(b'')  # emptied
        assert pre in kevery.kevers
        vkever = kevery.kevers[pre]
        assert vkever.sn == kever.sn
        assert vkever.verfers[0].qb64 == kever.verfers[0].qb64
        assert vkever.verfers[0].qb64 == signers[4].verfer.qb64

        # validator's database KEL matches the controller's
        db_digs = [bytes(val).decode("utf-8") for val in kevery.db.getKelIter(pre)]
        assert db_digs == event_digs

        # a parser without a kevery still consumes the stream cleanly
        parser = Parser()  # no kevery
        parser.process(ims=msgs)
        assert parser.ims == bytearray(b'')

    # openDB context exit removes the temporary databases
    assert not os.path.exists(kevery.db.path)
    assert not os.path.exists(kever.baser.path)

    """ Done Test """
if __name__ == "__main__":
    # Run a single test directly (without pytest) when invoked as a script.
    test_receipt()
| 44.839298
| 125
| 0.612307
| 20,681
| 196,710
| 5.782747
| 0.072966
| 0.01271
| 0.009532
| 0.010009
| 0.801343
| 0.774753
| 0.754258
| 0.739558
| 0.718136
| 0.701044
| 0
| 0.062431
| 0.282131
| 196,710
| 4,386
| 126
| 44.849521
| 0.784473
| 0.167846
| 0
| 0.724507
| 0
| 0.004839
| 0.231837
| 0.225841
| 0
| 0
| 0
| 0
| 0.294504
| 1
| 0.008642
| false
| 0
| 0.006568
| 0
| 0.015209
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3894c900dbe82a9594daff781b656cfc00e19c13
| 131
|
py
|
Python
|
pytorch_grad_cam/utils/__init__.py
|
hddlovefxx/-pytorch-grad-cam
|
6eb5bd3a41bcd661c6acc53853258282286768fe
|
[
"MIT"
] | null | null | null |
pytorch_grad_cam/utils/__init__.py
|
hddlovefxx/-pytorch-grad-cam
|
6eb5bd3a41bcd661c6acc53853258282286768fe
|
[
"MIT"
] | null | null | null |
pytorch_grad_cam/utils/__init__.py
|
hddlovefxx/-pytorch-grad-cam
|
6eb5bd3a41bcd661c6acc53853258282286768fe
|
[
"MIT"
] | null | null | null |
from pytorch_grad_cam.utils.image import deprocess_image
from pytorch_grad_cam.utils.svd_on_activations import get_2d_projection
| 43.666667
| 72
| 0.89313
| 21
| 131
| 5.142857
| 0.666667
| 0.203704
| 0.277778
| 0.333333
| 0.425926
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.076336
| 131
| 2
| 73
| 65.5
| 0.884298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
389e18a3eed237aaef0e18c93710001a12fa5b70
| 215
|
py
|
Python
|
xview3/dataset/__init__.py
|
BloodAxe/xView3-The-First-Place-Solution
|
9a9600e7dfbaa24ff5a72c81061fbbbfed865847
|
[
"MIT"
] | 39
|
2022-01-11T05:20:29.000Z
|
2022-03-09T10:35:47.000Z
|
xview3/dataset/__init__.py
|
DIUx-xView/xView3_first_place
|
63a594601bf71a96909230d31097dca790b40ff2
|
[
"MIT"
] | 1
|
2022-03-07T09:50:22.000Z
|
2022-03-07T09:50:22.000Z
|
xview3/dataset/__init__.py
|
BloodAxe/xView3-The-First-Place-Solution
|
9a9600e7dfbaa24ff5a72c81061fbbbfed865847
|
[
"MIT"
] | 4
|
2022-01-14T16:14:02.000Z
|
2022-02-22T02:17:33.000Z
|
from .io import *
from .data_module import *
from .fixed_crop_dataset import *
from .keypoint_dataset import *
from .normalization import *
from .random_crop_dataset import *
from .crop_each_object_dataset import *
| 26.875
| 39
| 0.804651
| 30
| 215
| 5.466667
| 0.433333
| 0.365854
| 0.310976
| 0.256098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130233
| 215
| 7
| 40
| 30.714286
| 0.877005
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2a3c82dff7ccaa4ea421a119d4e5788b38627395
| 3,921
|
py
|
Python
|
tests/test_run.py
|
wingrune/Parietal
|
bdd82de0a0c98b22fd5b5c5dd7b42bc775b1bf48
|
[
"MIT"
] | null | null | null |
tests/test_run.py
|
wingrune/Parietal
|
bdd82de0a0c98b22fd5b5c5dd7b42bc775b1bf48
|
[
"MIT"
] | null | null | null |
tests/test_run.py
|
wingrune/Parietal
|
bdd82de0a0c98b22fd5b5c5dd7b42bc775b1bf48
|
[
"MIT"
] | null | null | null |
import nibabel as nib
from nibabel.gifti.gifti import GiftiDataArray, GiftiImage
from nilearn import datasets
import numpy as np
import os
import pytest
from tempfile import TemporaryDirectory
from msm import utils
from msm import run
def test_run():
    """Main function run_msm should run without error."""
    with TemporaryDirectory() as tmp_dir:
        # Fetch the low-resolution fsaverage3 left-hemisphere sphere and
        # decompress it into the scratch directory (dropping the ".gz").
        fsaverage = datasets.fetch_surf_fsaverage(mesh="fsaverage3")
        gz_path = fsaverage.sphere_left
        mesh_dest = os.path.join(tmp_dir, os.path.basename(gz_path[:-3]))
        utils.ungzip(gz_path, mesh_dest)

        n_voxels = 642
        coordsys = nib.load(mesh_dest).darrays[0].coordsys

        source_paths = [
            os.path.join(tmp_dir, "source1.gii"),
            os.path.join(tmp_dir, "source2.gii"),
        ]
        target_paths = [
            os.path.join(tmp_dir, "target1.gii"),
            os.path.join(tmp_dir, "target2.gii"),
        ]

        def _save_constant_map(path):
            # Each contrast file holds two identical all-ones darrays
            # that share the mesh's coordinate system.
            image = GiftiImage()
            darray = GiftiDataArray(np.ones(n_voxels), coordsys=coordsys)
            image.add_gifti_data_array(darray)
            image.add_gifti_data_array(darray)
            nib.save(image, path)

        # Write source maps first, then target maps.
        for contrast_path in source_paths + target_paths:
            _save_constant_map(contrast_path)

        mesh_gii, transformed_gii = run.run_msm(
            source_paths,
            mesh_dest,
            target_paths,
        )
        # Outputs must keep one value per mesh vertex.
        assert mesh_gii.darrays[0].data.shape[0] == n_voxels
        assert transformed_gii.darrays[0].data.shape[0] == n_voxels
@pytest.mark.parametrize("iterations", [1, "1,1,1,1"])
def test_run_iterations(iterations):
    """Main function run_msm should run without error."""
    with TemporaryDirectory() as tmp_dir:
        # Fetch the low-resolution fsaverage3 left-hemisphere sphere and
        # decompress it into the scratch directory (dropping the ".gz").
        fsaverage = datasets.fetch_surf_fsaverage(mesh="fsaverage3")
        gz_path = fsaverage.sphere_left
        mesh_dest = os.path.join(tmp_dir, os.path.basename(gz_path[:-3]))
        utils.ungzip(gz_path, mesh_dest)

        n_voxels = 642
        coordsys = nib.load(mesh_dest).darrays[0].coordsys

        source_paths = [
            os.path.join(tmp_dir, "source1.gii"),
            os.path.join(tmp_dir, "source2.gii"),
        ]
        target_paths = [
            os.path.join(tmp_dir, "target1.gii"),
            os.path.join(tmp_dir, "target2.gii"),
        ]

        def _save_constant_map(path):
            # Each contrast file holds two identical all-ones darrays
            # that share the mesh's coordinate system.
            image = GiftiImage()
            darray = GiftiDataArray(np.ones(n_voxels), coordsys=coordsys)
            image.add_gifti_data_array(darray)
            image.add_gifti_data_array(darray)
            nib.save(image, path)

        # Write source maps first, then target maps.
        for contrast_path in source_paths + target_paths:
            _save_constant_map(contrast_path)

        # `iterations` may be an int or a comma-separated string; both
        # parametrized forms must be accepted by run_msm.
        mesh_gii, transformed_gii = run.run_msm(
            source_paths,
            mesh_dest,
            target_paths,
            iterations=iterations,
        )
        # Outputs must keep one value per mesh vertex.
        assert mesh_gii.darrays[0].data.shape[0] == n_voxels
        assert transformed_gii.darrays[0].data.shape[0] == n_voxels
| 35.324324
| 79
| 0.638613
| 494
| 3,921
| 4.811741
| 0.17004
| 0.047118
| 0.04207
| 0.054691
| 0.875894
| 0.875894
| 0.875894
| 0.875894
| 0.875894
| 0.875894
| 0
| 0.012951
| 0.271359
| 3,921
| 110
| 80
| 35.645455
| 0.819041
| 0.096404
| 0
| 0.765432
| 0
| 0
| 0.035481
| 0
| 0
| 0
| 0
| 0
| 0.049383
| 1
| 0.024691
| false
| 0
| 0.111111
| 0
| 0.135802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a9858d5352d238ace9c7ff3e574d7117b35799c
| 43,020
|
py
|
Python
|
geneClassification/geneClassification.py
|
jsharbrough/CyMIRA_gene_classification
|
8c2766b970a2441dae26c35fefa28319f7aa96ef
|
[
"MIT"
] | null | null | null |
geneClassification/geneClassification.py
|
jsharbrough/CyMIRA_gene_classification
|
8c2766b970a2441dae26c35fefa28319f7aa96ef
|
[
"MIT"
] | null | null | null |
geneClassification/geneClassification.py
|
jsharbrough/CyMIRA_gene_classification
|
8c2766b970a2441dae26c35fefa28319f7aa96ef
|
[
"MIT"
] | null | null | null |
import sys
'''
geneClassification v3.0
USAGE:
python geneClassification.py CyMIRA.txt Orthogroups.txt targetingPredictions.txt > CyMIRA+targeting.txt
python geneClassification.py help
'''
def geneClassification(atFile,Orthogroups,genePredictions):
    """
    Classify genes into CyMIRA organelle-interaction categories and
    write the result as a tab-separated table to stdout.

    Parameters:
        atFile -- path to the CyMIRA classification file (passed to
                  buildGeneClassDict to build the AT-gene -> category map)
        Orthogroups -- path to an OrthoFinder-style Orthogroups.txt file
                       (space-separated: "OG0000001: gene1 gene2 ...")
        genePredictions -- path to a tab-separated targeting-prediction
                           file; lines starting with '#' are skipped;
                           column 1 is the gene id, column 2 the predicted
                           target ('mitochondria', 'plastid', 'dual',
                           'non-organellar', ...)

    Each gene is mapped through its orthogroup to Arabidopsis (AT) genes,
    votes are tallied per CyMIRA category, and majority rules (>= 0.5 of
    AT orthologs) decide the final labels, with the targeting prediction
    used as a tie-breaker for genes lacking interaction evidence.
    """
    CyMIRA = buildGeneClassDict(atFile) #build CyMIRA access dictionary
    # Full ordered list of CyMIRA categories; the first 13 are the summary
    # columns, the rest are fine-grained complex/pathway categories.
    catList = ['not','ot','ot-I','ot-NI','EC','mt','pt','mt-I','mt-NI','mt-EC','pt-I','pt-NI','pt-EC','mt-DNA_RRR','mt-Mito_TAT','mt-Mitoribosome','mt-Mitoribosome;Large_Subunit','mt-Mitoribosome;Small_Subunit','mt-OXPHOS','mt-OXPHOS;Complex_I','mt-OXPHOS;Complex_III','mt-OXPHOS;Complex_IV','mt-OXPHOS;Complex_V','mt-PPR','mt-Transcription_and_Transcript_Maturation','mt-Transcription_and_Transcript_Maturation;Intron_Splicing','mt-Transcription_and_Transcript_Maturation;RNA_Polymerase','mt-Transcription_and_Transcript_Maturation;Transcript_End_Processing','mt-Transcription_and_Transcript_Maturation;mTERF','mt-Transcription_and_Transcript_Maturation;rRNA_Base_Modification','mt-Transcription_and_Transcript_Maturation;tRNA_Base_Modification','mt-tRNA_Aminoacylation','pt-ACCase','pt-CLP','pt-Chlororibosome','pt-Chlororibosome;Large_Subunit','pt-Chlororibosome;Small_Subunit','pt-DNA_RRR','pt-PPR','pt-Photosynthesis','pt-Photosynthesis;ATP_Synthase','pt-Photosynthesis;Cytochrome_b6f','pt-Photosynthesis;NDH','pt-Photosynthesis;PSI','pt-Photosynthesis;PSII','pt-Photosynthesis;Rubisco','pt-Transcription_and_Transcript_Maturation','pt-Transcription_and_Transcript_Maturation;Intron_Splicing','pt-Transcription_and_Transcript_Maturation;RNA_Polymerase','pt-Transcription_and_Transcript_Maturation;Sigma_Factor','pt-Transcription_and_Transcript_Maturation;Transcript_End_Processing','pt-Transcription_and_Transcript_Maturation;mTERF','pt-Transcription_and_Transcript_Maturation;rRNA_Base_Modification','pt-Transcription_and_Transcript_Maturation;tRNA_Base_Modification','pt-tRNA_Aminoacylation']
    orthoDict = {} #build orthogroup dictionary (OG as key, gene list as value)
    infile = open(Orthogroups,'r')
    for line in infile:
        realLine = line
        # strip trailing tab/newline/carriage-return characters
        while realLine[-1] == '\t' or realLine[-1] == '\n' or realLine[-1] == '\r':
            realLine = realLine[0:-1]
        lineSplit = realLine.split(' ')
        og = lineSplit[0]
        # og[0:-1] drops the trailing ':' from the orthogroup id
        orthoDict[og[0:-1]] = lineSplit[1:]
    infile.close()
    backwardsOGDict = {} #Associate genes with orthogroup (Gene as key, OG as value)
    for og in orthoDict:
        currList = orthoDict[og]
        for gene in currList:
            backwardsOGDict[gene] = og
    predDict = {}   # gene id -> targeting prediction string
    geneList = []   # gene ids in file order (drives output membership)
    infile = open(genePredictions,'r')
    for line in infile:
        if line[0] != '#':  # skip comment/header lines
            realLine = line
            while realLine[-1] == '\t' or realLine[-1] == '\n' or realLine[-1] == '\r':
                realLine = realLine[0:-1]
            lineSplit = realLine.split('\t')
            predDict[lineSplit[0]] = lineSplit[1]
            geneList.append(lineSplit[0])
    infile.close()
    # Output accumulator: category -> list of classified genes.
    newCymira = {'pt-Chlororibosome':[],'pt-Transcription_and_Transcript_Maturation':[],'pt-Photosynthesis':[],'mt-Transcription_and_Transcript_Maturation':[],'mt-OXPHOS':[],'mt-Mitoribosome':[],'mt-OXPHOS;Complex_V': [], 'pt-Chlororibosome;Small_Subunit': [], 'pt-Photosynthesis;Cytochrome_b6f': [], 'mt-Transcription_and_Transcript_Maturation;Intron_Splicing': [], 'pt-Transcription_and_Transcript_Maturation;Intron_Splicing': [], 'mt-Transcription_and_Transcript_Maturation;RNA_Polymerase': [], 'mt-Transcription_and_Transcript_Maturation;rRNA_Base_Modification': [], 'mt-Transcription_and_Transcript_Maturation;tRNA_Base_Modification': [], 'pt-ACCase': [], 'pt-Transcription_and_Transcript_Maturation;tRNA_Base_Modification': [], 'mt-OXPHOS;Complex_I': [], 'mt-Transcription_and_Transcript_Maturation;Transcript_End_Processing': [], 'pt': [], 'not': [], 'mt-Mitoribosome;Large_Subunit': [], 'pt-Photosynthesis;PSII': [], 'pt-Chlororibosome;Large_Subunit': [], 'pt-PPR': [], 'pt-Transcription_and_Transcript_Maturation;Sigma_Factor': [], 'pt-Photosynthesis;PSI': [], 'pt-DNA_RRR': [], 'pt-Transcription_and_Transcript_Maturation;mTERF': [], 'mt-Mitoribosome;Small_Subunit': [], 'mt-NI': [], 'mt-OXPHOS;Complex_III': [], 'pt-Transcription_and_Transcript_Maturation;rRNA_Base_Modification': [], 'mt-PPR': [], 'pt-I': [], 'mt-I': [], 'mt-DNA_RRR': [], 'ot-NI': [], 'mt-Mito_TAT': [], 'ot-I': [], 'pt-Photosynthesis;ATP_Synthase': [], 'pt-Transcription_and_Transcript_Maturation;RNA_Polymerase': [], 'mt-Transcription_and_Transcript_Maturation;mTERF': [], 'mt-OXPHOS;Complex_IV': [], 'pt-tRNA_Aminoacylation': [], 'pt-NI': [], 'pt-Photosynthesis;Rubisco': [], 'pt-Transcription_and_Transcript_Maturation;Transcript_End_Processing': [], 'mt': [], 'pt-CLP': [], 'mt-tRNA_Aminoacylation': [], 'ot': [], 'pt-Photosynthesis;NDH': []}
    for gene in geneList:
        atDict = {}  # per-gene vote tally: category -> count of AT orthologs
        if gene in backwardsOGDict:
            og = backwardsOGDict[gene]
            atList = []
            ogList = orthoDict[og]
            # collect Arabidopsis (AT*) orthologs, stripping isoform suffix
            for ortholog in ogList:
                if 'AT' == ortholog[0:2]:
                    ogSplit = ortholog.split('.')
                    atList.append(ogSplit[0])
            # tally CyMIRA category membership across AT orthologs
            for at in atList:
                for cat in catList:
                    if at in CyMIRA[cat]:
                        if cat in atDict:
                            atDict[cat] += 1
                        else:
                            atDict[cat] = 1
            if 'ot' in atDict:
                if 'ot-I' in atDict:
                    if atDict['ot-I']/float(len(atList)) >= 0.5:
                        # majority interacting: drop non-interacting and
                        # 'not' labels that conflict with the interacting call
                        if 'ot-NI' in atDict:
                            del atDict['ot-NI']
                        if 'mt-I' in atDict and 'mt-NI' in atDict:
                            del atDict['mt-NI']
                        if 'pt-I' in atDict and 'pt-NI' in atDict:
                            del atDict['pt-NI']
                        if 'not' in atDict:
                            del atDict['not']
                    elif atDict['ot']/float(len(atList)) >= 0.5 and predDict[gene] != 'non-organellar':
                        # majority organelle-targeted but not majority
                        # interacting: use the targeting prediction to fill in
                        # mt/pt membership, then default to non-interacting
                        if predDict[gene] == 'mitochondria':
                            if 'mt' not in atDict:
                                atDict['mt'] = 1
                        if predDict[gene] == 'plastid':
                            if 'pt' not in atDict:
                                atDict['pt'] = 1
                        if predDict[gene] == 'dual':
                            if 'mt' not in atDict:
                                atDict['mt'] = 1
                            if 'pt' not in atDict:
                                atDict['pt'] = 1
                        if 'ot-I' not in atDict and 'ot-NI' not in atDict:
                            atDict['ot-NI'] = 1
                        if 'mt' in atDict and 'mt-I' not in atDict and 'mt-NI' not in atDict:
                            atDict['mt-NI'] = 1
                        if 'pt' in atDict and 'pt-I' not in atDict and 'pt-NI' not in atDict:
                            atDict['pt-NI'] = 1
                        if 'not' in atDict:
                            del atDict['not']
                    else:
                        atDict = {'not':1}
                # NOTE(review): this elif duplicates the body of the elif
                # above; it handles the case where no 'ot-I' votes exist at all
                elif atDict['ot']/float(len(atList)) >= 0.5 and predDict[gene] != 'non-organellar':
                    if predDict[gene] == 'mitochondria':
                        if 'mt' not in atDict:
                            atDict['mt'] = 1
                    if predDict[gene] == 'plastid':
                        if 'pt' not in atDict:
                            atDict['pt'] = 1
                    if predDict[gene] == 'dual':
                        if 'mt' not in atDict:
                            atDict['mt'] = 1
                        if 'pt' not in atDict:
                            atDict['pt'] = 1
                    if 'ot-I' not in atDict and 'ot-NI' not in atDict:
                        atDict['ot-NI'] = 1
                    if 'mt' in atDict and 'mt-I' not in atDict and 'mt-NI' not in atDict:
                        atDict['mt-NI'] = 1
                    if 'pt' in atDict and 'pt-I' not in atDict and 'pt-NI' not in atDict:
                        atDict['pt-NI'] = 1
                    if 'not' in atDict:
                        del atDict['not']
                else:
                    atDict = {'not':1}
            else:
                atDict = {'not':1}
        else:
            # gene has no orthogroup: not organelle-targeted by default
            atDict = {'not':1}
        # record the gene under every surviving category except the EC
        # summary columns, which are rebuilt below from complex membership
        for cat in atDict:
            if cat != 'EC' and cat != 'mt-EC' and cat != 'pt-EC':
                currList = newCymira[cat]
                currList.append(gene)
                newCymira[cat] = currList
    # Rebuild enzyme-complex (EC) summary lists from the member-complex
    # categories, deduplicating as we go.
    ECs = []
    mt_ECs = []
    pt_ECs = []
    for item in newCymira['mt-Mitoribosome']:
        if item not in ECs:
            ECs.append(item)
        if item not in mt_ECs:
            mt_ECs.append(item)
    for item in newCymira['mt-OXPHOS']:
        if item not in ECs:
            ECs.append(item)
        if item not in mt_ECs:
            mt_ECs.append(item)
    for item in newCymira['mt-Mito_TAT']:
        if item not in ECs:
            ECs.append(item)
        if item not in mt_ECs:
            mt_ECs.append(item)
    for item in newCymira['pt-ACCase']:
        if item not in ECs:
            ECs.append(item)
        if item not in pt_ECs:
            pt_ECs.append(item)
    for item in newCymira['pt-CLP']:
        if item not in ECs:
            ECs.append(item)
        if item not in pt_ECs:
            pt_ECs.append(item)
    for item in newCymira['pt-Chlororibosome']:
        if item not in ECs:
            ECs.append(item)
        if item not in pt_ECs:
            pt_ECs.append(item)
    for item in newCymira['pt-Photosynthesis']:
        if item not in ECs:
            ECs.append(item)
        if item not in pt_ECs:
            pt_ECs.append(item)
    newCymira['EC'] = ECs
    newCymira['mt-EC'] = mt_ECs
    newCymira['pt-EC'] = pt_ECs
    # Emit header: 13 human-readable summary columns followed by the raw
    # fine-grained category names.
    sys.stdout.write('Not-organelle-targeted\tOrganelle-targeted\tOrganelle-targeted_Interacting\tOrganelle-targeted_Non-interacting\tEnzyme_Complexes\tMitochondria-targeted\tPlastid-targeted\tMitochondria-targeted_Interacting\tMitochondria-targeted_Non-interacting\tMitochondria_Enzyme_Complexes\tPlastid-targeted_Interacting\tPlastid-targeted_Non-interacting\tPlastid_Enzyme_Complexes')
    for item in catList[13:]:
        sys.stdout.write('\t' + item)
    sys.stdout.write('\n')
    # Emit rows: one gene per cell per category column, padded with empty
    # cells once a column's list runs out. NOTE(review): row count is
    # bounded by the length of the 'not' column, which is assumed to be
    # the longest — shorter would truncate other columns; verify.
    i = 0
    while i < len(newCymira['not']):
        for cat in catList:
            if i < len(newCymira[cat]):
                currGenes = newCymira[cat]
                sys.stdout.write(currGenes[i] + '\t')
            else:
                sys.stdout.write('\t')
        sys.stdout.write('\n')
        i += 1
def buildGeneClassDict(atFile):
    """Parse a tab-delimited CyMIRA gene classification table into a dict.

    Expected columns per line (tab-separated):
        0: gene ID
        1: targeting call ('Other', 'No Call', 'Mitochondria', 'Plastid', 'Dual')
        2: interacting flag ('No' means non-interacting; anything else interacting)
        3: functional complex/category (may be absent)
        4: sub-complex/subunit (may be absent)

    Returns a dict mapping every CyMIRA category key to the list of gene IDs
    assigned to it, in file order.  The aggregate keys 'EC', 'mt-EC' and
    'pt-EC' hold the de-duplicated union of the enzyme-complex categories.
    """
    # All category keys, initialised to empty lists (same key insertion
    # order as the original literal, in case callers iterate the dict).
    CyMIRA = {key: [] for key in (
        'pt-Chlororibosome',
        'pt-Transcription_and_Transcript_Maturation',
        'pt-Photosynthesis',
        'mt-Transcription_and_Transcript_Maturation',
        'mt-OXPHOS',
        'mt-Mitoribosome',
        'mt-OXPHOS;Complex_V',
        'pt-Chlororibosome;Small_Subunit',
        'pt-Photosynthesis;Cytochrome_b6f',
        'mt-Transcription_and_Transcript_Maturation;Intron_Splicing',
        'pt-Transcription_and_Transcript_Maturation;Intron_Splicing',
        'mt-Transcription_and_Transcript_Maturation;RNA_Polymerase',
        'mt-Transcription_and_Transcript_Maturation;rRNA_Base_Modification',
        'mt-Transcription_and_Transcript_Maturation;tRNA_Base_Modification',
        'pt-ACCase',
        'pt-Transcription_and_Transcript_Maturation;tRNA_Base_Modification',
        'mt-OXPHOS;Complex_I',
        'mt-Transcription_and_Transcript_Maturation;Transcript_End_Processing',
        'pt',
        'not',
        'mt-Mitoribosome;Large_Subunit',
        'pt-Photosynthesis;PSII',
        'pt-Chlororibosome;Large_Subunit',
        'pt-PPR',
        'pt-Transcription_and_Transcript_Maturation;Sigma_Factor',
        'pt-Photosynthesis;PSI',
        'pt-DNA_RRR',
        'pt-Transcription_and_Transcript_Maturation;mTERF',
        'mt-Mitoribosome;Small_Subunit',
        'mt-NI',
        'mt-OXPHOS;Complex_III',
        'pt-Transcription_and_Transcript_Maturation;rRNA_Base_Modification',
        'mt-PPR',
        'pt-I',
        'mt-I',
        'mt-DNA_RRR',
        'ot-NI',
        'mt-Mito_TAT',
        'ot-I',
        'pt-Photosynthesis;ATP_Synthase',
        'pt-Transcription_and_Transcript_Maturation;RNA_Polymerase',
        'mt-Transcription_and_Transcript_Maturation;mTERF',
        'mt-OXPHOS;Complex_IV',
        'pt-tRNA_Aminoacylation',
        'pt-NI',
        'pt-Photosynthesis;Rubisco',
        'pt-Transcription_and_Transcript_Maturation;Transcript_End_Processing',
        'mt',
        'pt-CLP',
        'mt-tRNA_Aminoacylation',
        'ot',
        'pt-Photosynthesis;NDH',
    )}
    # Column-3 categories with no sub-complex breakdown, shared by mt and pt.
    simpleComplexes = {'DNA-RRR': 'DNA_RRR', 'PPR': 'PPR',
                       'tRNA Aminoacylation': 'tRNA_Aminoacylation'}
    # Column-4 sub-complex names -> key suffixes.
    ribosomeSubs = {'Large Subunit': 'Large_Subunit',
                    'Small Subunit': 'Small_Subunit'}
    oxphosSubs = {'Complex I': 'Complex_I', 'Complex III': 'Complex_III',
                  'Complex IV': 'Complex_IV', 'Complex V': 'Complex_V'}
    photoSubs = {'ATP Synthase': 'ATP_Synthase',
                 'Cytochrome b6f': 'Cytochrome_b6f', 'NDH': 'NDH',
                 'PSI': 'PSI', 'PSII': 'PSII', 'Rubisco': 'Rubisco'}
    transcriptionSubs = {
        'Intron Splicing': 'Intron_Splicing',
        'RNA Polymerase': 'RNA_Polymerase',
        'Transcript End Processing': 'Transcript_End_Processing',
        'mTERF': 'mTERF',
        'rRNA Base Modification': 'rRNA_Base_Modification',
        'tRNA Base Modification': 'tRNA_Base_Modification',
    }

    def add(gene, *cats):
        # Append gene to every listed category (lists mutate in place, so
        # the old fetch/append/store-back triple is unnecessary).
        for cat in cats:
            CyMIRA[cat].append(gene)

    def addOrganelle(gene, prefix, complexName, subName):
        # Record the column-3/4 functional category for one organelle
        # (prefix 'mt' or 'pt'); unknown names are silently ignored,
        # exactly as the original elif ladders did.
        if complexName in simpleComplexes:
            add(gene, prefix + '-' + simpleComplexes[complexName])
        elif prefix == 'mt' and complexName == 'Mito TAT Complex':
            add(gene, 'mt-Mito_TAT')
        elif prefix == 'mt' and complexName == 'Mitoribosome':
            add(gene, 'mt-Mitoribosome')
            if subName in ribosomeSubs:
                add(gene, 'mt-Mitoribosome;' + ribosomeSubs[subName])
        elif prefix == 'mt' and complexName == 'OXPHOS':
            add(gene, 'mt-OXPHOS')
            if subName in oxphosSubs:
                add(gene, 'mt-OXPHOS;' + oxphosSubs[subName])
        elif prefix == 'pt' and complexName in ('ACCase', 'CLP'):
            add(gene, 'pt-' + complexName)
        elif prefix == 'pt' and complexName == 'Chlororibosome':
            add(gene, 'pt-Chlororibosome')
            if subName in ribosomeSubs:
                add(gene, 'pt-Chlororibosome;' + ribosomeSubs[subName])
        elif prefix == 'pt' and complexName == 'Photosynthesis':
            add(gene, 'pt-Photosynthesis')
            if subName in photoSubs:
                add(gene, 'pt-Photosynthesis;' + photoSubs[subName])
        elif complexName == 'Transcription and Transcript Maturation':
            base = prefix + '-Transcription_and_Transcript_Maturation'
            add(gene, base)
            if subName in transcriptionSubs:
                add(gene, base + ';' + transcriptionSubs[subName])
            elif prefix == 'pt' and subName == 'Sigma Factor':
                # Sigma factors exist only on the plastid side.
                add(gene, base + ';Sigma_Factor')

    with open(atFile, 'r') as infile:
        for line in infile:
            # rstrip also copes with an all-whitespace line, which the old
            # character-by-character trim crashed on (IndexError on '').
            realLine = line.rstrip('\t\n\r')
            if not realLine:
                continue
            # Trailing empty tab fields are removed by the rstrip above, so
            # pad back out to five columns; the old code raised IndexError
            # for genes with no complex/sub-complex assignment.
            fields = realLine.split('\t')
            fields += [''] * (5 - len(fields))
            gene, targeting, interacting, complexName, subName = fields[:5]
            if targeting in ('Other', 'No Call'):
                add(gene, 'not')
            elif targeting == 'Mitochondria':
                add(gene, 'mt', 'ot')
                if interacting == 'No':
                    add(gene, 'mt-NI', 'ot-NI')
                else:
                    add(gene, 'mt-I', 'ot-I')
                addOrganelle(gene, 'mt', complexName, subName)
            elif targeting == 'Plastid':
                add(gene, 'pt', 'ot')
                if interacting == 'No':
                    add(gene, 'pt-NI', 'ot-NI')
                else:
                    add(gene, 'pt-I', 'ot-I')
                addOrganelle(gene, 'pt', complexName, subName)
            elif targeting == 'Dual':
                add(gene, 'pt', 'mt', 'ot')
                if interacting == 'No':
                    add(gene, 'pt-NI', 'mt-NI', 'ot-NI')
                else:
                    add(gene, 'ot-I')
                    # For dual-targeted genes, whether the gene counts as
                    # interacting in each single organelle depends on the
                    # functional category it belongs to.
                    if complexName in simpleComplexes:
                        # Shared machinery: interacting in both organelles.
                        add(gene, 'mt-I', 'pt-I')
                        addOrganelle(gene, 'pt', complexName, subName)
                        addOrganelle(gene, 'mt', complexName, subName)
                    elif complexName in ('Mito TAT Complex', 'Mitoribosome',
                                         'OXPHOS'):
                        # Mitochondria-only complexes.
                        add(gene, 'mt-I', 'pt-NI')
                        addOrganelle(gene, 'mt', complexName, subName)
                    elif complexName in ('ACCase', 'CLP', 'Chlororibosome',
                                         'Photosynthesis'):
                        # Plastid-only complexes.
                        add(gene, 'mt-NI', 'pt-I')
                        addOrganelle(gene, 'pt', complexName, subName)
                    elif complexName == 'Transcription and Transcript Maturation':
                        add(gene, 'pt-I',
                            'pt-Transcription_and_Transcript_Maturation',
                            'mt-Transcription_and_Transcript_Maturation')
                        if subName in transcriptionSubs:
                            suffix = ';' + transcriptionSubs[subName]
                            add(gene, 'mt-I',
                                'pt-Transcription_and_Transcript_Maturation' + suffix,
                                'mt-Transcription_and_Transcript_Maturation' + suffix)
                        elif subName == 'Sigma Factor':
                            # Sigma factors are plastid-only, so the gene is
                            # non-interacting on the mitochondrial side.
                            add(gene, 'mt-NI',
                                'pt-Transcription_and_Transcript_Maturation;Sigma_Factor')
    # Aggregate the enzyme-complex categories (mt first, then pt), keeping
    # first-seen order and dropping duplicates, as the original loops did.
    ECs = []
    mt_ECs = []
    pt_ECs = []
    for cat, orgECs in (('mt-Mitoribosome', mt_ECs), ('mt-OXPHOS', mt_ECs),
                        ('mt-Mito_TAT', mt_ECs), ('pt-ACCase', pt_ECs),
                        ('pt-CLP', pt_ECs), ('pt-Chlororibosome', pt_ECs),
                        ('pt-Photosynthesis', pt_ECs)):
        for item in CyMIRA[cat]:
            if item not in ECs:
                ECs.append(item)
            if item not in orgECs:
                orgECs.append(item)
    CyMIRA['EC'] = ECs
    CyMIRA['mt-EC'] = mt_ECs
    CyMIRA['pt-EC'] = pt_ECs
    return CyMIRA
# Usage text shown on 'help' or when the wrong number of arguments is given.
helpStatement = (
    '\n\ngeneClassification v3.0\n\nUSAGE:\n\n'
    '\tpython geneClassification.py CyMIRA.txt Orthogroups.txt '
    'targetingPredictions.txt > CyMIRA+targeting.txt\n\n'
    '\tpython geneClassification.py help\n\n'
)
# Run the classification with the three supplied file paths, or print usage.
if 'help' in sys.argv or len(sys.argv) != 4:
    sys.stderr.write(helpStatement)
else:
    geneClassification(sys.argv[1], sys.argv[2], sys.argv[3])
| 59.75
| 1,827
| 0.535193
| 4,111
| 43,020
| 5.457553
| 0.036974
| 0.060171
| 0.094848
| 0.141202
| 0.902567
| 0.884828
| 0.881307
| 0.876449
| 0.861027
| 0.856525
| 0
| 0.01025
| 0.349163
| 43,020
| 719
| 1,828
| 59.833102
| 0.791064
| 0.003417
| 0
| 0.866477
| 0
| 0.002841
| 0.285269
| 0.20831
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002841
| false
| 0
| 0.00142
| 0
| 0.005682
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aa571a4cb064a8912ba4adcce76d197c60db9135
| 107
|
py
|
Python
|
aoc20151203b.py
|
BarnabyShearer/aoc
|
4feb66c668b068f0f42ad99b916e80732eba5a2d
|
[
"MIT"
] | null | null | null |
aoc20151203b.py
|
BarnabyShearer/aoc
|
4feb66c668b068f0f42ad99b916e80732eba5a2d
|
[
"MIT"
] | null | null | null |
aoc20151203b.py
|
BarnabyShearer/aoc
|
4feb66c668b068f0f42ad99b916e80732eba5a2d
|
[
"MIT"
] | null | null | null |
from aoc20151203a import visited
def aoc(data):
    """Return the count of distinct positions visited by either walker.

    The even-indexed moves drive one walker and the odd-indexed moves the
    other; the result is the size of the union of their visited sets.
    """
    first_moves = data[::2]
    second_moves = data[1::2]
    return len(visited(first_moves) | visited(second_moves))
| 17.833333
| 56
| 0.682243
| 16
| 107
| 4.5625
| 0.6875
| 0.30137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120879
| 0.149533
| 107
| 5
| 57
| 21.4
| 0.681319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
aa6973a75be1484c225e723283bc135eb43cbed1
| 15,778
|
py
|
Python
|
test.py
|
Zachary-Jackson/Todo-API-with-Flask
|
3e47565cd8188637abcf4b9859a9496b5fa8a2c6
|
[
"BSD-3-Clause"
] | null | null | null |
test.py
|
Zachary-Jackson/Todo-API-with-Flask
|
3e47565cd8188637abcf4b9859a9496b5fa8a2c6
|
[
"BSD-3-Clause"
] | null | null | null |
test.py
|
Zachary-Jackson/Todo-API-with-Flask
|
3e47565cd8188637abcf4b9859a9496b5fa8a2c6
|
[
"BSD-3-Clause"
] | null | null | null |
import base64
import json
import unittest
from playhouse.test_utils import test_database
from peewee import *
from app import app
from models import Todo, User
# Collection endpoint for todos under test.
TODO_LIST_URL = 'http://localhost:8000/api/v1/todos'
# Add a number to this url for it to work.
TODO_ITEM_URL = 'http://localhost:8000/api/v1/todos/{}'
# BASIC_AUTH_HEADERS presumes a username of 'username' and a password of
# 'password'
BASIC_AUTH_HEADERS = {'Authorization': 'Basic ' +
                      base64.b64encode('username:password'.encode()).decode()}
# In-memory SQLite database shared by the tests; tables are created once
# here and bound per-test via playhouse's test_database() context manager.
TEST_DB = SqliteDatabase(':memory:')
TEST_DB.connect()
TEST_DB.create_tables([User, Todo], safe=True)
class TodoModelTestCase(unittest.TestCase):
    """Unit tests for the Todo model."""

    def test_todo_creation(self):
        """Creating a Todo persists exactly one row."""
        with test_database(TEST_DB, (User, Todo)):
            Todo.create(name='Plant some seeds in the garden.')
            self.assertEqual(1, Todo.select().count())
class UserModelTestCase(unittest.TestCase):
    """Unit tests for the User model."""

    def test_todo_creation(self):
        """Creating a single User stores exactly one row."""
        with test_database(TEST_DB, (User, Todo)):
            User.create(username='username', password='password')
            self.assertEqual(1, User.select().count())

    def test_duplicate_todo_creation(self):
        """Creating a second User with the same username raises."""
        with test_database(TEST_DB, (User, Todo)):
            User.create(username='username', password='password')
            # The duplicate username should violate a uniqueness constraint.
            with self.assertRaises(Exception):
                User.create(username='username', password='password')
class ViewTestCase(unittest.TestCase):
    """Base case that configures the Flask app for testing."""

    def setUp(self):
        """Enable testing mode, disable CSRF, and build a test client."""
        app.config.update(TESTING=True, WTF_CSRF_ENABLED=False)
        self.app = app.test_client()
class LoginpageViewTestCase(ViewTestCase):
    """Tests for the login page and the login form."""

    def test_login_HTTP_status(self):
        """The login page renders with username/password fields only."""
        with test_database(TEST_DB, (User, Todo)):
            result = self.app.get('/login')
            page = result.data.decode()
            self.assertEqual(200, result.status_code)
            self.assertIn("Username", page)
            self.assertIn("Password", page)
            self.assertNotIn("Confirm Password", page)

    def test_login_form(self):
        """Posting valid credentials redirects (HTTP 302)."""
        with test_database(TEST_DB, (User, Todo)):
            User.user_create(username='username', password='password')
            self.app.get('/login')
            credentials = {'username': 'username', 'password': 'password'}
            response = self.app.post('/login', data=credentials)
            self.assertEqual(302, response.status_code)

    def test_bad_login_form(self):
        """Posting a wrong password re-renders the page with an error."""
        with test_database(TEST_DB, (User, Todo)):
            User.user_create(username='username', password='password')
            self.app.get('/login')
            credentials = {'username': 'username', 'password': 'incorrect'}
            response = self.app.post('/login', data=credentials)
            self.assertEqual(200, response.status_code)
            # NOTE(review): "interred" looks like a typo for "entered", but
            # the assertion must match the app's actual message — confirm
            # against the view before changing either side.
            self.assertIn(
                "The username or password you interred is incorrect",
                response.data.decode()
            )
class HomepageViewTestCase(ViewTestCase):
    """Tests that the homepage renders correctly."""

    def test_homepage_HTTP_status(self):
        """The homepage responds with HTTP 200."""
        with test_database(TEST_DB, (User, Todo)):
            self.assertEqual(200, self.app.get('/').status_code)

    def test_homepage_information(self):
        """The homepage shows the new-task button."""
        with test_database(TEST_DB, (User, Todo)):
            self.assertIn("Add a New Task", self.app.get('/').data.decode())
class RegisterViewTestCase(ViewTestCase):
    """Tests for the registration page and form."""

    def test_register_HTTP_status(self):
        """The register page renders with a confirm-password field."""
        with test_database(TEST_DB, (User, Todo)):
            result = self.app.get('/register')
            page = result.data.decode()
            self.assertEqual(200, result.status_code)
            self.assertIn("Username", page)
            self.assertIn("Confirm Password", page)

    def test_register_form(self):
        """Matching passwords create the user and redirect."""
        with test_database(TEST_DB, (User, Todo)):
            self.app.get('/register')
            form = {'username': 'username',
                    'password': 'password',
                    'password2': 'password'}
            response = self.app.post('/register', data=form)
            self.assertEqual(302, response.status_code)
            # The user must actually exist afterwards.
            self.assertTrue(User.get(User.username == 'username'))

    def test_bad_register_form(self):
        """Mismatched passwords re-render the form with an error."""
        with test_database(TEST_DB, (User, Todo)):
            self.app.get('/register')
            form = {'username': 'username',
                    'password': 'password',
                    'password2': 'incorrect'}
            response = self.app.post('/register', data=form)
            self.assertEqual(200, response.status_code)
            self.assertIn("Passwords must match", response.data.decode())
class APITestCase(ViewTestCase):
"""This ensures that the API is working correctly."""
def test_create_user(self):
"""This checks to see if create user via API works"""
with test_database(TEST_DB, (User, Todo)):
data = {
'username': 'test',
'password': 'password',
'password_verification': 'password'
}
user_url = 'http://localhost:8000/api/v1/users'
# This has created our first user
response = self.app.post(
user_url, data=data, headers=BASIC_AUTH_HEADERS
)
# This converts the response from JSON to a python dict
response_decoded = response.data.decode("utf-8")
response_decoded = json.loads(response_decoded)
self.assertTrue(type(response.data) is bytes)
self.assertEqual(response.status_code, 201)
self.assertEqual(
response_decoded,
{"username": "test"})
# This tests the creation of a second user with the same name
response = self.app.post(
user_url, data=data, headers=BASIC_AUTH_HEADERS
)
response_decoded = response.data.decode("utf-8")
response_decoded = json.loads(response_decoded)
self.assertTrue(type(response.data) is bytes)
self.assertEqual(
response_decoded,
{"error": "A user with that username already exists"})
# This checks the creation of a user with an invalid password
data = {
'username': 'test',
'password': 'password',
'password_verification': 'incorrect password'
}
response = self.app.post(
user_url, data=data, headers=BASIC_AUTH_HEADERS
)
response_decoded = response.data.decode("utf-8")
response_decoded = json.loads(response_decoded)
self.assertTrue(type(response.data) is bytes)
self.assertEqual(
response_decoded,
{"error": "password and password_verification do not match"})
def test_todo_list_get(self):
"""This checks to see if the TodoList GET api is working."""
with test_database(TEST_DB, (User, Todo)):
Todo.create(
name='Plant some seeds in the garden.'
)
# This gets the api response from the server
response = self.app.get(TODO_LIST_URL)
# This converts the response from JSON to a python dict
response_decoded = response.data.decode("utf-8")
response_decoded = json.loads(response_decoded)
self.assertTrue(type(response.data) is bytes)
self.assertEqual(response.status_code, 200)
# id should be 1 because it is the first item in the database.
# Name should be the value created above.
self.assertEqual(
response_decoded,
[{"id": 1, "name": "Plant some seeds in the garden."}])
def test_todo_list_post(self):
"""This checks to see if the TodoList POST api is working."""
with test_database(TEST_DB, (User, Todo)):
User.user_create(
username='username',
password='password'
)
data = {'name': 'Going to the grocery store'}
response = self.app.post(
TODO_LIST_URL, data=data, headers=BASIC_AUTH_HEADERS
)
# This converts the response from JSON to a python dict
response_decoded = response.data.decode("utf-8")
response_decoded = json.loads(response_decoded)
self.assertTrue(type(response.data) is bytes)
self.assertEqual(response.status_code, 201)
# id should be 1 because it is the first item in the database.
# Name should be the value created above.
self.assertEqual(
response_decoded,
{'id': 1, 'name': 'Going to the grocery store'})
todo_object = Todo.get(Todo.id == 1)
self.assertEqual(todo_object.name, 'Going to the grocery store')
def test_todo_list_post_unauthorized(self):
"""This checks to see if the TodoList POST api is working."""
with test_database(TEST_DB, (User, Todo)):
data = {'name': 'Going to the grocery store'}
response = self.app.post(TODO_LIST_URL, data=data)
self.assertEqual(response.status_code, 401)
def test_todo_put(self):
"""This checks to see if the Todo PUT api is working."""
with test_database(TEST_DB, (User, Todo)):
User.user_create(
username='username',
password='password'
)
Todo.create(
name='Plant some seeds in the garden.'
)
# This converts a python dictionary to JSON to send to the API
data = {"id": 1, "name": "Water the seeds in the garden."}
response = self.app.put(TODO_ITEM_URL.format(1), data=data,
headers=BASIC_AUTH_HEADERS)
# This converts the response from JSON to a python dict
response_decoded = response.data.decode("utf-8")
response_decoded = json.loads(response_decoded)
self.assertTrue(type(response.data) is bytes)
self.assertEqual(response.status_code, 200)
# id should be 1 because it is the first item in the database.
# Name should be the new value created above.
# The response should not be a list, but a dict.
self.assertEqual(
response_decoded,
{"id": 1, "name": "Water the seeds in the garden."})
def test_todo_put_token(self):
"""This checks to see if the Todo PUT api is working."""
with test_database(TEST_DB, (User, Todo)):
user = User.user_create(
username='username',
password='password'
)
Todo.create(
name='Plant some seeds in the garden.'
)
token = user.generate_auth_token().decode('ascii')
headers = {'Authorization': 'Token {}'.format(token)}
# This converts a python dictionary to JSON to send to the API
data = {"id": 1, "name": "Water the seeds in the garden."}
response = self.app.put(TODO_ITEM_URL.format(1), data=data,
headers=headers)
# This converts the response from JSON to a python dict
response_decoded = response.data.decode("utf-8")
response_decoded = json.loads(response_decoded)
self.assertTrue(type(response.data) is bytes)
self.assertEqual(response.status_code, 200)
# id should be 1 because it is the first item in the database.
# Name should be the new value created above.
# The response should not be a list, but a dict.
self.assertEqual(
response_decoded,
{"id": 1, "name": "Water the seeds in the garden."})
def test_todo_put_unauthorized(self):
"""This checks to see if the Todo PUT api is working."""
with test_database(TEST_DB, (User, Todo)):
Todo.create(
name='Plant some seeds in the garden.'
)
# This converts a python dictionary to JSON to send to the API
data = {"id": 1, "name": "Water the seeds in the garden."}
response = self.app.put(TODO_ITEM_URL.format(1), data=data)
self.assertEqual(response.status_code, 401)
def test_todo_delete(self):
"""This checks to see if the Todo DELETE API is working."""
with test_database(TEST_DB, (User, Todo)):
User.user_create(
username='username',
password='password'
)
Todo.create(
name='Plant some seeds in the garden.'
)
response = self.app.delete(TODO_ITEM_URL.format(1),
headers=BASIC_AUTH_HEADERS)
self.assertEqual(response.status_code, 204)
# This tests to see if the Todo object is in the database.
self.assertEqual(Todo.select().count(), 0)
def test_todo_delete_token(self):
"""This checks to see if the Todo DELETE API is working."""
with test_database(TEST_DB, (User, Todo)):
user = User.user_create(
username='username',
password='password'
)
Todo.create(
name='Plant some seeds in the garden.'
)
token = user.generate_auth_token().decode('ascii')
headers = {'Authorization': 'Token {}'.format(token)}
response = self.app.delete(TODO_ITEM_URL.format(1),
headers=headers)
self.assertEqual(response.status_code, 204)
# This tests to see if the Todo object is in the database.
self.assertEqual(Todo.select().count(), 0)
def test_todo_delete_unauthorized(self):
"""This checks to see if the Todo DELETE API is working."""
with test_database(TEST_DB, (User, Todo)):
Todo.create(
name='Plant some seeds in the garden.'
)
response = self.app.delete(TODO_ITEM_URL.format(1))
self.assertEqual(response.status_code, 401)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 38.766585
| 78
| 0.575105
| 1,812
| 15,778
| 4.887417
| 0.108168
| 0.049119
| 0.03794
| 0.047425
| 0.810976
| 0.776874
| 0.751468
| 0.717593
| 0.707543
| 0.684395
| 0
| 0.009637
| 0.322601
| 15,778
| 406
| 79
| 38.862069
| 0.818956
| 0.176702
| 0
| 0.632143
| 0
| 0
| 0.127281
| 0.004913
| 0
| 0
| 0
| 0.002463
| 0.164286
| 1
| 0.078571
| false
| 0.096429
| 0.025
| 0
| 0.128571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
aa7486af2e77a01cbd8efa7a3152795930da5b32
| 175
|
py
|
Python
|
__init__.py
|
MarkosMuche/plotly-scientific-plots
|
be18dd2cc2ac77ad1761c94d33f51cb91af7b290
|
[
"MIT"
] | null | null | null |
__init__.py
|
MarkosMuche/plotly-scientific-plots
|
be18dd2cc2ac77ad1761c94d33f51cb91af7b290
|
[
"MIT"
] | null | null | null |
__init__.py
|
MarkosMuche/plotly-scientific-plots
|
be18dd2cc2ac77ad1761c94d33f51cb91af7b290
|
[
"MIT"
] | null | null | null |
import os
import sys

# Make the package importable regardless of the current working directory:
# put this file's directory and its bundled plotly_scientific_plots package
# directory on sys.path.
_pkg_root = os.path.abspath(os.path.dirname(__file__))
sys.path.append(_pkg_root)
sys.path.append(os.path.join(_pkg_root, 'plotly_scientific_plots'))
| 29.166667
| 89
| 0.731429
| 27
| 175
| 4.37037
| 0.407407
| 0.20339
| 0.220339
| 0.254237
| 0.677966
| 0.677966
| 0.677966
| 0.677966
| 0.677966
| 0.677966
| 0
| 0
| 0.057143
| 175
| 5
| 90
| 35
| 0.715152
| 0
| 0
| 0
| 0
| 0
| 0.154286
| 0.131429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
aa8d625578bd4a6fa016766193b9faa92a056292
| 12,766
|
py
|
Python
|
project/tests/test_users.py
|
KevDi/flask_user_service
|
9a709a98518b2d88e2b5728bcbef6a1f1bf82604
|
[
"Apache-2.0"
] | null | null | null |
project/tests/test_users.py
|
KevDi/flask_user_service
|
9a709a98518b2d88e2b5728bcbef6a1f1bf82604
|
[
"Apache-2.0"
] | null | null | null |
project/tests/test_users.py
|
KevDi/flask_user_service
|
9a709a98518b2d88e2b5728bcbef6a1f1bf82604
|
[
"Apache-2.0"
] | null | null | null |
import json
from project.api.models import User
from project import db
from project.tests.base import BaseTestCase
import datetime
from project.tests.utils import add_user
class TestUserService(BaseTestCase):
    """Tests for the Users Service."""

    # --- helpers (factored out of eight near-identical tests) --------------

    def _login(self, email='test@test.com', password='test'):
        """Log in through /auth/login and return the auth token string."""
        response = self.client.post(
            '/auth/login',
            data=json.dumps(dict(email=email, password=password)),
            content_type='application/json'
        )
        return json.loads(response.data.decode())['auth_token']

    def _post_user(self, token, **payload):
        """POST *payload* as JSON to /users using Bearer *token* auth."""
        return self.client.post(
            '/users',
            data=json.dumps(payload),
            content_type='application/json',
            headers=dict(Authorization='Bearer ' + token)
        )

    def _make_admin(self, email='test@test.com'):
        """Promote the user identified by *email* to admin."""
        user = User.query.filter_by(email=email).first()
        user.admin = True
        db.session.commit()

    # --- tests -------------------------------------------------------------

    def test_users(self):
        """Ensure the /ping route behaves correctly."""
        response = self.client.get('/ping')
        data = json.loads(response.data.decode())
        self.assertEqual(response.status_code, 200)
        self.assertIn('pong!', data['message'])
        self.assertIn('success', data['status'])

    def test_add_user(self):
        """Ensure a new user can be added to the database."""
        add_user('test', 'test@test.com', 'test')
        self._make_admin()
        with self.client:
            token = self._login()
            response = self._post_user(
                token,
                username='michael',
                email='michael@realpython.com',
                password='test'
            )
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 201)
            self.assertIn('michael@realpython.com was added!', data['message'])
            self.assertIn('success', data['status'])

    def test_add_user_invalid_json(self):
        """Ensure error is thrown if the JSON object is empty."""
        add_user('test', 'test@test.com', 'test')
        self._make_admin()
        with self.client:
            token = self._login()
            # An empty JSON payload must be rejected.
            response = self._post_user(token)
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 400)
            self.assertIn('Invalid payload.', data['message'])
            self.assertIn('fail', data['status'])

    def test_add_user_invalid_json_keys(self):
        """
        Ensure error is thrown if the JSON object does not have a username key.
        """
        add_user('test', 'test@test.com', 'test')
        self._make_admin()
        with self.client:
            token = self._login()
            response = self._post_user(
                token,
                email='michael@realpython.com',
                password='test'
            )
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 400)
            self.assertIn('Invalid payload.', data['message'])
            self.assertIn('fail', data['status'])

    def test_add_user_invalid_json_keys_no_password(self):
        """
        Ensure error is thrown if the JSON object does not have a password key.
        """
        add_user('test', 'test@test.com', 'test')
        self._make_admin()
        with self.client:
            token = self._login()
            response = self._post_user(
                token,
                username='michael',
                email='michael@realpython.com'
            )
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 400)
            self.assertIn('Invalid payload.', data['message'])
            self.assertIn('fail', data['status'])

    def test_add_user_duplicate_email(self):
        """Ensure error is thrown if the email already exists."""
        add_user('test', 'test@test.com', 'test')
        self._make_admin()
        with self.client:
            token = self._login()
            self._post_user(
                token,
                username='michael',
                email='michael@realpython.com',
                password='test'
            )
            # Posting the same email a second time must fail.
            response = self._post_user(
                token,
                username='michael',
                email='michael@realpython.com',
                password='test'
            )
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 400)
            self.assertIn(
                'Sorry. That email already exists.', data['message'])
            self.assertIn('fail', data['status'])

    def test_add_user_inactive(self):
        """Ensure an inactive user cannot add new users."""
        add_user('test', 'test@test.com', 'test')
        # Deactivate the user before issuing the request.
        user = User.query.filter_by(email='test@test.com').first()
        user.active = False
        db.session.commit()
        with self.client:
            token = self._login()
            response = self._post_user(
                token,
                username='michael',
                email='michael@realpython.com',
                password='test'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'error')
            self.assertTrue(
                data['message'] == 'Something went wrong. Please contact us.')
            self.assertEqual(response.status_code, 401)

    def test_add_user_not_admin(self):
        """Ensure a non-admin user cannot add new users."""
        add_user('test', 'test@test.com', 'test')
        with self.client:
            token = self._login()
            response = self._post_user(
                token,
                username='michael',
                email='michael@realpython.com',
                password='test'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'error')
            self.assertTrue(
                data['message'] == 'You do not have permission to do that.')
            self.assertEqual(response.status_code, 401)

    def test_single_user(self):
        """Ensure get single user behaves correctly."""
        user = add_user('michael', 'michael@realpython.com', 'test..123')
        with self.client:
            response = self.client.get(f'/users/{user.id}')
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 200)
            self.assertTrue('created_at' in data['data'])
            self.assertIn('michael', data['data']['username'])
            self.assertIn('michael@realpython.com', data['data']['email'])
            self.assertIn('success', data['status'])

    def test_single_user_no_id(self):
        """Ensure error is thrown if an id is not provided."""
        with self.client:
            response = self.client.get('/users/blah')
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 404)
            self.assertIn('User does not exist', data['message'])
            self.assertIn('fail', data['status'])

    def test_single_user_incorrect_id(self):
        """Ensure error is thrown if the id does not exist."""
        with self.client:
            response = self.client.get('/users/999')
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 404)
            self.assertIn('User does not exist', data['message'])
            self.assertIn('fail', data['status'])

    def test_all_users(self):
        """Ensure get all users behaves correctly."""
        # Backdate the first user so the created_at ordering is deterministic.
        created = datetime.datetime.utcnow() + datetime.timedelta(-30)
        add_user('michael', 'michael@realpython.com', 'test..123', created)
        add_user('fletcher', 'fletcher@realpython.com', 'test..123')
        with self.client:
            response = self.client.get('/users')
            data = json.loads(response.data.decode())
            self.assertEqual(response.status_code, 200)
            self.assertEqual(len(data['data']['users']), 2)
            self.assertTrue('created_at' in data['data']['users'][0])
            self.assertTrue('created_at' in data['data']['users'][1])
            # Newest user first: fletcher at index 0, michael at index 1.
            self.assertIn('michael', data['data']['users'][1]['username'])
            self.assertIn(
                'michael@realpython.com', data['data']['users'][1]['email'])
            self.assertIn('fletcher', data['data']['users'][0]['username'])
            self.assertIn(
                'fletcher@realpython.com', data['data']['users'][0]['email'])
            self.assertIn('success', data['status'])
| 38.920732
| 79
| 0.498355
| 1,236
| 12,766
| 5.056634
| 0.11165
| 0.0496
| 0.0352
| 0.0408
| 0.8608
| 0.84208
| 0.828
| 0.804
| 0.732
| 0.72592
| 0
| 0.007143
| 0.374902
| 12,766
| 327
| 80
| 39.039755
| 0.776065
| 0.053658
| 0
| 0.747368
| 0
| 0
| 0.158708
| 0.025881
| 0
| 0
| 0
| 0
| 0.154386
| 1
| 0.042105
| false
| 0.049123
| 0.021053
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.